content_type
stringclasses 8
values | main_lang
stringclasses 7
values | message
stringlengths 1
50
| sha
stringlengths 40
40
| patch
stringlengths 52
962k
| file_count
int64 1
300
|
---|---|---|---|---|---|
Javascript | Javascript | remove first guards | acaf8a0316ec7bd737a3bdd813c1f02262fc80f6 | <ide><path>src/config.js
<ide> sizes. See [this document][watches] for more info.
<ide> })
<ide> }
<ide>
<del> getRawValue (keyPath, options) {
<add> getRawValue (keyPath, options = {}) {
<ide> let defaultValue, value
<del> if (!(__guard__(options != null ? options.excludeSources : undefined, x => x.indexOf(this.getUserConfigPath())) >= 0)) {
<add>
<add> const configIndex = (options.excludeSources || []).indexOf(this.getUserConfigPath())
<add> if (configIndex < 0) {
<ide> value = getValueAtKeyPath(this.settings, keyPath)
<ide> }
<del> if (!(__guard__(options != null ? options.sources : undefined, x1 => x1.length) > 0)) {
<add>
<add> const optionSources = (options.sources || []).length
<add> if (optionSources <= 0) {
<ide> defaultValue = getValueAtKeyPath(this.defaultSettings, keyPath)
<ide> }
<ide>
<ide> let sortObject = function (value) {
<ide> return result
<ide> }
<ide>
<del>let withoutEmptyObjects = function (object) {
<add>var withoutEmptyObjects = function (object) {
<ide> let resultObject
<ide> if (isPlainObject(object)) {
<ide> for (let key in object) { | 1 |
Go | Go | use prefix naming for attach tests | f312f784e18f3a0a087e615162c1e38b1bae8e51 | <ide><path>integration-cli/docker_cli_attach_test.go
<ide> import (
<ide>
<ide> const attachWait = 5 * time.Second
<ide>
<del>func TestMultipleAttachRestart(t *testing.T) {
<add>func TestAttachMultipleAndRestart(t *testing.T) {
<ide> defer deleteAllContainers()
<ide>
<ide> endGroup := &sync.WaitGroup{} | 1 |
Ruby | Ruby | remove arity check for `routeset#draw` | 8d23f37b15e1b3eca694ebbd6dd7dbf0016c5fbe | <ide><path>actionpack/lib/action_dispatch/routing/route_set.rb
<ide> def prepend(&block)
<ide> end
<ide>
<ide> def eval_block(block)
<del> if block.arity == 1
<del> raise "You are using the old router DSL which has been removed in Rails 3.1. " <<
<del> "Please check how to update your routes file at: http://www.engineyard.com/blog/2010/the-lowdown-on-routes-in-rails-3/"
<del> end
<ide> mapper = Mapper.new(self)
<ide> if default_scope
<ide> mapper.with_default_scope(default_scope, &block)
<ide><path>actionpack/test/controller/routing_test.rb
<ide> def test_non_greedy_glob_regexp
<ide> assert_equal({:id=>"1", :filters=>"foo", :format=>"js"}, params)
<ide> end
<ide>
<del> def test_draw_with_block_arity_one_raises
<del> assert_raise(RuntimeError) do
<del> rs.draw { |map| map.match '/:controller(/:action(/:id))' }
<del> end
<del> end
<del>
<ide> def test_specific_controller_action_failure
<ide> rs.draw do
<ide> mount lambda {} => "/foo" | 2 |
Ruby | Ruby | remove unnecessary deletes | 3e3e23f0484b4891a70eecd89f30b8ad81852e8b | <ide><path>actionpack/lib/action_dispatch/routing/mapper.rb
<ide> class Mapping #:nodoc:
<ide> def self.build(scope, set, path, as, controller, default_action, to, via, formatted, options)
<ide> options = scope[:options].merge(options) if scope[:options]
<ide>
<del> options.delete :shallow_path
<del> options.delete :shallow_prefix
<del> options.delete :shallow
<del>
<ide> defaults = (scope[:defaults] || {}).dup
<ide> scope_constraints = scope[:constraints] || {}
<ide> | 1 |
PHP | PHP | fix cs and doc blocks | 117de2a24cb0885ea68b6564747cf84d62672e2b | <ide><path>src/Illuminate/Redis/PhpRedisDatabase.php
<ide> class PhpRedisDatabase extends Database
<ide> * Create a new Redis connection instance.
<ide> *
<ide> * @param array $servers
<add> * @return void
<ide> */
<ide> public function __construct(array $servers = [])
<ide> {
<ide> public function __construct(array $servers = [])
<ide> *
<ide> * @param array $clusters
<ide> * @param array $options
<add> * @return void
<ide> */
<ide> protected function createClusters(array $clusters, array $options = [])
<ide> {
<ide> protected function createClusters(array $clusters, array $options = [])
<ide> * @param array $servers
<ide> * @param array $options
<ide> * @param string $connection
<del> *
<ide> * @return array
<ide> */
<ide> protected function createAggregateClient(array $servers, array $options = [], $connection = 'default')
<ide> protected function createAggregateClient(array $servers, array $options = [], $c
<ide> *
<ide> * @param array $servers
<ide> * @param array $options
<del> *
<ide> * @return array
<ide> */
<ide> protected function createSingleClients(array $servers, array $options = [])
<ide> protected function createSingleClients(array $servers, array $options = [])
<ide> * Build a single cluster seed string from array.
<ide> *
<ide> * @param array $server
<del> *
<ide> * @return string
<ide> */
<ide> protected function buildClusterSeed(array $server)
<ide> public function subscribe($channels, Closure $callback, $connection = null, $met
<ide> * @param array|string $channels
<ide> * @param \Closure $callback
<ide> * @param string $connection
<add> * @return void
<ide> */
<ide> public function psubscribe($channels, Closure $callback, $connection = null)
<ide> {
<ide> public function psubscribe($channels, Closure $callback, $connection = null)
<ide> *
<ide> * @param array $servers
<ide> * @param array $options
<del> *
<ide> * @return RedisCluster
<ide> */
<ide> protected function createRedisClusterInstance(array $servers, array $options)
<ide> protected function createRedisClusterInstance(array $servers, array $options)
<ide> *
<ide> * @param array $server
<ide> * @param array $options
<del> *
<ide> * @return Redis
<ide> */
<ide> protected function createRedisInstance(array $server, array $options)
<ide> protected function createRedisInstance(array $server, array $options)
<ide> $client->connect($server['host'], $server['port'], $timeout);
<ide> }
<ide>
<del> if (!empty($server['prefix'])) {
<add> if (! empty($server['prefix'])) {
<ide> $client->setOption(Redis::OPT_PREFIX, $server['prefix']);
<ide> }
<ide>
<del> if (!empty($server['read_timeout'])) {
<add> if (! empty($server['read_timeout'])) {
<ide> $client->setOption(Redis::OPT_READ_TIMEOUT, $server['read_timeout']);
<ide> }
<ide>
<del> if (!empty($server['password'])) {
<add> if (! empty($server['password'])) {
<ide> $client->auth($server['password']);
<ide> }
<ide>
<del> if (!empty($server['database'])) {
<add> if (! empty($server['database'])) {
<ide> $client->select($server['database']);
<ide> }
<ide>
<ide><path>src/Illuminate/Redis/PredisDatabase.php
<ide> class PredisDatabase extends Database
<ide> * Create a new Redis connection instance.
<ide> *
<ide> * @param array $servers
<add> * @return void
<ide> */
<ide> public function __construct(array $servers = [])
<ide> {
<ide> public function __construct(array $servers = [])
<ide> *
<ide> * @param array $clusters
<ide> * @param array $options
<add> * @return void
<ide> */
<ide> protected function createClusters(array $clusters, array $options = [])
<ide> {
<ide> protected function createClusters(array $clusters, array $options = [])
<ide> * @param array $servers
<ide> * @param array $options
<ide> * @param string $connection
<del> *
<ide> * @return array
<ide> */
<ide> protected function createAggregateClient(array $servers, array $options = [], $connection = 'default')
<ide> protected function createAggregateClient(array $servers, array $options = [], $c
<ide> *
<ide> * @param array $servers
<ide> * @param array $options
<del> *
<ide> * @return array
<ide> */
<ide> protected function createSingleClients(array $servers, array $options = [])
<ide> protected function createSingleClients(array $servers, array $options = [])
<ide> * @param \Closure $callback
<ide> * @param string $connection
<ide> * @param string $method
<add> * @return void
<ide> */
<ide> public function subscribe($channels, Closure $callback, $connection = null, $method = 'subscribe')
<ide> {
<ide> public function subscribe($channels, Closure $callback, $connection = null, $met
<ide> * @param array|string $channels
<ide> * @param \Closure $callback
<ide> * @param string $connection
<add> * @return void
<ide> */
<ide> public function psubscribe($channels, Closure $callback, $connection = null)
<ide> {
<ide><path>tests/Redis/PhpRedisConnectionTest.php
<ide> protected function getRedis($cluster = false)
<ide> ],
<ide> 'clusters' => [
<ide> 'options' => [
<del> 'prefix' => 'cluster:'
<add> 'prefix' => 'cluster:',
<ide> ],
<ide> 'cluster-1' => [
<ide> [
<ide> protected function getRedis($cluster = false)
<ide> 'host' => '127.0.0.1',
<ide> 'port' => 6379,
<ide> 'database' => 0,
<del> ]
<add> ],
<ide> ],
<ide> ],
<ide> ];
<ide> protected function createRedisInstance(array $server, array $options)
<ide> }
<ide> }
<ide>
<del>class RedisStub {}
<add>class RedisStub
<add>{
<add>
<add>}
<ide>
<del>class RedisClusterStub {}
<add>class RedisClusterStub
<add>{
<add>
<add>}
<ide><path>tests/Redis/RedisConnectionTest.php
<ide> protected function getRedis($cluster = false)
<ide> ],
<ide> 'clusters' => [
<ide> 'options' => [
<del> 'prefix' => 'cluster:'
<add> 'prefix' => 'cluster:',
<ide> ],
<ide> 'cluster-1' => [
<ide> [
<ide> protected function getRedis($cluster = false)
<ide> 'host' => '127.0.0.1',
<ide> 'port' => 6379,
<ide> 'database' => 0,
<del> ]
<add> ],
<ide> ],
<ide> ],
<ide> ]; | 4 |
Text | Text | fix broken links | 36506b7944cf9cea9b3618f00ecfc68a41f86dd8 | <ide><path>man/docker-daemon.8.md
<ide> multiple plugins installed, at least one must allow the request for it to
<ide> complete.
<ide>
<ide> For information about how to create an authorization plugin, see [authorization
<del>plugin](https://docs.docker.com/engine/extend/plugins_authorization.md) section in the
<add>plugin](https://docs.docker.com/engine/extend/authorization/) section in the
<ide> Docker extend section of this documentation.
<ide>
<ide>
<ide><path>man/docker-network-create.1.md
<ide> The `docker daemon` options that support the `overlay` network are:
<ide>
<ide> To read more about these options and how to configure them, see ["*Get started
<ide> with multi-host
<del>network*"](https://www.docker.com/engine/userguide/networking/get-started-overlay.md).
<add>network*"](https://docs.docker.com/engine/userguide/networking/get-started-overlay/).
<ide>
<ide> It is also a good idea, though not required, that you install Docker Swarm on to
<ide> manage the cluster that makes up your network. Swarm provides sophisticated | 2 |
Ruby | Ruby | fix filesystem race condition | abd74308957b383777c6a391017d39097309fe07 | <ide><path>actionpack/test/controller/log_subscriber_test.rb
<ide> def setup
<ide>
<ide> @old_logger = ActionController::Base.logger
<ide>
<del> @cache_path = File.expand_path('../temp/test_cache', File.dirname(__FILE__))
<add> @cache_path = File.join Dir.tmpdir, Dir::Tmpname.make_tmpname('tmp', 'cache')
<ide> @controller.cache_store = :file_store, @cache_path
<ide> ActionController::LogSubscriber.attach_to :action_controller
<ide> end | 1 |
Python | Python | add random graph generator | ba710054844fde4ccca666464c4bd08207e64a0d | <ide><path>graphs/random_graph_generator.py
<add>"""
<add>* Author: Manuel Di Lullo (https://github.com/manueldilullo)
<add>* Description: Random graphs generator.
<add> Uses graphs represented with an adjacency list.
<add>
<add>URL: https://en.wikipedia.org/wiki/Random_graph
<add>"""
<add>
<add>import random
<add>
<add>
<add>def random_graph(
<add> vertices_number: int, probability: float, directed: bool = False
<add>) -> dict:
<add> """
<add> Generate a random graph
<add> @input: vertices_number (number of vertices),
<add> probability (probability that a generic edge (u,v) exists),
<add> directed (if True: graph will be a directed graph,
<add> otherwise it will be an undirected graph)
<add> @examples:
<add> >>> random.seed(1)
<add> >>> random_graph(4, 0.5)
<add> {0: [1], 1: [0, 2, 3], 2: [1, 3], 3: [1, 2]}
<add> >>> random.seed(1)
<add> >>> random_graph(4, 0.5, True)
<add> {0: [1], 1: [2, 3], 2: [3], 3: []}
<add> """
<add> graph = {i: [] for i in range(vertices_number)}
<add>
<add> # if probability is greater or equal than 1, then generate a complete graph
<add> if probability >= 1:
<add> return complete_graph(vertices_number)
<add> # if probability is lower or equal than 0, then return a graph without edges
<add> if probability <= 0:
<add> return graph
<add>
<add> # for each couple of nodes, add an edge from u to v
<add> # if the number randomly generated is greater than probability probability
<add> for i in range(vertices_number):
<add> for j in range(i + 1, vertices_number):
<add> if random.random() < probability:
<add> graph[i].append(j)
<add> if not directed:
<add> # if the graph is undirected, add an edge in from j to i, either
<add> graph[j].append(i)
<add> return graph
<add>
<add>
<add>def complete_graph(vertices_number: int) -> dict:
<add> """
<add> Generate a complete graph with vertices_number vertices.
<add> @input: vertices_number (number of vertices),
<add> directed (False if the graph is undirected, True otherwise)
<add> @example:
<add> >>> print(complete_graph(3))
<add> {0: [1, 2], 1: [0, 2], 2: [0, 1]}
<add> """
<add> return {
<add> i: [j for j in range(vertices_number) if i != j] for i in range(vertices_number)
<add> }
<add>
<add>
<add>if __name__ == "__main__":
<add> import doctest
<add>
<add> doctest.testmod() | 1 |
Text | Text | fix filehandle.truncate() sample codes | ac8226115e2192a7a46ba07789fa5136f74223e1 | <ide><path>doc/api/fs.md
<ide> console.log(fs.readFileSync('temp.txt', 'utf8'));
<ide> // Prints: Node.js
<ide>
<ide> async function doTruncate() {
<del> const fd = await fsPromises.open('temp.txt', 'r+');
<del> await fsPromises.ftruncate(fd, 4);
<add> let filehandle = null;
<add> try {
<add> filehandle = await fsPromises.open('temp.txt', 'r+');
<add> await filehandle.truncate(4);
<add> } finally {
<add> if (filehandle) {
<add> // close the file if it is opened.
<add> await filehandle.close();
<add> }
<add> }
<ide> console.log(fs.readFileSync('temp.txt', 'utf8')); // Prints: Node
<ide> }
<ide>
<ide> console.log(fs.readFileSync('temp.txt', 'utf8'));
<ide> // Prints: Node.js
<ide>
<ide> async function doTruncate() {
<del> const fd = await fsPromises.open('temp.txt', 'r+');
<del> await fsPromises.ftruncate(fd, 10);
<add> let filehandle = null;
<add> try {
<add> filehandle = await fsPromises.open('temp.txt', 'r+');
<add> await filehandle.truncate(10);
<add> } finally {
<add> if (filehandle) {
<add> // close the file if it is opened.
<add> await filehandle.close();
<add> }
<add> }
<ide> console.log(fs.readFileSync('temp.txt', 'utf8')); // Prints Node.js\0\0\0
<ide> }
<ide> | 1 |
Javascript | Javascript | relax permissionsandroid enforcement | f6e1c164c2021fdaa8a8b0659dd341343594bd61 | <ide><path>Libraries/PermissionsAndroid/NativePermissionsAndroid.js
<ide> export interface Spec extends TurboModule {
<ide> ) => Promise<{[permission: PermissionType]: PermissionStatus}>;
<ide> }
<ide>
<del>export default TurboModuleRegistry.get<Spec>('PermissionsAndroid');
<add>export default TurboModuleRegistry.getEnforcing<Spec>('PermissionsAndroid');
<ide><path>Libraries/PermissionsAndroid/PermissionsAndroid.js
<ide> 'use strict';
<ide>
<ide> import NativeDialogManagerAndroid from '../NativeModules/specs/NativeDialogManagerAndroid';
<add>const NativeModules = require('../BatchedBridge/NativeModules');
<ide> const Platform = require('../Utilities/Platform');
<ide> import NativePermissionsAndroid from './NativePermissionsAndroid';
<ide>
<del>import invariant from 'invariant';
<del>
<ide> import type {
<ide> PermissionStatus,
<ide> PermissionType,
<ide> class PermissionsAndroid {
<ide> return Promise.resolve(false);
<ide> }
<ide>
<del> invariant(
<del> NativePermissionsAndroid,
<del> 'PermissionsAndroid is not installed correctly.',
<del> );
<del>
<ide> return NativePermissionsAndroid.checkPermission(permission);
<ide> }
<ide>
<ide> class PermissionsAndroid {
<ide> );
<ide> return Promise.resolve(false);
<ide> }
<del>
<del> invariant(
<del> NativePermissionsAndroid,
<del> 'PermissionsAndroid is not installed correctly.',
<del> );
<del>
<ide> return NativePermissionsAndroid.checkPermission(permission);
<ide> }
<ide>
<ide> class PermissionsAndroid {
<ide> return Promise.resolve(this.RESULTS.DENIED);
<ide> }
<ide>
<del> invariant(
<del> NativePermissionsAndroid,
<del> 'PermissionsAndroid is not installed correctly.',
<del> );
<del>
<ide> if (rationale) {
<ide> const shouldShowRationale = await NativePermissionsAndroid.shouldShowRequestPermissionRationale(
<ide> permission,
<ide> class PermissionsAndroid {
<ide> return Promise.resolve({});
<ide> }
<ide>
<del> invariant(
<del> NativePermissionsAndroid,
<del> 'PermissionsAndroid is not installed correctly.',
<del> );
<del>
<ide> return NativePermissionsAndroid.requestMultiplePermissions(permissions);
<ide> }
<ide> } | 2 |
Text | Text | update fips instructions in readme.md | d98eed51f782ea44c3fd7823b2912f7fb30ab185 | <ide><path>README.md
<ide> NOTE: Windows is not yet supported
<ide> It is possible to build io.js with
<ide> [OpenSSL FIPS module](https://www.openssl.org/docs/fips/fipsnotes.html).
<ide>
<add>**Note** that building in this way does **not** allow you to
<add>claim that the runtime is FIPS 140-2 validated. Instead you
<add>can indicate that the runtime uses a validated module. See
<add>the [security policy]
<add>(http://csrc.nist.gov/groups/STM/cmvp/documents/140-1/140sp/140sp1747.pdf)
<add>page 60 for more details. In addition, the validation for
<add>the underlying module is only valid if it is deployed in
<add>accordance with its [security policy]
<add>(http://csrc.nist.gov/groups/STM/cmvp/documents/140-1/140sp/140sp1747.pdf).
<add>If you need FIPS validated cryptography it is recommended that you
<add>read both the [security policy]
<add>(http://csrc.nist.gov/groups/STM/cmvp/documents/140-1/140sp/140sp1747.pdf)
<add>and [user guide] (https://openssl.org/docs/fips/UserGuide-2.0.pdf).
<add>
<ide> Instructions:
<ide>
<del>1. Download and verify `openssl-fips-x.x.x.tar.gz` from
<del> https://www.openssl.org/source/
<del>2. Extract source to `openssl-fips` folder
<del>3. ``cd openssl-fips && ./config fipscanisterbuild --prefix=`pwd`/out``
<del> (NOTE: On OS X, you may want to run
<del> ``./Configure darwin64-x86_64-cc --prefix=`pwd`/out`` if you are going to
<del> build x64-mode io.js)
<del>4. `make -j && make install`
<del>5. Get into io.js checkout folder
<del>6. `./configure --openssl-fips=/path/to/openssl-fips/out`
<del>7. Build io.js with `make -j`
<del>8. Verify with `node -p "process.versions.openssl"` (`1.0.2a-fips`)
<add>1. Obtain a copy of openssl-fips-x.x.x.tar.gz.
<add> To comply with the security policy you must ensure the path
<add> through which you get the file complies with the requirements
<add> for a "secure intallation" as described in section 6.6 in
<add> the [user guide] (https://openssl.org/docs/fips/UserGuide-2.0.pdf).
<add> For evaluation/experimentation you can simply download and verify
<add> `openssl-fips-x.x.x.tar.gz` from https://www.openssl.org/source/
<add>2. Extract source to `openssl-fips` folder and `cd openssl-fips`
<add>3. `./config`
<add>4. `make`
<add>5. `make install`
<add> (NOTE: to comply with the security policy you must use the exact
<add> commands in steps 3-5 without any additional options as per
<add> Appendix A in the [security policy]
<add> (http://csrc.nist.gov/groups/STM/cmvp/documents/140-1/140sp/140sp1747.pdf).
<add> The only exception is that `./config no-asm` can be
<add> used in place of `./config` )
<add>6. Get into io.js checkout folder
<add>7. `./configure --openssl-fips=/path/to/openssl-fips/installdir`
<add> For example on ubuntu 12 the installation directory was
<add> /usr/local/ssl/fips-2.0
<add>8. Build io.js with `make -j`
<add>9. Verify with `node -p "process.versions.openssl"` (`1.0.2a-fips`)
<add>
<ide>
<ide> ## Resources for Newcomers
<ide> | 1 |
Javascript | Javascript | fix merge issue | 249e7d56bb83337596ef24d4936682f5b8cf700e | <ide><path>lib/DefinePlugin.js
<ide> const {
<ide> const NullFactory = require("./NullFactory");
<ide>
<ide> /** @typedef {import("./Compiler")} Compiler */
<del>/** @typedef {import("./Parser")} Parser */
<add>/** @typedef {import("./JavascriptParser")} JavascriptParser */
<ide> /** @typedef {null|undefined|RegExp|Function|string|number} CodeValuePrimitive */
<ide> /** @typedef {CodeValuePrimitive|Record<string, CodeValuePrimitive>|RuntimeValue} CodeValue */
<ide>
<ide> const stringifyObj = (obj, parser) => {
<ide> /**
<ide> * Convert code to a string that evaluates
<ide> * @param {CodeValue} code Code to evaluate
<del> * @param {Parser} parser Parser
<add> * @param {JavascriptParser} parser Parser
<ide> * @returns {string} code converted to string that evaluates
<ide> */
<ide> const toCode = (code, parser) => {
<ide> class DefinePlugin {
<ide>
<ide> /**
<ide> * Handler
<del> * @param {Parser} parser Parser
<add> * @param {JavascriptParser} parser Parser
<ide> * @returns {void}
<ide> */
<ide> const handler = parser => { | 1 |
Ruby | Ruby | improve doc of automatic inverse_of detection | 323cfe1aaaf82774503f72a51f91ef2c9b909a1b | <ide><path>activerecord/lib/active_record/associations.rb
<ide> def association_instance_set(name, association)
<ide> # #belongs_to associations.
<ide> #
<ide> # Extra options on the associations, as defined in the
<del> # <tt>AssociationReflection::INVALID_AUTOMATIC_INVERSE_OPTIONS</tt> constant, will
<del> # also prevent the association's inverse from being found automatically.
<add> # <tt>AssociationReflection::INVALID_AUTOMATIC_INVERSE_OPTIONS</tt>
<add> # constant, or a custom scope, will also prevent the association's inverse
<add> # from being found automatically.
<ide> #
<ide> # The automatic guessing of the inverse association uses a heuristic based
<ide> # on the name of the class, so it may not work for all associations, | 1 |
Go | Go | fix outdated comment | 137c8601a8c744d464855755ce756f07e83aa0b7 | <ide><path>registry/endpoint_v1.go
<ide> type V1Endpoint struct {
<ide> IsSecure bool
<ide> }
<ide>
<del>// NewV1Endpoint parses the given address to return a registry endpoint. v can be used to
<del>// specify a specific endpoint version
<add>// NewV1Endpoint parses the given address to return a registry endpoint.
<ide> func NewV1Endpoint(index *registrytypes.IndexInfo, userAgent string, metaHeaders http.Header) (*V1Endpoint, error) {
<ide> tlsConfig, err := newTLSConfig(index.Name, index.Secure)
<ide> if err != nil { | 1 |
Java | Java | fix several typos | d747d5259280ad1c7a17415e51082c614d1b2ab7 | <ide><path>src/test/java/rx/internal/operators/OperatorOnBackpressureBufferTest.java
<ide> public void testFixBackpressureBufferNegativeCapacity() throws InterruptedExcept
<ide>
<ide> @Test(expected = IllegalArgumentException.class)
<ide> public void testFixBackpressureBufferZeroCapacity() throws InterruptedException {
<del> Observable.empty().onBackpressureBuffer(-1);
<add> Observable.empty().onBackpressureBuffer(0);
<ide> }
<ide>
<ide> @Test
<ide><path>src/test/java/rx/internal/operators/OperatorOnBackpressureDropTest.java
<ide> public void testNoBackpressureSupport() {
<ide> @Test(timeout = 500)
<ide> public void testWithObserveOn() throws InterruptedException {
<ide> TestSubscriber<Integer> ts = new TestSubscriber<Integer>();
<del> Observable.range(0, RxRingBuffer.SIZE * 10).onBackpressureDrop().onBackpressureDrop().observeOn(Schedulers.io()).subscribe(ts);
<add> Observable.range(0, RxRingBuffer.SIZE * 10).onBackpressureDrop().observeOn(Schedulers.io()).subscribe(ts);
<ide> ts.awaitTerminalEvent();
<ide> }
<ide> | 2 |
Go | Go | convert dockerd to use cobra and pflag | fb83394714a9797f8ca5a08023a89560ce6c4aa3 | <ide><path>cli/flags/client.go
<ide> package flags
<ide>
<del>import flag "github.com/docker/docker/pkg/mflag"
<add>import (
<add> "github.com/spf13/pflag"
<add>)
<ide>
<ide> // ClientFlags represents flags for the docker client.
<ide> type ClientFlags struct {
<del> FlagSet *flag.FlagSet
<del> Common *CommonFlags
<add> FlagSet *pflag.FlagSet
<add> Common *CommonOptions
<ide> PostParse func()
<ide>
<ide> ConfigDir string
<ide><path>cli/flags/common.go
<ide> import (
<ide> "github.com/Sirupsen/logrus"
<ide> "github.com/docker/docker/cliconfig"
<ide> "github.com/docker/docker/opts"
<del> flag "github.com/docker/docker/pkg/mflag"
<ide> "github.com/docker/go-connections/tlsconfig"
<add> "github.com/spf13/pflag"
<ide> )
<ide>
<ide> const (
<ide> const (
<ide> DefaultKeyFile = "key.pem"
<ide> // DefaultCertFile is the default filename for the cert pem file
<ide> DefaultCertFile = "cert.pem"
<del> // TLSVerifyKey is the default flag name for the tls verification option
<del> TLSVerifyKey = "tlsverify"
<add> // FlagTLSVerify is the flag name for the tls verification option
<add> FlagTLSVerify = "tlsverify"
<ide> )
<ide>
<ide> var (
<ide> dockerCertPath = os.Getenv("DOCKER_CERT_PATH")
<ide> dockerTLSVerify = os.Getenv("DOCKER_TLS_VERIFY") != ""
<ide> )
<ide>
<del>// CommonFlags are flags common to both the client and the daemon.
<del>type CommonFlags struct {
<del> FlagSet *flag.FlagSet
<del> PostParse func()
<del>
<add>// CommonOptions are options common to both the client and the daemon.
<add>type CommonOptions struct {
<ide> Debug bool
<ide> Hosts []string
<ide> LogLevel string
<ide> type CommonFlags struct {
<ide> TrustKey string
<ide> }
<ide>
<del>// InitCommonFlags initializes flags common to both client and daemon
<del>func InitCommonFlags() *CommonFlags {
<del> var commonFlags = &CommonFlags{FlagSet: new(flag.FlagSet)}
<add>// NewCommonOptions returns a new CommonOptions
<add>func NewCommonOptions() *CommonOptions {
<add> return &CommonOptions{
<add> TLSOptions: &tlsconfig.Options{},
<add> }
<add>}
<ide>
<add>// InstallFlags adds flags for the common options on the FlagSet
<add>func (commonOpts *CommonOptions) InstallFlags(flags *pflag.FlagSet) {
<ide> if dockerCertPath == "" {
<ide> dockerCertPath = cliconfig.ConfigDir()
<ide> }
<ide>
<del> commonFlags.PostParse = func() { postParseCommon(commonFlags) }
<del>
<del> cmd := commonFlags.FlagSet
<add> flags.BoolVarP(&commonOpts.Debug, "debug", "D", false, "Enable debug mode")
<add> flags.StringVarP(&commonOpts.LogLevel, "log-level", "l", "info", "Set the logging level")
<add> flags.BoolVar(&commonOpts.TLS, "tls", false, "Use TLS; implied by --tlsverify")
<add> flags.BoolVar(&commonOpts.TLSVerify, FlagTLSVerify, dockerTLSVerify, "Use TLS and verify the remote")
<ide>
<del> cmd.BoolVar(&commonFlags.Debug, []string{"D", "-debug"}, false, "Enable debug mode")
<del> cmd.StringVar(&commonFlags.LogLevel, []string{"l", "-log-level"}, "info", "Set the logging level")
<del> cmd.BoolVar(&commonFlags.TLS, []string{"-tls"}, false, "Use TLS; implied by --tlsverify")
<del> cmd.BoolVar(&commonFlags.TLSVerify, []string{"-tlsverify"}, dockerTLSVerify, "Use TLS and verify the remote")
<add> // TODO use flag flags.String("identity"}, "i", "", "Path to libtrust key file")
<ide>
<del> // TODO use flag flag.String([]string{"i", "-identity"}, "", "Path to libtrust key file")
<add> tlsOptions := commonOpts.TLSOptions
<add> flags.StringVar(&tlsOptions.CAFile, "tlscacert", filepath.Join(dockerCertPath, DefaultCaFile), "Trust certs signed only by this CA")
<add> flags.StringVar(&tlsOptions.CertFile, "tlscert", filepath.Join(dockerCertPath, DefaultCertFile), "Path to TLS certificate file")
<add> flags.StringVar(&tlsOptions.KeyFile, "tlskey", filepath.Join(dockerCertPath, DefaultKeyFile), "Path to TLS key file")
<ide>
<del> var tlsOptions tlsconfig.Options
<del> commonFlags.TLSOptions = &tlsOptions
<del> cmd.StringVar(&tlsOptions.CAFile, []string{"-tlscacert"}, filepath.Join(dockerCertPath, DefaultCaFile), "Trust certs signed only by this CA")
<del> cmd.StringVar(&tlsOptions.CertFile, []string{"-tlscert"}, filepath.Join(dockerCertPath, DefaultCertFile), "Path to TLS certificate file")
<del> cmd.StringVar(&tlsOptions.KeyFile, []string{"-tlskey"}, filepath.Join(dockerCertPath, DefaultKeyFile), "Path to TLS key file")
<del>
<del> cmd.Var(opts.NewNamedListOptsRef("hosts", &commonFlags.Hosts, opts.ValidateHost), []string{"H", "-host"}, "Daemon socket(s) to connect to")
<del> return commonFlags
<add> hostOpt := opts.NewNamedListOptsRef("hosts", &commonOpts.Hosts, opts.ValidateHost)
<add> flags.VarP(hostOpt, "-host", "H", "Daemon socket(s) to connect to")
<ide> }
<ide>
<del>func postParseCommon(commonFlags *CommonFlags) {
<del> cmd := commonFlags.FlagSet
<del>
<del> SetDaemonLogLevel(commonFlags.LogLevel)
<del>
<add>// SetDefaultOptions sets default values for options after flag parsing is
<add>// complete
<add>func (commonOpts *CommonOptions) SetDefaultOptions(flags *pflag.FlagSet) {
<ide> // Regardless of whether the user sets it to true or false, if they
<ide> // specify --tlsverify at all then we need to turn on tls
<ide> // TLSVerify can be true even if not set due to DOCKER_TLS_VERIFY env var, so we need
<ide> // to check that here as well
<del> if cmd.IsSet("-"+TLSVerifyKey) || commonFlags.TLSVerify {
<del> commonFlags.TLS = true
<add> if flags.Changed(FlagTLSVerify) || commonOpts.TLSVerify {
<add> commonOpts.TLS = true
<ide> }
<ide>
<del> if !commonFlags.TLS {
<del> commonFlags.TLSOptions = nil
<add> if !commonOpts.TLS {
<add> commonOpts.TLSOptions = nil
<ide> } else {
<del> tlsOptions := commonFlags.TLSOptions
<del> tlsOptions.InsecureSkipVerify = !commonFlags.TLSVerify
<add> tlsOptions := commonOpts.TLSOptions
<add> tlsOptions.InsecureSkipVerify = !commonOpts.TLSVerify
<ide>
<ide> // Reset CertFile and KeyFile to empty string if the user did not specify
<ide> // the respective flags and the respective default files were not found.
<del> if !cmd.IsSet("-tlscert") {
<add> if !flags.Changed("tlscert") {
<ide> if _, err := os.Stat(tlsOptions.CertFile); os.IsNotExist(err) {
<ide> tlsOptions.CertFile = ""
<ide> }
<ide> }
<del> if !cmd.IsSet("-tlskey") {
<add> if !flags.Changed("tlskey") {
<ide> if _, err := os.Stat(tlsOptions.KeyFile); os.IsNotExist(err) {
<ide> tlsOptions.KeyFile = ""
<ide> }
<ide><path>cmd/docker/docker.go
<ide> func initClientFlags(commonFlags *cliflags.CommonFlags) *cliflags.ClientFlags {
<ide>
<ide> clientFlags.PostParse = func() {
<ide> clientFlags.Common.PostParse()
<add> cliflags.SetDaemonLogLevel(commonOpts.LogLevel)
<ide>
<ide> if clientFlags.ConfigDir != "" {
<ide> cliconfig.SetConfigDir(clientFlags.ConfigDir)
<ide><path>cmd/dockerd/daemon.go
<ide> import (
<ide> "io"
<ide> "os"
<ide> "path/filepath"
<del> "runtime"
<ide> "strings"
<ide> "time"
<ide>
<ide> import (
<ide> "github.com/docker/docker/daemon/logger"
<ide> "github.com/docker/docker/dockerversion"
<ide> "github.com/docker/docker/libcontainerd"
<del> "github.com/docker/docker/opts"
<add> dopts "github.com/docker/docker/opts"
<ide> "github.com/docker/docker/pkg/authorization"
<ide> "github.com/docker/docker/pkg/jsonlog"
<ide> "github.com/docker/docker/pkg/listeners"
<del> flag "github.com/docker/docker/pkg/mflag"
<ide> "github.com/docker/docker/pkg/pidfile"
<ide> "github.com/docker/docker/pkg/signal"
<ide> "github.com/docker/docker/pkg/system"
<ide> "github.com/docker/docker/registry"
<ide> "github.com/docker/docker/runconfig"
<ide> "github.com/docker/docker/utils"
<ide> "github.com/docker/go-connections/tlsconfig"
<add> "github.com/spf13/pflag"
<ide> )
<ide>
<ide> const (
<del> daemonConfigFileFlag = "-config-file"
<add> flagDaemonConfigFile = "config-file"
<ide> )
<ide>
<ide> // DaemonCli represents the daemon CLI.
<ide> type DaemonCli struct {
<ide> *daemon.Config
<del> commonFlags *cliflags.CommonFlags
<del> configFile *string
<add> configFile *string
<add> flags *pflag.FlagSet
<ide>
<ide> api *apiserver.Server
<ide> d *daemon.Daemon
<ide> authzMiddleware *authorization.Middleware // authzMiddleware enables to dynamically reload the authorization plugins
<ide> }
<ide>
<del>func presentInHelp(usage string) string { return usage }
<del>func absentFromHelp(string) string { return "" }
<del>
<del>// NewDaemonCli returns a pre-configured daemon CLI
<add>// NewDaemonCli returns a daemon CLI
<ide> func NewDaemonCli() *DaemonCli {
<del> // TODO(tiborvass): remove InstallFlags?
<del> daemonConfig := new(daemon.Config)
<del> daemonConfig.LogConfig.Config = make(map[string]string)
<del> daemonConfig.ClusterOpts = make(map[string]string)
<del>
<del> daemonConfig.InstallFlags(flag.CommandLine, presentInHelp)
<del> configFile := flag.CommandLine.String([]string{daemonConfigFileFlag}, defaultDaemonConfigFile, "Daemon configuration file")
<del> flag.CommandLine.Require(flag.Exact, 0)
<del>
<del> if runtime.GOOS != "linux" {
<del> daemonConfig.V2Only = true
<del> }
<del>
<del> return &DaemonCli{
<del> Config: daemonConfig,
<del> commonFlags: cliflags.InitCommonFlags(),
<del> configFile: configFile,
<del> }
<add> return &DaemonCli{}
<ide> }
<ide>
<ide> func migrateKey() (err error) {
<ide> func migrateKey() (err error) {
<ide> return nil
<ide> }
<ide>
<del>func (cli *DaemonCli) start() (err error) {
<add>func (cli *DaemonCli) start(opts daemonOptions) (err error) {
<ide> stopc := make(chan bool)
<ide> defer close(stopc)
<ide>
<ide> // warn from uuid package when running the daemon
<ide> uuid.Loggerf = logrus.Warnf
<ide>
<del> flags := flag.CommandLine
<del> cli.commonFlags.PostParse()
<add> opts.common.SetDefaultOptions(opts.flags)
<ide>
<del> if cli.commonFlags.TrustKey == "" {
<del> cli.commonFlags.TrustKey = filepath.Join(getDaemonConfDir(), cliflags.DefaultTrustKeyFile)
<add> if opts.common.TrustKey == "" {
<add> opts.common.TrustKey = filepath.Join(
<add> getDaemonConfDir(),
<add> cliflags.DefaultTrustKeyFile)
<ide> }
<del> cliConfig, err := loadDaemonCliConfig(cli.Config, flags, cli.commonFlags, *cli.configFile)
<del> if err != nil {
<add> if cli.Config, err = loadDaemonCliConfig(opts); err != nil {
<ide> return err
<ide> }
<del> cli.Config = cliConfig
<add> cli.configFile = &opts.configFile
<add> cli.flags = opts.flags
<ide>
<ide> if cli.Config.Debug {
<ide> utils.EnableDebug()
<ide> func (cli *DaemonCli) start() (err error) {
<ide>
<ide> for i := 0; i < len(cli.Config.Hosts); i++ {
<ide> var err error
<del> if cli.Config.Hosts[i], err = opts.ParseHost(cli.Config.TLS, cli.Config.Hosts[i]); err != nil {
<add> if cli.Config.Hosts[i], err = dopts.ParseHost(cli.Config.TLS, cli.Config.Hosts[i]); err != nil {
<ide> return fmt.Errorf("error parsing -H %s : %v", cli.Config.Hosts[i], err)
<ide> }
<ide>
<ide> func (cli *DaemonCli) start() (err error) {
<ide> if err := migrateKey(); err != nil {
<ide> return err
<ide> }
<del> cli.TrustKeyPath = cli.commonFlags.TrustKey
<add> // FIXME: why is this down here instead of with the other TrustKey logic above?
<add> cli.TrustKeyPath = opts.common.TrustKey
<ide>
<ide> registryService := registry.NewService(cli.Config.ServiceOptions)
<ide> containerdRemote, err := libcontainerd.New(cli.getLibcontainerdRoot(), cli.getPlatformRemoteOptions()...)
<ide> func (cli *DaemonCli) reloadConfig() {
<ide> }
<ide> }
<ide>
<del> if err := daemon.ReloadConfiguration(*cli.configFile, flag.CommandLine, reload); err != nil {
<add> if err := daemon.ReloadConfiguration(*cli.configFile, cli.flags, reload); err != nil {
<ide> logrus.Error(err)
<ide> }
<ide> }
<ide> func shutdownDaemon(d *daemon.Daemon, timeout time.Duration) {
<ide> }
<ide> }
<ide>
<del>func loadDaemonCliConfig(config *daemon.Config, flags *flag.FlagSet, commonConfig *cliflags.CommonFlags, configFile string) (*daemon.Config, error) {
<del> config.Debug = commonConfig.Debug
<del> config.Hosts = commonConfig.Hosts
<del> config.LogLevel = commonConfig.LogLevel
<del> config.TLS = commonConfig.TLS
<del> config.TLSVerify = commonConfig.TLSVerify
<add>func loadDaemonCliConfig(opts daemonOptions) (*daemon.Config, error) {
<add> config := opts.daemonConfig
<add> flags := opts.flags
<add> config.Debug = opts.common.Debug
<add> config.Hosts = opts.common.Hosts
<add> config.LogLevel = opts.common.LogLevel
<add> config.TLS = opts.common.TLS
<add> config.TLSVerify = opts.common.TLSVerify
<ide> config.CommonTLSOptions = daemon.CommonTLSOptions{}
<ide>
<del> if commonConfig.TLSOptions != nil {
<del> config.CommonTLSOptions.CAFile = commonConfig.TLSOptions.CAFile
<del> config.CommonTLSOptions.CertFile = commonConfig.TLSOptions.CertFile
<del> config.CommonTLSOptions.KeyFile = commonConfig.TLSOptions.KeyFile
<add> if opts.common.TLSOptions != nil {
<add> config.CommonTLSOptions.CAFile = opts.common.TLSOptions.CAFile
<add> config.CommonTLSOptions.CertFile = opts.common.TLSOptions.CertFile
<add> config.CommonTLSOptions.KeyFile = opts.common.TLSOptions.KeyFile
<ide> }
<ide>
<del> if configFile != "" {
<del> c, err := daemon.MergeDaemonConfigurations(config, flags, configFile)
<add> if opts.configFile != "" {
<add> c, err := daemon.MergeDaemonConfigurations(config, flags, opts.configFile)
<ide> if err != nil {
<del> if flags.IsSet(daemonConfigFileFlag) || !os.IsNotExist(err) {
<del> return nil, fmt.Errorf("unable to configure the Docker daemon with file %s: %v\n", configFile, err)
<add> if flags.Changed(flagDaemonConfigFile) || !os.IsNotExist(err) {
<add> return nil, fmt.Errorf("unable to configure the Docker daemon with file %s: %v\n", opts.configFile, err)
<ide> }
<ide> }
<ide> // the merged configuration can be nil if the config file didn't exist.
<ide> func loadDaemonCliConfig(config *daemon.Config, flags *flag.FlagSet, commonConfi
<ide>
<ide> // Regardless of whether the user sets it to true or false, if they
<ide> // specify TLSVerify at all then we need to turn on TLS
<del> if config.IsValueSet(cliflags.TLSVerifyKey) {
<add> if config.IsValueSet(cliflags.FlagTLSVerify) {
<ide> config.TLS = true
<ide> }
<ide>
<ide><path>cmd/dockerd/docker.go
<ide> package main
<ide>
<ide> import (
<ide> "fmt"
<del> "os"
<ide>
<ide> "github.com/Sirupsen/logrus"
<add> "github.com/docker/docker/cli"
<add> cliflags "github.com/docker/docker/cli/flags"
<add> "github.com/docker/docker/daemon"
<ide> "github.com/docker/docker/dockerversion"
<del> flag "github.com/docker/docker/pkg/mflag"
<ide> "github.com/docker/docker/pkg/reexec"
<ide> "github.com/docker/docker/pkg/term"
<ide> "github.com/docker/docker/utils"
<add> "github.com/spf13/cobra"
<add> "github.com/spf13/pflag"
<ide> )
<ide>
<del>var (
<del> daemonCli = NewDaemonCli()
<del> flHelp = flag.Bool([]string{"h", "-help"}, false, "Print usage")
<del> flVersion = flag.Bool([]string{"v", "-version"}, false, "Print version information and quit")
<del>)
<add>type daemonOptions struct {
<add> version bool
<add> configFile string
<add> daemonConfig *daemon.Config
<add> common *cliflags.CommonOptions
<add> flags *pflag.FlagSet
<add>}
<ide>
<del>func main() {
<del> if reexec.Init() {
<del> return
<add>func newDaemonCommand() *cobra.Command {
<add> opts := daemonOptions{
<add> daemonConfig: daemon.NewConfig(),
<add> common: cliflags.NewCommonOptions(),
<ide> }
<ide>
<del> // Set terminal emulation based on platform as required.
<del> _, stdout, stderr := term.StdStreams()
<del>
<del> logrus.SetOutput(stderr)
<del>
<del> flag.Merge(flag.CommandLine, daemonCli.commonFlags.FlagSet)
<del>
<del> flag.Usage = func() {
<del> fmt.Fprint(stdout, "Usage: dockerd [OPTIONS]\n\n")
<del> fmt.Fprint(stdout, "A self-sufficient runtime for containers.\n\nOptions:\n")
<del>
<del> flag.CommandLine.SetOutput(stdout)
<del> flag.PrintDefaults()
<del> }
<del> flag.CommandLine.ShortUsage = func() {
<del> fmt.Fprint(stderr, "\nUsage:\tdockerd [OPTIONS]\n")
<add> cmd := &cobra.Command{
<add> Use: "dockerd [OPTIONS]",
<add> Short: "A self-sufficient runtime for containers.",
<add> SilenceUsage: true,
<add> SilenceErrors: true,
<add> Args: cli.NoArgs,
<add> RunE: func(cmd *cobra.Command, args []string) error {
<add> opts.flags = cmd.Flags()
<add> return runDaemon(opts)
<add> },
<ide> }
<add> // TODO: SetUsageTemplate, SetHelpTemplate, SetFlagErrorFunc
<ide>
<del> if err := flag.CommandLine.ParseFlags(os.Args[1:], false); err != nil {
<del> os.Exit(1)
<del> }
<add> flags := cmd.Flags()
<add> flags.BoolP("help", "h", false, "Print usage")
<add> flags.MarkShorthandDeprecated("help", "please use --help")
<add> flags.BoolVarP(&opts.version, "version", "v", false, "Print version information and quit")
<add> flags.StringVar(&opts.configFile, flagDaemonConfigFile, defaultDaemonConfigFile, "Daemon configuration file")
<add> opts.common.InstallFlags(flags)
<add> opts.daemonConfig.InstallFlags(flags)
<ide>
<del> if *flVersion {
<del> showVersion()
<del> return
<del> }
<add> return cmd
<add>}
<ide>
<del> if *flHelp {
<del> // if global flag --help is present, regardless of what other options and commands there are,
<del> // just print the usage.
<del> flag.Usage()
<del> return
<add>func runDaemon(opts daemonOptions) error {
<add> if opts.version {
<add> showVersion()
<add> return nil
<ide> }
<ide>
<ide> // On Windows, this may be launching as a service or with an option to
<ide> func main() {
<ide> logrus.Fatal(err)
<ide> }
<ide>
<del> if !stop {
<del> err = daemonCli.start()
<del> notifyShutdown(err)
<del> if err != nil {
<del> logrus.Fatal(err)
<del> }
<add> if stop {
<add> return nil
<ide> }
<add>
<add> err = NewDaemonCli().start(opts)
<add> notifyShutdown(err)
<add> return err
<ide> }
<ide>
<ide> func showVersion() {
<ide> func showVersion() {
<ide> fmt.Printf("Docker version %s, build %s\n", dockerversion.Version, dockerversion.GitCommit)
<ide> }
<ide> }
<add>
<add>func main() {
<add> if reexec.Init() {
<add> return
<add> }
<add>
<add> // Set terminal emulation based on platform as required.
<add> _, stdout, stderr := term.StdStreams()
<add> logrus.SetOutput(stderr)
<add>
<add> cmd := newDaemonCommand()
<add> cmd.SetOutput(stdout)
<add> if err := cmd.Execute(); err != nil {
<add> logrus.Fatal(err)
<add> }
<add>}
<ide><path>daemon/config.go
<ide> import (
<ide> "fmt"
<ide> "io"
<ide> "io/ioutil"
<add> "runtime"
<ide> "strings"
<ide> "sync"
<ide>
<ide> "github.com/Sirupsen/logrus"
<ide> "github.com/docker/docker/opts"
<ide> "github.com/docker/docker/pkg/discovery"
<del> flag "github.com/docker/docker/pkg/mflag"
<ide> "github.com/docker/docker/registry"
<ide> "github.com/imdario/mergo"
<add> "github.com/spf13/pflag"
<ide> )
<ide>
<ide> const (
<ide> type CommonConfig struct {
<ide> valuesSet map[string]interface{}
<ide> }
<ide>
<del>// InstallCommonFlags adds command-line options to the top-level flag parser for
<del>// the current process.
<del>// Subsequent calls to `flag.Parse` will populate config with values parsed
<del>// from the command-line.
<del>func (config *Config) InstallCommonFlags(cmd *flag.FlagSet, usageFn func(string) string) {
<add>// InstallCommonFlags adds flags to the pflag.FlagSet to configure the daemon
<add>func (config *Config) InstallCommonFlags(flags *pflag.FlagSet) {
<ide> var maxConcurrentDownloads, maxConcurrentUploads int
<ide>
<del> config.ServiceOptions.InstallCliFlags(cmd, usageFn)
<del>
<del> cmd.Var(opts.NewNamedListOptsRef("storage-opts", &config.GraphOptions, nil), []string{"-storage-opt"}, usageFn("Storage driver options"))
<del> cmd.Var(opts.NewNamedListOptsRef("authorization-plugins", &config.AuthorizationPlugins, nil), []string{"-authorization-plugin"}, usageFn("Authorization plugins to load"))
<del> cmd.Var(opts.NewNamedListOptsRef("exec-opts", &config.ExecOptions, nil), []string{"-exec-opt"}, usageFn("Runtime execution options"))
<del> cmd.StringVar(&config.Pidfile, []string{"p", "-pidfile"}, defaultPidFile, usageFn("Path to use for daemon PID file"))
<del> cmd.StringVar(&config.Root, []string{"g", "-graph"}, defaultGraph, usageFn("Root of the Docker runtime"))
<del> cmd.BoolVar(&config.AutoRestart, []string{"#r", "#-restart"}, true, usageFn("--restart on the daemon has been deprecated in favor of --restart policies on docker run"))
<del> cmd.StringVar(&config.GraphDriver, []string{"s", "-storage-driver"}, "", usageFn("Storage driver to use"))
<del> cmd.IntVar(&config.Mtu, []string{"#mtu", "-mtu"}, 0, usageFn("Set the containers network MTU"))
<del> cmd.BoolVar(&config.RawLogs, []string{"-raw-logs"}, false, usageFn("Full timestamps without ANSI coloring"))
<add> config.ServiceOptions.InstallCliFlags(flags)
<add>
<add> flags.Var(opts.NewNamedListOptsRef("storage-opts", &config.GraphOptions, nil), "storage-opt", "Storage driver options")
<add> flags.Var(opts.NewNamedListOptsRef("authorization-plugins", &config.AuthorizationPlugins, nil), "authorization-plugin", "Authorization plugins to load")
<add> flags.Var(opts.NewNamedListOptsRef("exec-opts", &config.ExecOptions, nil), "exec-opt", "Runtime execution options")
<add> flags.StringVarP(&config.Pidfile, "pidfile", "p", defaultPidFile, "Path to use for daemon PID file")
<add> flags.StringVarP(&config.Root, "graph", "g", defaultGraph, "Root of the Docker runtime")
<add> flags.BoolVarP(&config.AutoRestart, "restart", "r", true, "--restart on the daemon has been deprecated in favor of --restart policies on docker run")
<add> flags.MarkDeprecated("restart", "Please use a restart policy on ducker run")
<add> flags.StringVarP(&config.GraphDriver, "storage-driver", "s", "", "Storage driver to use")
<add> flags.IntVar(&config.Mtu, "mtu", 0, "Set the containers network MTU")
<add> flags.BoolVar(&config.RawLogs, "raw-logs", false, "Full timestamps without ANSI coloring")
<ide> // FIXME: why the inconsistency between "hosts" and "sockets"?
<del> cmd.Var(opts.NewListOptsRef(&config.DNS, opts.ValidateIPAddress), []string{"#dns", "-dns"}, usageFn("DNS server to use"))
<del> cmd.Var(opts.NewNamedListOptsRef("dns-opts", &config.DNSOptions, nil), []string{"-dns-opt"}, usageFn("DNS options to use"))
<del> cmd.Var(opts.NewListOptsRef(&config.DNSSearch, opts.ValidateDNSSearch), []string{"-dns-search"}, usageFn("DNS search domains to use"))
<del> cmd.Var(opts.NewNamedListOptsRef("labels", &config.Labels, opts.ValidateLabel), []string{"-label"}, usageFn("Set key=value labels to the daemon"))
<del> cmd.StringVar(&config.LogConfig.Type, []string{"-log-driver"}, "json-file", usageFn("Default driver for container logs"))
<del> cmd.Var(opts.NewNamedMapOpts("log-opts", config.LogConfig.Config, nil), []string{"-log-opt"}, usageFn("Default log driver options for containers"))
<del> cmd.StringVar(&config.ClusterAdvertise, []string{"-cluster-advertise"}, "", usageFn("Address or interface name to advertise"))
<del> cmd.StringVar(&config.ClusterStore, []string{"-cluster-store"}, "", usageFn("URL of the distributed storage backend"))
<del> cmd.Var(opts.NewNamedMapOpts("cluster-store-opts", config.ClusterOpts, nil), []string{"-cluster-store-opt"}, usageFn("Set cluster store options"))
<del> cmd.StringVar(&config.CorsHeaders, []string{"-api-cors-header"}, "", usageFn("Set CORS headers in the remote API"))
<del> cmd.IntVar(&maxConcurrentDownloads, []string{"-max-concurrent-downloads"}, defaultMaxConcurrentDownloads, usageFn("Set the max concurrent downloads for each pull"))
<del> cmd.IntVar(&maxConcurrentUploads, []string{"-max-concurrent-uploads"}, defaultMaxConcurrentUploads, usageFn("Set the max concurrent uploads for each push"))
<add> flags.Var(opts.NewListOptsRef(&config.DNS, opts.ValidateIPAddress), "dns", "DNS server to use")
<add> flags.Var(opts.NewNamedListOptsRef("dns-opts", &config.DNSOptions, nil), "dns-opt", "DNS options to use")
<add> flags.Var(opts.NewListOptsRef(&config.DNSSearch, opts.ValidateDNSSearch), "dns-search", "DNS search domains to use")
<add> flags.Var(opts.NewNamedListOptsRef("labels", &config.Labels, opts.ValidateLabel), "label", "Set key=value labels to the daemon")
<add> flags.StringVar(&config.LogConfig.Type, "log-driver", "json-file", "Default driver for container logs")
<add> flags.Var(opts.NewNamedMapOpts("log-opts", config.LogConfig.Config, nil), "log-opt", "Default log driver options for containers")
<add> flags.StringVar(&config.ClusterAdvertise, "cluster-advertise", "", "Address or interface name to advertise")
<add> flags.StringVar(&config.ClusterStore, "cluster-store", "", "URL of the distributed storage backend")
<add> flags.Var(opts.NewNamedMapOpts("cluster-store-opts", config.ClusterOpts, nil), "cluster-store-opt", "Set cluster store options")
<add> flags.StringVar(&config.CorsHeaders, "api-cors-header", "", "Set CORS headers in the remote API")
<add> flags.IntVar(&maxConcurrentDownloads, "max-concurrent-downloads", defaultMaxConcurrentDownloads, "Set the max concurrent downloads for each pull")
<add> flags.IntVar(&maxConcurrentUploads, "max-concurrent-uploads", defaultMaxConcurrentUploads, "Set the max concurrent uploads for each push")
<ide>
<ide> cmd.StringVar(&config.SwarmDefaultAdvertiseAddr, []string{"-swarm-default-advertise-addr"}, "", usageFn("Set default address or interface for swarm advertised address"))
<ide>
<ide> func (config *Config) IsValueSet(name string) bool {
<ide> return ok
<ide> }
<ide>
<add>// NewConfig returns a new fully initialized Config struct
<add>func NewConfig() *Config {
<add> config := Config{}
<add> config.LogConfig.Config = make(map[string]string)
<add> config.ClusterOpts = make(map[string]string)
<add>
<add> if runtime.GOOS != "linux" {
<add> config.V2Only = true
<add> }
<add> return &config
<add>}
<add>
<ide> func parseClusterAdvertiseSettings(clusterStore, clusterAdvertise string) (string, error) {
<ide> if clusterAdvertise == "" {
<ide> return "", errDiscoveryDisabled
<ide> func parseClusterAdvertiseSettings(clusterStore, clusterAdvertise string) (strin
<ide> }
<ide>
<ide> // ReloadConfiguration reads the configuration in the host and reloads the daemon and server.
<del>func ReloadConfiguration(configFile string, flags *flag.FlagSet, reload func(*Config)) error {
<add>func ReloadConfiguration(configFile string, flags *pflag.FlagSet, reload func(*Config)) error {
<ide> logrus.Infof("Got signal to reload configuration, reloading from: %s", configFile)
<ide> newConfig, err := getConflictFreeConfiguration(configFile, flags)
<ide> if err != nil {
<ide> type boolValue interface {
<ide> // loads the file configuration in an isolated structure,
<ide> // and merges the configuration provided from flags on top
<ide> // if there are no conflicts.
<del>func MergeDaemonConfigurations(flagsConfig *Config, flags *flag.FlagSet, configFile string) (*Config, error) {
<add>func MergeDaemonConfigurations(flagsConfig *Config, flags *pflag.FlagSet, configFile string) (*Config, error) {
<ide> fileConfig, err := getConflictFreeConfiguration(configFile, flags)
<ide> if err != nil {
<ide> return nil, err
<ide> func MergeDaemonConfigurations(flagsConfig *Config, flags *flag.FlagSet, configF
<ide> // getConflictFreeConfiguration loads the configuration from a JSON file.
<ide> // It compares that configuration with the one provided by the flags,
<ide> // and returns an error if there are conflicts.
<del>func getConflictFreeConfiguration(configFile string, flags *flag.FlagSet) (*Config, error) {
<add>func getConflictFreeConfiguration(configFile string, flags *pflag.FlagSet) (*Config, error) {
<ide> b, err := ioutil.ReadFile(configFile)
<ide> if err != nil {
<ide> return nil, err
<ide> func getConflictFreeConfiguration(configFile string, flags *flag.FlagSet) (*Conf
<ide> }
<ide> if len(namedOptions) > 0 {
<ide> // set also default for mergeVal flags that are boolValue at the same time.
<del> flags.VisitAll(func(f *flag.Flag) {
<add> flags.VisitAll(func(f *pflag.Flag) {
<ide> if opt, named := f.Value.(opts.NamedOption); named {
<ide> v, set := namedOptions[opt.Name()]
<ide> _, boolean := f.Value.(boolValue)
<ide> func configValuesSet(config map[string]interface{}) map[string]interface{} {
<ide> // findConfigurationConflicts iterates over the provided flags searching for
<ide> // duplicated configurations and unknown keys. It returns an error with all the conflicts if
<ide> // it finds any.
<del>func findConfigurationConflicts(config map[string]interface{}, flags *flag.FlagSet) error {
<add>func findConfigurationConflicts(config map[string]interface{}, flags *pflag.FlagSet) error {
<ide> // 1. Search keys from the file that we don't recognize as flags.
<ide> unknownKeys := make(map[string]interface{})
<ide> for key, value := range config {
<ide> func findConfigurationConflicts(config map[string]interface{}, flags *flag.FlagS
<ide> // 2. Discard values that implement NamedOption.
<ide> // Their configuration name differs from their flag name, like `labels` and `label`.
<ide> if len(unknownKeys) > 0 {
<del> unknownNamedConflicts := func(f *flag.Flag) {
<add> unknownNamedConflicts := func(f *pflag.Flag) {
<ide> if namedOption, ok := f.Value.(opts.NamedOption); ok {
<ide> if _, valid := unknownKeys[namedOption.Name()]; valid {
<ide> delete(unknownKeys, namedOption.Name())
<ide> func findConfigurationConflicts(config map[string]interface{}, flags *flag.FlagS
<ide> }
<ide>
<ide> // 3. Search keys that are present as a flag and as a file option.
<del> duplicatedConflicts := func(f *flag.Flag) {
<add> duplicatedConflicts := func(f *pflag.Flag) {
<ide> // search option name in the json configuration payload if the value is a named option
<ide> if namedOption, ok := f.Value.(opts.NamedOption); ok {
<ide> if optsValue, ok := config[namedOption.Name()]; ok {
<ide> conflicts = append(conflicts, printConflict(namedOption.Name(), f.Value.String(), optsValue))
<ide> }
<ide> } else {
<del> // search flag name in the json configuration payload without trailing dashes
<del> for _, name := range f.Names {
<add> // search flag name in the json configuration payload
<add> for _, name := range []string{f.Name, f.Shorthand} {
<ide> name = strings.TrimLeft(name, "-")
<ide>
<ide> if value, ok := config[name]; ok {
<ide><path>daemon/config_experimental.go
<ide>
<ide> package daemon
<ide>
<del>import flag "github.com/docker/docker/pkg/mflag"
<add>import (
<add> "github.com/spf13/pflag"
<add>)
<ide>
<del>func (config *Config) attachExperimentalFlags(cmd *flag.FlagSet, usageFn func(string) string) {
<add>func (config *Config) attachExperimentalFlags(cmd *pflag.FlagSet) {
<ide> }
<ide><path>daemon/config_solaris.go
<ide> package daemon
<ide>
<ide> import (
<del> flag "github.com/docker/docker/pkg/mflag"
<add> "github.com/spf13/pflag"
<ide> )
<ide>
<ide> var (
<ide> type bridgeConfig struct {
<ide>
<ide> // InstallFlags adds command-line options to the top-level flag parser for
<ide> // the current process.
<del>// Subsequent calls to `flag.Parse` will populate config with values parsed
<del>// from the command-line.
<del>func (config *Config) InstallFlags(cmd *flag.FlagSet, usageFn func(string) string) {
<add>func (config *Config) InstallFlags(flags *pflag.FlagSet) {
<ide> // First handle install flags which are consistent cross-platform
<del> config.InstallCommonFlags(cmd, usageFn)
<add> config.InstallCommonFlags(flags)
<ide>
<ide> // Then platform-specific install flags
<del> config.attachExperimentalFlags(cmd, usageFn)
<add> config.attachExperimentalFlags(flags)
<ide> }
<ide>
<ide> // GetExecRoot returns the user configured Exec-root
<ide><path>daemon/config_stub.go
<ide>
<ide> package daemon
<ide>
<del>import flag "github.com/docker/docker/pkg/mflag"
<add>import (
<add> "github.com/spf13/pflag"
<add>)
<ide>
<del>func (config *Config) attachExperimentalFlags(cmd *flag.FlagSet, usageFn func(string) string) {
<add>func (config *Config) attachExperimentalFlags(cmd *pflag.FlagSet) {
<ide> }
<ide><path>daemon/config_unix.go
<ide> import (
<ide> "net"
<ide>
<ide> "github.com/docker/docker/opts"
<del> flag "github.com/docker/docker/pkg/mflag"
<ide> runconfigopts "github.com/docker/docker/runconfig/opts"
<ide> "github.com/docker/engine-api/types"
<del> "github.com/docker/go-units"
<add> units "github.com/docker/go-units"
<add> "github.com/spf13/pflag"
<ide> )
<ide>
<ide> var (
<ide> type bridgeConfig struct {
<ide> InterContainerCommunication bool `json:"icc,omitempty"`
<ide> }
<ide>
<del>// InstallFlags adds command-line options to the top-level flag parser for
<del>// the current process.
<del>// Subsequent calls to `flag.Parse` will populate config with values parsed
<del>// from the command-line.
<del>func (config *Config) InstallFlags(cmd *flag.FlagSet, usageFn func(string) string) {
<add>// InstallFlags adds flags to the pflag.FlagSet to configure the daemon
<add>func (config *Config) InstallFlags(flags *pflag.FlagSet) {
<ide> // First handle install flags which are consistent cross-platform
<del> config.InstallCommonFlags(cmd, usageFn)
<add> config.InstallCommonFlags(flags)
<ide>
<del> // Then platform-specific install flags
<del> cmd.BoolVar(&config.EnableSelinuxSupport, []string{"-selinux-enabled"}, false, usageFn("Enable selinux support"))
<del> cmd.StringVar(&config.SocketGroup, []string{"G", "-group"}, "docker", usageFn("Group for the unix socket"))
<ide> config.Ulimits = make(map[string]*units.Ulimit)
<del> cmd.Var(runconfigopts.NewUlimitOpt(&config.Ulimits), []string{"-default-ulimit"}, usageFn("Default ulimits for containers"))
<del> cmd.BoolVar(&config.bridgeConfig.EnableIPTables, []string{"#iptables", "-iptables"}, true, usageFn("Enable addition of iptables rules"))
<del> cmd.BoolVar(&config.bridgeConfig.EnableIPForward, []string{"#ip-forward", "-ip-forward"}, true, usageFn("Enable net.ipv4.ip_forward"))
<del> cmd.BoolVar(&config.bridgeConfig.EnableIPMasq, []string{"-ip-masq"}, true, usageFn("Enable IP masquerading"))
<del> cmd.BoolVar(&config.bridgeConfig.EnableIPv6, []string{"-ipv6"}, false, usageFn("Enable IPv6 networking"))
<del> cmd.StringVar(&config.ExecRoot, []string{"-exec-root"}, defaultExecRoot, usageFn("Root directory for execution state files"))
<del> cmd.StringVar(&config.bridgeConfig.IP, []string{"#bip", "-bip"}, "", usageFn("Specify network bridge IP"))
<del> cmd.StringVar(&config.bridgeConfig.Iface, []string{"b", "-bridge"}, "", usageFn("Attach containers to a network bridge"))
<del> cmd.StringVar(&config.bridgeConfig.FixedCIDR, []string{"-fixed-cidr"}, "", usageFn("IPv4 subnet for fixed IPs"))
<del> cmd.StringVar(&config.bridgeConfig.FixedCIDRv6, []string{"-fixed-cidr-v6"}, "", usageFn("IPv6 subnet for fixed IPs"))
<del> cmd.Var(opts.NewIPOpt(&config.bridgeConfig.DefaultGatewayIPv4, ""), []string{"-default-gateway"}, usageFn("Container default gateway IPv4 address"))
<del> cmd.Var(opts.NewIPOpt(&config.bridgeConfig.DefaultGatewayIPv6, ""), []string{"-default-gateway-v6"}, usageFn("Container default gateway IPv6 address"))
<del> cmd.BoolVar(&config.bridgeConfig.InterContainerCommunication, []string{"#icc", "-icc"}, true, usageFn("Enable inter-container communication"))
<del> cmd.Var(opts.NewIPOpt(&config.bridgeConfig.DefaultIP, "0.0.0.0"), []string{"#ip", "-ip"}, usageFn("Default IP when binding container ports"))
<del> cmd.BoolVar(&config.bridgeConfig.EnableUserlandProxy, []string{"-userland-proxy"}, true, usageFn("Use userland proxy for loopback traffic"))
<del> cmd.BoolVar(&config.EnableCors, []string{"#api-enable-cors", "#-api-enable-cors"}, false, usageFn("Enable CORS headers in the remote API, this is deprecated by --api-cors-header"))
<del> cmd.StringVar(&config.CgroupParent, []string{"-cgroup-parent"}, "", usageFn("Set parent cgroup for all containers"))
<del> cmd.StringVar(&config.RemappedRoot, []string{"-userns-remap"}, "", usageFn("User/Group setting for user namespaces"))
<del> cmd.StringVar(&config.ContainerdAddr, []string{"-containerd"}, "", usageFn("Path to containerd socket"))
<del> cmd.BoolVar(&config.LiveRestoreEnabled, []string{"-live-restore"}, false, usageFn("Enable live restore of docker when containers are still running"))
<ide> config.Runtimes = make(map[string]types.Runtime)
<del> cmd.Var(runconfigopts.NewNamedRuntimeOpt("runtimes", &config.Runtimes, stockRuntimeName), []string{"-add-runtime"}, usageFn("Register an additional OCI compatible runtime"))
<del> cmd.StringVar(&config.DefaultRuntime, []string{"-default-runtime"}, stockRuntimeName, usageFn("Default OCI runtime for containers"))
<del> cmd.IntVar(&config.OOMScoreAdjust, []string{"-oom-score-adjust"}, -500, usageFn("Set the oom_score_adj for the daemon"))
<ide>
<del> config.attachExperimentalFlags(cmd, usageFn)
<add> // Then platform-specific install flags
<add> flags.BoolVar(&config.EnableSelinuxSupport, "selinux-enabled", false, "Enable selinux support")
<add> flags.Var(runconfigopts.NewUlimitOpt(&config.Ulimits), "default-ulimit", "Default ulimits for containers")
<add> flags.BoolVar(&config.bridgeConfig.EnableIPTables, "iptables", true, "Enable addition of iptables rules")
<add> flags.BoolVar(&config.bridgeConfig.EnableIPForward, "ip-forward", true, "Enable net.ipv4.ip_forward")
<add> flags.BoolVar(&config.bridgeConfig.EnableIPMasq, "ip-masq", true, "Enable IP masquerading")
<add> flags.BoolVar(&config.bridgeConfig.EnableIPv6, "ipv6", false, "Enable IPv6 networking")
<add> flags.StringVar(&config.ExecRoot, "exec-root", defaultExecRoot, "Root directory for execution state files")
<add> flags.StringVar(&config.bridgeConfig.IP, "bip", "", "Specify network bridge IP")
<add> flags.StringVarP(&config.bridgeConfig.Iface, "bridge", "b", "", "Attach containers to a network bridge")
<add> flags.StringVar(&config.bridgeConfig.FixedCIDR, "fixed-cidr", "", "IPv4 subnet for fixed IPs")
<add> flags.StringVar(&config.bridgeConfig.FixedCIDRv6, "fixed-cidr-v6", "", "IPv6 subnet for fixed IPs")
<add> flags.Var(opts.NewIPOpt(&config.bridgeConfig.DefaultGatewayIPv4, ""), "default-gateway", "Container default gateway IPv4 address")
<add> flags.Var(opts.NewIPOpt(&config.bridgeConfig.DefaultGatewayIPv6, ""), "default-gateway-v6", "Container default gateway IPv6 address")
<add> flags.BoolVar(&config.bridgeConfig.InterContainerCommunication, "icc", true, "Enable inter-container communication")
<add> flags.Var(opts.NewIPOpt(&config.bridgeConfig.DefaultIP, "0.0.0.0"), "ip", "Default IP when binding container ports")
<add> flags.BoolVar(&config.bridgeConfig.EnableUserlandProxy, "userland-proxy", true, "Use userland proxy for loopback traffic")
<add> flags.BoolVar(&config.EnableCors, "api-enable-cors", false, "Enable CORS headers in the remote API, this is deprecated by --api-cors-header")
<add> flags.MarkDeprecated("api-enable-cors", "Please use --api-cors-header")
<add> flags.StringVar(&config.CgroupParent, "cgroup-parent", "", "Set parent cgroup for all containers")
<add> flags.StringVar(&config.RemappedRoot, "userns-remap", "", "User/Group setting for user namespaces")
<add> flags.StringVar(&config.ContainerdAddr, "containerd", "", "Path to containerd socket")
<add> flags.BoolVar(&config.LiveRestoreEnabled, "live-restore", false, "Enable live restore of docker when containers are still running")
<add> flags.Var(runconfigopts.NewNamedRuntimeOpt("runtimes", &config.Runtimes, stockRuntimeName), "add-runtime", "Register an additional OCI compatible runtime")
<add> flags.StringVar(&config.DefaultRuntime, "default-runtime", stockRuntimeName, "Default OCI runtime for containers")
<add> flags.IntVar(&config.OOMScoreAdjust, "oom-score-adjust", -500, "Set the oom_score_adj for the daemon")
<add>
<add> config.attachExperimentalFlags(flags)
<ide> }
<ide>
<ide> // GetRuntime returns the runtime path and arguments for a given
<ide><path>opts/ip.go
<ide> func (o *IPOpt) String() string {
<ide> }
<ide> return o.IP.String()
<ide> }
<add>
<add>// Type returns the type of the option
<add>func (o *IPOpt) Type() string {
<add> return "ip"
<add>}
<ide><path>registry/config.go
<ide> import (
<ide> "strings"
<ide>
<ide> "github.com/docker/docker/opts"
<del> flag "github.com/docker/docker/pkg/mflag"
<ide> "github.com/docker/docker/reference"
<ide> registrytypes "github.com/docker/engine-api/types/registry"
<add> "github.com/spf13/pflag"
<ide> )
<ide>
<ide> // ServiceOptions holds command line options.
<ide> var lookupIP = net.LookupIP
<ide>
<ide> // InstallCliFlags adds command-line options to the top-level flag parser for
<ide> // the current process.
<del>func (options *ServiceOptions) InstallCliFlags(cmd *flag.FlagSet, usageFn func(string) string) {
<add>func (options *ServiceOptions) InstallCliFlags(flags *pflag.FlagSet) {
<ide> mirrors := opts.NewNamedListOptsRef("registry-mirrors", &options.Mirrors, ValidateMirror)
<del> cmd.Var(mirrors, []string{"-registry-mirror"}, usageFn("Preferred Docker registry mirror"))
<del>
<ide> insecureRegistries := opts.NewNamedListOptsRef("insecure-registries", &options.InsecureRegistries, ValidateIndexName)
<del> cmd.Var(insecureRegistries, []string{"-insecure-registry"}, usageFn("Enable insecure registry communication"))
<ide>
<del> options.installCliPlatformFlags(cmd, usageFn)
<add> flags.Var(mirrors, "registry-mirror", "Preferred Docker registry mirror")
<add> flags.Var(insecureRegistries, "insecure-registry", "Enable insecure registry communication")
<add>
<add> options.installCliPlatformFlags(flags)
<ide> }
<ide>
<ide> // newServiceConfig returns a new instance of ServiceConfig
<ide><path>registry/config_unix.go
<ide> func cleanPath(s string) string {
<ide> }
<ide>
<ide> // installCliPlatformFlags handles any platform specific flags for the service.
<del>func (options *ServiceOptions) installCliPlatformFlags(cmd *flag.FlagSet, usageFn func(string) string) {
<del> cmd.BoolVar(&options.V2Only, []string{"-disable-legacy-registry"}, false, usageFn("Disable contacting legacy registries"))
<add>func (options *ServiceOptions) installCliPlatformFlags(flags *flag.FlagSet) string) {
<add> flags.BoolVar(&options.V2Only, "disable-legacy-registry", false, "Disable contacting legacy registries")
<ide> }
<ide><path>registry/config_windows.go
<ide> func cleanPath(s string) string {
<ide> }
<ide>
<ide> // installCliPlatformFlags handles any platform specific flags for the service.
<del>func (options *ServiceOptions) installCliPlatformFlags(cmd *flag.FlagSet, usageFn func(string) string) {
<add>func (options *ServiceOptions) installCliPlatformFlags(flags *flag.FlagSet) string) {
<ide> // No Windows specific flags.
<ide> }
<ide><path>runconfig/opts/runtime.go
<ide> func (o *RuntimeOpt) GetMap() map[string]types.Runtime {
<ide>
<ide> return map[string]types.Runtime{}
<ide> }
<add>
<add>// Type returns the type of the option
<add>func (o *RuntimeOpt) Type() string {
<add> return "runtime"
<add>} | 15 |
Text | Text | update feluelle user in airflow inthewild users | 5e9589c685bcec769041e0a1692035778869f718 | <ide><path>INTHEWILD.md
<ide> Currently, **officially** using Airflow:
<ide> 1. [Arrive](https://www.arrive.com/)
<ide> 1. [Artelys](https://www.artelys.com/) [[@fortierq](https://github.com/fortierq)]
<ide> 1. [Asana](https://asana.com/) [[@chang](https://github.com/chang), [@dima-asana](https://github.com/dima-asana), [@jdavidheiser](https://github.com/jdavidheiser), [@ricardoandresrojas](https://github.com/ricardoandresrojas)]
<del>1. [Astronomer](https://www.astronomer.io) [[@schnie](https://github.com/schnie), [@ashb](https://github.com/ashb), [@kaxil](https://github.com/kaxil), [@dimberman](https://github.com/dimberman), [@andriisoldatenko](https://github.com/andriisoldatenko), [@ryw](https://github.com/ryw), [@ryanahamilton](https://github.com/ryanahamilton), [@jhtimmins](https://github.com/jhtimmins), [@vikramkoka](https://github.com/vikramkoka), [@jedcunningham](https://github.com/jedcunningham), [@BasPH](https://github.com/basph), [@ephraimbuddy](https://github.com/ephraimbuddy)]
<add>1. [Astronomer](https://www.astronomer.io) [[@schnie](https://github.com/schnie), [@ashb](https://github.com/ashb), [@kaxil](https://github.com/kaxil), [@dimberman](https://github.com/dimberman), [@andriisoldatenko](https://github.com/andriisoldatenko), [@ryw](https://github.com/ryw), [@ryanahamilton](https://github.com/ryanahamilton), [@jhtimmins](https://github.com/jhtimmins), [@vikramkoka](https://github.com/vikramkoka), [@jedcunningham](https://github.com/jedcunningham), [@BasPH](https://github.com/basph), [@ephraimbuddy](https://github.com/ephraimbuddy), [@feluelle](https://github.com/feluelle)]
<ide> 1. [Auth0](https://auth0.com) [[@scottypate](https://github.com/scottypate)], [[@dm03514](https://github.com/dm03514)], [[@karangale](https://github.com/karangale)]
<ide> 1. [Automattic](https://automattic.com/) [[@anandnalya](https://github.com/anandnalya), [@bperson](https://github.com/bperson), [@khrol](https://github.com/Khrol), [@xyu](https://github.com/xyu)]
<ide> 1. [Avesta Technologies](https://avestatechnologies.com) [[@TheRum](https://github.com/TheRum)]
<ide> Currently, **officially** using Airflow:
<ide> 1. [Tink](https://tink.com/) [[@tink-ab](https://github.com/tink-ab)]
<ide> 1. [TokenAnalyst](https://github.com/tokenanalyst) [[@simonohanlon101](https://github.com/simonohanlon101), [@ankitchiplunkar](https://github.com/ankitchiplunkar), [@sidshekhar](https://github.com/sidshekhar), [@sp6pe](https://github.com/sp6pe)]
<ide> 1. [Tokopedia](https://www.tokopedia.com/) [[@topedmaria](https://github.com/topedmaria)]
<del>1. [Trade Republic](https://traderepublic.com/) [[@feluelle](https://github.com/feluelle)]
<add>1. [Trade Republic](https://traderepublic.com/)
<ide> 1. [Trakken](https://www.trkkn.com/) [[@itroulli](https://github.com/itroulli), [@gthar](https://github.com/gthar), [@qulo](https://github.com/qulo), [@Oscar-Rod](https://github.com/Oscar-Rod), [@kondla](https://github.com/kondla), [@semuar](https://github.com/semuar), [@ManuelFreytag](https://github.com/ManuelFreytag)
<ide> 1. [Travix](https://www.travix.com/)
<ide> 1. [Trocafone](https://www.trocafone.com/) [[@idontdomath](https://github.com/idontdomath) & [@gseva](https://github.com/gseva) & [@ordonezf](https://github.com/ordonezf) & [@PalmaLeandro](https://github.com/PalmaLeandro)] | 1 |
Ruby | Ruby | remove delegation test | d1987846b3857f49db8c15f2d1ae8dc8a14d8a2f | <ide><path>activerecord/test/cases/relations_test.rb
<ide> def test_presence
<ide> assert !Post.all.respond_to?(:by_lifo)
<ide> end
<ide>
<del> class OMGTopic < ActiveRecord::Base
<del> self.table_name = 'topics'
<add> test "merge collapses wheres from the LHS only" do
<add> left = Post.where(title: "omg").where(comments_count: 1)
<add> right = Post.where(title: "wtf").where(title: "bbq")
<ide>
<del> def self.__omg__
<del> "omgtopic"
<del> end
<add> expected = [left.where_values[1]] + right.where_values
<add> merged = left.merge(right)
<add>
<add> assert_equal expected, merged.where_values
<add> assert !merged.to_sql.include?("omg")
<add> assert merged.to_sql.include?("wtf")
<add> assert merged.to_sql.include?("bbq")
<ide> end
<ide>
<del> test "delegations do not clash across classes" do
<del> begin
<del> class ::Array
<del> def __omg__
<del> "array"
<del> end
<del> end
<add> def test_merging_removes_rhs_bind_parameters
<add> left = Post.where(id: Arel::Nodes::BindParam.new('?'))
<add> column = Post.columns_hash['id']
<add> left.bind_values += [[column, 20]]
<add> right = Post.where(id: 10)
<ide>
<del> assert_equal "array", Topic.all.__omg__
<del> assert_equal "omgtopic", OMGTopic.all.__omg__
<del> ensure
<del> Array.send(:remove_method, :__omg__)
<del> end
<add> merged = left.merge(right)
<add> assert_equal [], merged.bind_values
<add> end
<add>
<add> def test_merging_keeps_lhs_bind_parameters
<add> column = Post.columns_hash['id']
<add> binds = [[column, 20]]
<add>
<add> right = Post.where(id: Arel::Nodes::BindParam.new('?'))
<add> right.bind_values += binds
<add> left = Post.where(id: 10)
<add>
<add> merged = left.merge(right)
<add> assert_equal binds, merged.bind_values
<add> end
<add>
<add> def test_merging_reorders_bind_params
<add> post = Post.first
<add> id_column = Post.columns_hash['id']
<add> title_column = Post.columns_hash['title']
<add>
<add> bv = Post.connection.substitute_at id_column, 0
<add>
<add> right = Post.where(id: bv)
<add> right.bind_values += [[id_column, post.id]]
<add>
<add> left = Post.where(title: bv)
<add> left.bind_values += [[title_column, post.title]]
<add>
<add> merged = left.merge(right)
<add> assert_equal post, merged.first
<ide> end
<ide> end | 1 |
Java | Java | support @cache* as merged composed annotations | 59c88eb3c0611aac802e162d53503ce71864baef | <ide><path>spring-context/src/main/java/org/springframework/cache/annotation/SpringCacheAnnotationParser.java
<ide> import org.springframework.cache.interceptor.CacheOperation;
<ide> import org.springframework.cache.interceptor.CachePutOperation;
<ide> import org.springframework.cache.interceptor.CacheableOperation;
<add>import org.springframework.core.annotation.AnnotatedElementUtils;
<ide> import org.springframework.core.annotation.AnnotationUtils;
<ide> import org.springframework.util.ObjectUtils;
<ide> import org.springframework.util.StringUtils;
<ide> public Collection<CacheOperation> parseCacheAnnotations(Method method) {
<ide> protected Collection<CacheOperation> parseCacheAnnotations(DefaultCacheConfig cachingConfig, AnnotatedElement ae) {
<ide> Collection<CacheOperation> ops = null;
<ide>
<del> Collection<Cacheable> cacheables = getAnnotations(ae, Cacheable.class);
<del> if (cacheables != null) {
<add> Collection<Cacheable> cacheables = AnnotatedElementUtils.findAllMergedAnnotations(ae, Cacheable.class);
<add> if (!cacheables.isEmpty()) {
<ide> ops = lazyInit(ops);
<ide> for (Cacheable cacheable : cacheables) {
<ide> ops.add(parseCacheableAnnotation(ae, cachingConfig, cacheable));
<ide> }
<ide> }
<del> Collection<CacheEvict> evicts = getAnnotations(ae, CacheEvict.class);
<del> if (evicts != null) {
<add> Collection<CacheEvict> evicts = AnnotatedElementUtils.findAllMergedAnnotations(ae, CacheEvict.class);
<add> if (!evicts.isEmpty()) {
<ide> ops = lazyInit(ops);
<ide> for (CacheEvict evict : evicts) {
<ide> ops.add(parseEvictAnnotation(ae, cachingConfig, evict));
<ide> }
<ide> }
<del> Collection<CachePut> puts = getAnnotations(ae, CachePut.class);
<del> if (puts != null) {
<add> Collection<CachePut> puts = AnnotatedElementUtils.findAllMergedAnnotations(ae, CachePut.class);
<add> if (!puts.isEmpty()) {
<ide> ops = lazyInit(ops);
<ide> for (CachePut put : puts) {
<ide> ops.add(parsePutAnnotation(ae, cachingConfig, put));
<ide> }
<ide> }
<del> Collection<Caching> cachings = getAnnotations(ae, Caching.class);
<del> if (cachings != null) {
<add> Collection<Caching> cachings = AnnotatedElementUtils.findAllMergedAnnotations(ae, Caching.class);
<add> if (!cachings.isEmpty()) {
<ide> ops = lazyInit(ops);
<ide> for (Caching caching : cachings) {
<ide> ops.addAll(parseCachingAnnotation(ae, cachingConfig, caching));
<ide> DefaultCacheConfig getDefaultCacheConfig(Class<?> target) {
<ide> return new DefaultCacheConfig();
<ide> }
<ide>
<del> private <A extends Annotation> Collection<A> getAnnotations(AnnotatedElement ae, Class<A> annotationType) {
<del> Collection<A> anns = new ArrayList<A>(1);
<del>
<del> // look at raw annotation
<del> A ann = ae.getAnnotation(annotationType);
<del> if (ann != null) {
<del> anns.add(AnnotationUtils.synthesizeAnnotation(ann, ae));
<del> }
<del>
<del> // scan meta-annotations
<del> for (Annotation metaAnn : ae.getAnnotations()) {
<del> ann = metaAnn.annotationType().getAnnotation(annotationType);
<del> if (ann != null) {
<del> anns.add(AnnotationUtils.synthesizeAnnotation(ann, ae));
<del> }
<del> }
<del>
<del> return (!anns.isEmpty() ? anns : null);
<del> }
<del>
<ide> /**
<ide> * Validates the specified {@link CacheOperation}.
<ide> * <p>Throws an {@link IllegalStateException} if the state of the operation is
<ide><path>spring-context/src/test/java/org/springframework/cache/annotation/AnnotationCacheOperationSourceTests.java
<ide> /*
<del> * Copyright 2002-2015 the original author or authors.
<add> * Copyright 2002-2016 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide> import java.util.Collections;
<ide> import java.util.Iterator;
<ide>
<del>import org.junit.Ignore;
<ide> import org.junit.Rule;
<ide> import org.junit.Test;
<ide> import org.junit.rules.ExpectedException;
<ide> public void multipleStereotypes() throws Exception {
<ide> assertTrue(next.getCacheNames().contains("bar"));
<ide> }
<ide>
<del> // TODO [SPR-13475] Enable test once @Cache* is supported as a composed annotation.
<del> @Ignore("Disabled until SPR-13475 is resolved")
<ide> @Test
<ide> public void singleComposedAnnotation() throws Exception {
<del> Collection<CacheOperation> ops = getOps(AnnotatedClass.class, "singleComposed", 1);
<del> CacheOperation cacheOperation = ops.iterator().next();
<add> Collection<CacheOperation> ops = getOps(AnnotatedClass.class, "singleComposed", 2);
<add> Iterator<CacheOperation> it = ops.iterator();
<add>
<add> CacheOperation cacheOperation = it.next();
<add> assertThat(cacheOperation, instanceOf(CacheableOperation.class));
<add> assertThat(cacheOperation.getCacheNames(), equalTo(Collections.singleton("directly declared")));
<add> assertThat(cacheOperation.getKey(), equalTo(""));
<add>
<add> cacheOperation = it.next();
<ide> assertThat(cacheOperation, instanceOf(CacheableOperation.class));
<del> assertThat(cacheOperation.getCacheNames(), equalTo(Collections.singleton("composed")));
<add> assertThat(cacheOperation.getCacheNames(), equalTo(Collections.singleton("composedCache")));
<add> assertThat(cacheOperation.getKey(), equalTo("composedKey"));
<ide> }
<ide>
<del> // TODO [SPR-13475] Enable test once @Cache* is supported as a composed annotation.
<del> @Ignore("Disabled until SPR-13475 is resolved")
<ide> @Test
<ide> public void multipleComposedAnnotations() throws Exception {
<del> Collection<CacheOperation> ops = getOps(AnnotatedClass.class, "multipleComposed", 3);
<add> Collection<CacheOperation> ops = getOps(AnnotatedClass.class, "multipleComposed", 4);
<ide> Iterator<CacheOperation> it = ops.iterator();
<ide>
<ide> CacheOperation cacheOperation = it.next();
<ide> assertThat(cacheOperation, instanceOf(CacheableOperation.class));
<add> assertThat(cacheOperation.getCacheNames(), equalTo(Collections.singleton("directly declared")));
<add> assertThat(cacheOperation.getKey(), equalTo(""));
<add>
<add> cacheOperation = it.next();
<add> assertThat(cacheOperation, instanceOf(CacheableOperation.class));
<ide> assertThat(cacheOperation.getCacheNames(), equalTo(Collections.singleton("composedCache")));
<add> assertThat(cacheOperation.getKey(), equalTo("composedKey"));
<ide>
<ide> cacheOperation = it.next();
<ide> assertThat(cacheOperation, instanceOf(CacheableOperation.class));
<ide> assertThat(cacheOperation.getCacheNames(), equalTo(Collections.singleton("foo")));
<add> assertThat(cacheOperation.getKey(), equalTo(""));
<ide>
<ide> cacheOperation = it.next();
<ide> assertThat(cacheOperation, instanceOf(CacheEvictOperation.class));
<del> assertThat(cacheOperation.getCacheNames(), equalTo(Collections.singleton("composedCache")));
<add> assertThat(cacheOperation.getCacheNames(), equalTo(Collections.singleton("composedCacheEvict")));
<add> assertThat(cacheOperation.getKey(), equalTo("composedEvictionKey"));
<ide> }
<ide>
<ide> @Test
<ide> public void singleStereotype() {
<ide> public void multipleStereotype() {
<ide> }
<ide>
<del> @ComposedCacheable("composed")
<add> @Cacheable("directly declared")
<add> @ComposedCacheable(cacheNames = "composedCache", key = "composedKey")
<ide> public void singleComposed() {
<ide> }
<ide>
<add> @Cacheable("directly declared")
<ide> @ComposedCacheable(cacheNames = "composedCache", key = "composedKey")
<ide> @CacheableFoo
<del> @ComposedCacheEvict(cacheNames = "composedCache", key = "composedKey")
<add> @ComposedCacheEvict(cacheNames = "composedCacheEvict", key = "composedEvictionKey")
<ide> public void multipleComposed() {
<ide> }
<ide>
<ide> public void multipleCacheConfig() {
<ide>
<ide> @Retention(RetentionPolicy.RUNTIME)
<ide> @Target(ElementType.TYPE)
<del> @CacheConfig(keyGenerator = "classKeyGenerator",
<del> cacheManager = "classCacheManager", cacheResolver = "classCacheResolver")
<add> @CacheConfig(keyGenerator = "classKeyGenerator", cacheManager = "classCacheManager", cacheResolver = "classCacheResolver")
<ide> public @interface CacheConfigFoo {
<ide> }
<ide>
<ide> @Retention(RetentionPolicy.RUNTIME)
<ide> @Target({ ElementType.METHOD, ElementType.TYPE })
<ide> @Cacheable(cacheNames = "shadowed cache name", key = "shadowed key")
<del> public @interface ComposedCacheable {
<add> @interface ComposedCacheable {
<ide>
<del> @AliasFor(annotation = Cacheable.class, attribute = "cacheNames")
<add> @AliasFor(annotation = Cacheable.class)
<ide> String[] value() default {};
<ide>
<del> @AliasFor(annotation = Cacheable.class, attribute = "cacheNames")
<add> @AliasFor(annotation = Cacheable.class)
<ide> String[] cacheNames() default {};
<ide>
<del> @AliasFor(annotation = Cacheable.class, attribute = "key")
<add> @AliasFor(annotation = Cacheable.class)
<ide> String key() default "";
<ide> }
<ide>
<ide> @Retention(RetentionPolicy.RUNTIME)
<ide> @Target({ ElementType.METHOD, ElementType.TYPE })
<ide> @CacheEvict(cacheNames = "shadowed cache name", key = "shadowed key")
<del> public @interface ComposedCacheEvict {
<add> @interface ComposedCacheEvict {
<ide>
<del> @AliasFor(annotation = Cacheable.class, attribute = "cacheNames")
<add> @AliasFor(annotation = CacheEvict.class)
<ide> String[] value() default {};
<ide>
<del> @AliasFor(annotation = Cacheable.class, attribute = "cacheNames")
<add> @AliasFor(annotation = CacheEvict.class)
<ide> String[] cacheNames() default {};
<ide>
<del> @AliasFor(annotation = Cacheable.class, attribute = "key")
<add> @AliasFor(annotation = CacheEvict.class)
<ide> String key() default "";
<ide> }
<ide> | 2 |
Go | Go | use opencontainers/go-digest package | 7a855799175b6b984886ef1cfa337d6df1d4c668 | <ide><path>api/types/reference/image_reference_test.go
<ide> package reference
<ide>
<ide> import (
<add> _ "crypto/sha256"
<ide> "testing"
<ide> )
<ide>
<ide><path>cli/command/image/trust.go
<ide> import (
<ide> "sort"
<ide>
<ide> "github.com/Sirupsen/logrus"
<del> "github.com/docker/distribution/digest"
<ide> "github.com/docker/docker/api/types"
<ide> "github.com/docker/docker/cli/command"
<ide> "github.com/docker/docker/cli/trust"
<ide> import (
<ide> "github.com/docker/docker/registry"
<ide> "github.com/docker/notary/client"
<ide> "github.com/docker/notary/tuf/data"
<add> "github.com/opencontainers/go-digest"
<ide> "golang.org/x/net/context"
<ide> )
<ide>
<ide> func PushTrustedReference(cli *command.DockerCli, repoInfo *registry.RepositoryI
<ide> var pushResult types.PushResult
<ide> err := json.Unmarshal(*aux, &pushResult)
<ide> if err == nil && pushResult.Tag != "" {
<del> if dgst, err := digest.ParseDigest(pushResult.Digest); err == nil {
<add> if dgst, err := digest.Parse(pushResult.Digest); err == nil {
<ide> h, err := hex.DecodeString(dgst.Hex())
<ide> if err != nil {
<ide> target = nil
<ide><path>cli/command/service/trust.go
<ide> import (
<ide> "fmt"
<ide>
<ide> "github.com/Sirupsen/logrus"
<del> "github.com/docker/distribution/digest"
<ide> distreference "github.com/docker/distribution/reference"
<ide> "github.com/docker/docker/api/types/swarm"
<ide> "github.com/docker/docker/cli/command"
<ide> "github.com/docker/docker/cli/trust"
<ide> "github.com/docker/docker/reference"
<ide> "github.com/docker/docker/registry"
<ide> "github.com/docker/notary/tuf/data"
<add> "github.com/opencontainers/go-digest"
<ide> "github.com/pkg/errors"
<ide> "golang.org/x/net/context"
<ide> )
<ide> func resolveServiceImageDigest(dockerCli *command.DockerCli, service *swarm.Serv
<ide> // could be parsed as a digest reference. Specifying an image ID
<ide> // is valid but not resolvable. There is no warning message for
<ide> // an image ID because it's valid to use one.
<del> if _, err := digest.ParseDigest(image); err == nil {
<add> if _, err := digest.Parse(image); err == nil {
<ide> return nil
<ide> }
<ide>
<ide><path>cli/compose/schema/bindata.go
<ide> type bintree struct {
<ide> Func func() (*asset, error)
<ide> Children map[string]*bintree
<ide> }
<add>
<ide> var _bintree = &bintree{nil, map[string]*bintree{
<ide> "data": &bintree{nil, map[string]*bintree{
<ide> "config_schema_v3.0.json": &bintree{dataConfig_schema_v30Json, map[string]*bintree{}},
<ide> func _filePath(dir, name string) string {
<ide> cannonicalName := strings.Replace(name, "\\", "/", -1)
<ide> return filepath.Join(append([]string{dir}, strings.Split(cannonicalName, "/")...)...)
<ide> }
<del>
<ide><path>daemon/cluster/cluster.go
<ide> import (
<ide> "time"
<ide>
<ide> "github.com/Sirupsen/logrus"
<del> "github.com/docker/distribution/digest"
<ide> distreference "github.com/docker/distribution/reference"
<ide> apierrors "github.com/docker/docker/api/errors"
<ide> apitypes "github.com/docker/docker/api/types"
<ide> import (
<ide> "github.com/docker/swarmkit/manager/encryption"
<ide> swarmnode "github.com/docker/swarmkit/node"
<ide> "github.com/docker/swarmkit/protobuf/ptypes"
<add> "github.com/opencontainers/go-digest"
<ide> "github.com/pkg/errors"
<ide> "golang.org/x/net/context"
<ide> )
<ide> func (c *Cluster) GetServices(options apitypes.ServiceListOptions) ([]types.Serv
<ide> // TODO(nishanttotla): After the packages converge, the function must
<ide> // convert distreference.Named -> distreference.Canonical, and the logic simplified.
<ide> func (c *Cluster) imageWithDigestString(ctx context.Context, image string, authConfig *apitypes.AuthConfig) (string, error) {
<del> if _, err := digest.ParseDigest(image); err == nil {
<add> if _, err := digest.Parse(image); err == nil {
<ide> return "", errors.New("image reference is an image ID")
<ide> }
<ide> ref, err := distreference.ParseNamed(image)
<ide><path>daemon/cluster/convert/node.go
<ide> func NodeFromGRPC(n swarmapi.Node) types.Node {
<ide> node := types.Node{
<ide> ID: n.ID,
<ide> Spec: types.NodeSpec{
<del> Role: types.NodeRole(strings.ToLower(n.Spec.Role.String())),
<add> Role: types.NodeRole(strings.ToLower(n.Spec.DesiredRole.String())),
<ide> Availability: types.NodeAvailability(strings.ToLower(n.Spec.Availability.String())),
<ide> },
<ide> Status: types.NodeStatus{
<ide> func NodeSpecToGRPC(s types.NodeSpec) (swarmapi.NodeSpec, error) {
<ide> },
<ide> }
<ide> if role, ok := swarmapi.NodeRole_value[strings.ToUpper(string(s.Role))]; ok {
<del> spec.Role = swarmapi.NodeRole(role)
<add> spec.DesiredRole = swarmapi.NodeRole(role)
<ide> } else {
<ide> return swarmapi.NodeSpec{}, fmt.Errorf("invalid Role: %q", s.Role)
<ide> }
<ide><path>daemon/cluster/executor/container/adapter.go
<ide> import (
<ide> "time"
<ide>
<ide> "github.com/Sirupsen/logrus"
<del> "github.com/docker/distribution/digest"
<ide> "github.com/docker/docker/api/types"
<ide> "github.com/docker/docker/api/types/backend"
<ide> containertypes "github.com/docker/docker/api/types/container"
<ide> import (
<ide> "github.com/docker/swarmkit/api"
<ide> "github.com/docker/swarmkit/log"
<ide> "github.com/docker/swarmkit/protobuf/ptypes"
<add> "github.com/opencontainers/go-digest"
<ide> "golang.org/x/net/context"
<ide> "golang.org/x/time/rate"
<ide> )
<ide> func (c *containerAdapter) pullImage(ctx context.Context) error {
<ide> spec := c.container.spec()
<ide>
<ide> // Skip pulling if the image is referenced by image ID.
<del> if _, err := digest.ParseDigest(spec.Image); err == nil {
<add> if _, err := digest.Parse(spec.Image); err == nil {
<ide> return nil
<ide> }
<ide>
<ide><path>daemon/disk_usage.go
<ide> import (
<ide> "fmt"
<ide>
<ide> "github.com/Sirupsen/logrus"
<del> "github.com/docker/distribution/digest"
<ide> "github.com/docker/docker/api/types"
<ide> "github.com/docker/docker/api/types/filters"
<ide> "github.com/docker/docker/layer"
<ide> "github.com/docker/docker/pkg/directory"
<ide> "github.com/docker/docker/volume"
<add> "github.com/opencontainers/go-digest"
<ide> )
<ide>
<ide> func (daemon *Daemon) getLayerRefs() map[layer.ChainID]int {
<ide><path>daemon/image_pull.go
<ide> import (
<ide> "strings"
<ide>
<ide> dist "github.com/docker/distribution"
<del> "github.com/docker/distribution/digest"
<ide> "github.com/docker/docker/api/types"
<ide> "github.com/docker/docker/builder"
<ide> "github.com/docker/docker/distribution"
<ide> progressutils "github.com/docker/docker/distribution/utils"
<ide> "github.com/docker/docker/pkg/progress"
<ide> "github.com/docker/docker/reference"
<ide> "github.com/docker/docker/registry"
<add> "github.com/opencontainers/go-digest"
<ide> "golang.org/x/net/context"
<ide> )
<ide>
<ide> func (daemon *Daemon) PullImage(ctx context.Context, image, tag string, metaHead
<ide> if tag != "" {
<ide> // The "tag" could actually be a digest.
<ide> var dgst digest.Digest
<del> dgst, err = digest.ParseDigest(tag)
<add> dgst, err = digest.Parse(tag)
<ide> if err == nil {
<ide> ref, err = reference.WithDigest(reference.TrimNamed(ref), dgst)
<ide> } else {
<ide><path>daemon/prune.go
<ide> import (
<ide> "regexp"
<ide>
<ide> "github.com/Sirupsen/logrus"
<del> "github.com/docker/distribution/digest"
<ide> "github.com/docker/docker/api/types"
<ide> "github.com/docker/docker/api/types/filters"
<ide> "github.com/docker/docker/image"
<ide> import (
<ide> "github.com/docker/docker/runconfig"
<ide> "github.com/docker/docker/volume"
<ide> "github.com/docker/libnetwork"
<add> "github.com/opencontainers/go-digest"
<ide> )
<ide>
<ide> // ContainersPrune removes unused containers
<ide><path>distribution/config.go
<ide> import (
<ide> "runtime"
<ide>
<ide> "github.com/docker/distribution"
<del> "github.com/docker/distribution/digest"
<ide> "github.com/docker/distribution/manifest/schema2"
<ide> "github.com/docker/docker/api/types"
<ide> "github.com/docker/docker/distribution/metadata"
<ide> import (
<ide> "github.com/docker/docker/reference"
<ide> "github.com/docker/docker/registry"
<ide> "github.com/docker/libtrust"
<add> "github.com/opencontainers/go-digest"
<ide> "golang.org/x/net/context"
<ide> )
<ide>
<ide><path>distribution/metadata/v2_metadata_service.go
<ide> import (
<ide> "encoding/json"
<ide> "errors"
<ide>
<del> "github.com/docker/distribution/digest"
<ide> "github.com/docker/docker/api/types"
<ide> "github.com/docker/docker/layer"
<add> "github.com/opencontainers/go-digest"
<ide> )
<ide>
<ide> // V2MetadataService maps layer IDs to a set of known metadata for
<ide><path>distribution/metadata/v2_metadata_service_test.go
<ide> import (
<ide> "reflect"
<ide> "testing"
<ide>
<del> "github.com/docker/distribution/digest"
<ide> "github.com/docker/docker/layer"
<add> "github.com/opencontainers/go-digest"
<ide> )
<ide>
<ide> func TestV2MetadataService(t *testing.T) {
<ide><path>distribution/pull.go
<ide> import (
<ide> "fmt"
<ide>
<ide> "github.com/Sirupsen/logrus"
<del> "github.com/docker/distribution/digest"
<ide> "github.com/docker/docker/api"
<ide> "github.com/docker/docker/distribution/metadata"
<ide> "github.com/docker/docker/pkg/progress"
<ide> "github.com/docker/docker/reference"
<ide> "github.com/docker/docker/registry"
<add> "github.com/opencontainers/go-digest"
<ide> "golang.org/x/net/context"
<ide> )
<ide>
<ide><path>distribution/pull_v2.go
<ide> import (
<ide>
<ide> "github.com/Sirupsen/logrus"
<ide> "github.com/docker/distribution"
<del> "github.com/docker/distribution/digest"
<ide> "github.com/docker/distribution/manifest/manifestlist"
<ide> "github.com/docker/distribution/manifest/schema1"
<ide> "github.com/docker/distribution/manifest/schema2"
<ide> import (
<ide> "github.com/docker/docker/pkg/stringid"
<ide> "github.com/docker/docker/reference"
<ide> "github.com/docker/docker/registry"
<add> "github.com/opencontainers/go-digest"
<ide> "golang.org/x/net/context"
<ide> )
<ide>
<ide> func (ld *v2LayerDescriptor) Download(ctx context.Context, progressOutput progre
<ide> defer reader.Close()
<ide>
<ide> if ld.verifier == nil {
<del> ld.verifier, err = digest.NewDigestVerifier(ld.digest)
<del> if err != nil {
<del> return nil, 0, xfer.DoNotRetry{Err: err}
<del> }
<add> ld.verifier = ld.digest.Verifier()
<ide> }
<ide>
<ide> _, err = io.Copy(tmpFile, io.TeeReader(reader, ld.verifier))
<ide> func (p *v2Puller) pullSchema2Config(ctx context.Context, dgst digest.Digest) (c
<ide> }
<ide>
<ide> // Verify image config digest
<del> verifier, err := digest.NewDigestVerifier(dgst)
<del> if err != nil {
<del> return nil, err
<del> }
<add> verifier := dgst.Verifier()
<ide> if _, err := verifier.Write(configJSON); err != nil {
<ide> return nil, err
<ide> }
<ide> func schema2ManifestDigest(ref reference.Named, mfst distribution.Manifest) (dig
<ide>
<ide> // If pull by digest, then verify the manifest digest.
<ide> if digested, isDigested := ref.(reference.Canonical); isDigested {
<del> verifier, err := digest.NewDigestVerifier(digested.Digest())
<del> if err != nil {
<del> return "", err
<del> }
<add> verifier := digested.Digest().Verifier()
<ide> if _, err := verifier.Write(canonical); err != nil {
<ide> return "", err
<ide> }
<ide> func verifySchema1Manifest(signedManifest *schema1.SignedManifest, ref reference
<ide> // important to do this first, before any other content validation. If the
<ide> // digest cannot be verified, don't even bother with those other things.
<ide> if digested, isCanonical := ref.(reference.Canonical); isCanonical {
<del> verifier, err := digest.NewDigestVerifier(digested.Digest())
<del> if err != nil {
<del> return nil, err
<del> }
<add> verifier := digested.Digest().Verifier()
<ide> if _, err := verifier.Write(signedManifest.Canonical); err != nil {
<ide> return nil, err
<ide> }
<ide><path>distribution/pull_v2_test.go
<ide> import (
<ide> "strings"
<ide> "testing"
<ide>
<del> "github.com/docker/distribution/digest"
<ide> "github.com/docker/distribution/manifest/schema1"
<ide> "github.com/docker/docker/reference"
<add> "github.com/opencontainers/go-digest"
<ide> )
<ide>
<ide> // TestFixManifestLayers checks that fixManifestLayers removes a duplicate
<ide><path>distribution/push_v1.go
<ide> import (
<ide> "sync"
<ide>
<ide> "github.com/Sirupsen/logrus"
<del> "github.com/docker/distribution/digest"
<ide> "github.com/docker/distribution/registry/client/transport"
<ide> "github.com/docker/docker/distribution/metadata"
<ide> "github.com/docker/docker/dockerversion"
<ide> import (
<ide> "github.com/docker/docker/pkg/stringid"
<ide> "github.com/docker/docker/reference"
<ide> "github.com/docker/docker/registry"
<add> "github.com/opencontainers/go-digest"
<ide> "golang.org/x/net/context"
<ide> )
<ide>
<ide><path>distribution/push_v2.go
<ide> import (
<ide>
<ide> "github.com/Sirupsen/logrus"
<ide> "github.com/docker/distribution"
<del> "github.com/docker/distribution/digest"
<ide> "github.com/docker/distribution/manifest/schema1"
<ide> "github.com/docker/distribution/manifest/schema2"
<ide> distreference "github.com/docker/distribution/reference"
<ide> import (
<ide> "github.com/docker/docker/pkg/stringid"
<ide> "github.com/docker/docker/reference"
<ide> "github.com/docker/docker/registry"
<add> "github.com/opencontainers/go-digest"
<ide> )
<ide>
<ide> const (
<ide> func (pd *v2PushDescriptor) uploadUsingSession(
<ide> return distribution.Descriptor{}, fmt.Errorf("unsupported layer media type %s", m)
<ide> }
<ide>
<del> digester := digest.Canonical.New()
<add> digester := digest.Canonical.Digester()
<ide> tee := io.TeeReader(reader, digester.Hash())
<ide>
<ide> nn, err := layerUpload.ReadFrom(tee)
<ide><path>distribution/push_v2_test.go
<ide> import (
<ide>
<ide> "github.com/docker/distribution"
<ide> "github.com/docker/distribution/context"
<del> "github.com/docker/distribution/digest"
<ide> "github.com/docker/distribution/manifest/schema2"
<ide> distreference "github.com/docker/distribution/reference"
<ide> "github.com/docker/docker/distribution/metadata"
<ide> "github.com/docker/docker/layer"
<ide> "github.com/docker/docker/pkg/progress"
<ide> "github.com/docker/docker/reference"
<add> "github.com/opencontainers/go-digest"
<ide> )
<ide>
<ide> func TestGetRepositoryMountCandidates(t *testing.T) {
<ide><path>distribution/xfer/download_test.go
<ide> import (
<ide> "time"
<ide>
<ide> "github.com/docker/distribution"
<del> "github.com/docker/distribution/digest"
<ide> "github.com/docker/docker/image"
<ide> "github.com/docker/docker/layer"
<ide> "github.com/docker/docker/pkg/progress"
<add> "github.com/opencontainers/go-digest"
<ide> "golang.org/x/net/context"
<ide> )
<ide>
<ide><path>image/fs.go
<ide> import (
<ide> "sync"
<ide>
<ide> "github.com/Sirupsen/logrus"
<del> "github.com/docker/distribution/digest"
<ide> "github.com/docker/docker/pkg/ioutils"
<add> "github.com/opencontainers/go-digest"
<ide> )
<ide>
<ide> // DigestWalkFunc is function called by StoreBackend.Walk
<ide><path>image/fs_test.go
<ide> import (
<ide> "path/filepath"
<ide> "testing"
<ide>
<del> "github.com/docker/distribution/digest"
<add> "github.com/opencontainers/go-digest"
<ide> )
<ide>
<ide> func TestFSGetSet(t *testing.T) {
<ide><path>image/image.go
<ide> import (
<ide> "io"
<ide> "time"
<ide>
<del> "github.com/docker/distribution/digest"
<ide> "github.com/docker/docker/api/types/container"
<add> "github.com/opencontainers/go-digest"
<ide> )
<ide>
<ide> // ID is the content-addressable ID of an image.
<ide><path>image/store.go
<ide> import (
<ide> "sync"
<ide>
<ide> "github.com/Sirupsen/logrus"
<del> "github.com/docker/distribution/digest"
<add> "github.com/docker/distribution/digestset"
<ide> "github.com/docker/docker/layer"
<add> "github.com/opencontainers/go-digest"
<ide> )
<ide>
<ide> // Store is an interface for creating and accessing images
<ide> type store struct {
<ide> ls LayerGetReleaser
<ide> images map[ID]*imageMeta
<ide> fs StoreBackend
<del> digestSet *digest.Set
<add> digestSet *digestset.Set
<ide> }
<ide>
<ide> // NewImageStore returns new store object for given layer store
<ide> func NewImageStore(fs StoreBackend, ls LayerGetReleaser) (Store, error) {
<ide> ls: ls,
<ide> images: make(map[ID]*imageMeta),
<ide> fs: fs,
<del> digestSet: digest.NewSet(),
<add> digestSet: digestset.NewSet(),
<ide> }
<ide>
<ide> // load all current images and retain layers
<ide> func (is *store) Search(term string) (ID, error) {
<ide>
<ide> dgst, err := is.digestSet.Lookup(term)
<ide> if err != nil {
<del> if err == digest.ErrDigestNotFound {
<add> if err == digestset.ErrDigestNotFound {
<ide> err = fmt.Errorf("No such image: %s", term)
<ide> }
<ide> return "", err
<ide><path>image/store_test.go
<ide> import (
<ide> "os"
<ide> "testing"
<ide>
<del> "github.com/docker/distribution/digest"
<ide> "github.com/docker/docker/layer"
<add> "github.com/opencontainers/go-digest"
<ide> )
<ide>
<ide> func TestRestore(t *testing.T) {
<ide><path>image/tarexport/load.go
<ide> import (
<ide>
<ide> "github.com/Sirupsen/logrus"
<ide> "github.com/docker/distribution"
<del> "github.com/docker/distribution/digest"
<ide> "github.com/docker/docker/image"
<ide> "github.com/docker/docker/image/v1"
<ide> "github.com/docker/docker/layer"
<ide> import (
<ide> "github.com/docker/docker/pkg/symlink"
<ide> "github.com/docker/docker/pkg/system"
<ide> "github.com/docker/docker/reference"
<add> "github.com/opencontainers/go-digest"
<ide> )
<ide>
<ide> func (l *tarexporter) Load(inTar io.ReadCloser, outStream io.Writer, quiet bool) error {
<ide><path>image/tarexport/save.go
<ide> import (
<ide> "time"
<ide>
<ide> "github.com/docker/distribution"
<del> "github.com/docker/distribution/digest"
<ide> "github.com/docker/docker/image"
<ide> "github.com/docker/docker/image/v1"
<ide> "github.com/docker/docker/layer"
<ide> "github.com/docker/docker/pkg/archive"
<ide> "github.com/docker/docker/pkg/system"
<ide> "github.com/docker/docker/reference"
<add> "github.com/opencontainers/go-digest"
<ide> "github.com/pkg/errors"
<ide> )
<ide>
<ide><path>image/v1/imagev1.go
<ide> import (
<ide> "strings"
<ide>
<ide> "github.com/Sirupsen/logrus"
<del> "github.com/docker/distribution/digest"
<ide> "github.com/docker/docker/api/types/versions"
<ide> "github.com/docker/docker/image"
<ide> "github.com/docker/docker/layer"
<ide> "github.com/docker/docker/pkg/stringid"
<add> "github.com/opencontainers/go-digest"
<ide> )
<ide>
<ide> // noFallbackMinVersion is the minimum version for which v1compatibility
<ide><path>integration-cli/docker_cli_by_digest_test.go
<ide> import (
<ide> "regexp"
<ide> "strings"
<ide>
<del> "github.com/docker/distribution/digest"
<ide> "github.com/docker/distribution/manifest/schema1"
<ide> "github.com/docker/distribution/manifest/schema2"
<ide> "github.com/docker/docker/api/types"
<ide> "github.com/docker/docker/integration-cli/checker"
<ide> "github.com/docker/docker/pkg/stringutils"
<ide> "github.com/go-check/check"
<add> "github.com/opencontainers/go-digest"
<ide> )
<ide>
<ide> var (
<ide><path>integration-cli/docker_cli_pull_local_test.go
<ide> import (
<ide> "strings"
<ide>
<ide> "github.com/docker/distribution"
<del> "github.com/docker/distribution/digest"
<ide> "github.com/docker/distribution/manifest"
<ide> "github.com/docker/distribution/manifest/manifestlist"
<ide> "github.com/docker/distribution/manifest/schema2"
<ide> "github.com/docker/docker/integration-cli/checker"
<ide> "github.com/go-check/check"
<add> "github.com/opencontainers/go-digest"
<ide> )
<ide>
<ide> // testPullImageWithAliases pulls a specific image tag and verifies that any aliases (i.e., other
<ide><path>integration-cli/docker_cli_pull_test.go
<ide> import (
<ide> "sync"
<ide> "time"
<ide>
<del> "github.com/docker/distribution/digest"
<ide> "github.com/docker/docker/integration-cli/checker"
<ide> "github.com/go-check/check"
<add> "github.com/opencontainers/go-digest"
<ide> )
<ide>
<ide> // TestPullFromCentralRegistry pulls an image from the central registry and verifies that the client
<ide> func (s *DockerHubPullSuite) TestPullFromCentralRegistry(c *check.C) {
<ide> matches := regexp.MustCompile(`Digest: (.+)\n`).FindAllStringSubmatch(out, -1)
<ide> c.Assert(len(matches), checker.Equals, 1, check.Commentf("expected exactly one image digest in the output"))
<ide> c.Assert(len(matches[0]), checker.Equals, 2, check.Commentf("unexpected number of submatches for the digest"))
<del> _, err := digest.ParseDigest(matches[0][1])
<add> _, err := digest.Parse(matches[0][1])
<ide> c.Check(err, checker.IsNil, check.Commentf("invalid digest %q in output", matches[0][1]))
<ide>
<ide> // We should have a single entry in images.
<ide><path>integration-cli/docker_cli_save_load_test.go
<ide> import (
<ide> "strings"
<ide> "time"
<ide>
<del> "github.com/docker/distribution/digest"
<ide> "github.com/docker/docker/integration-cli/checker"
<ide> "github.com/docker/docker/pkg/testutil"
<ide> "github.com/go-check/check"
<add> "github.com/opencontainers/go-digest"
<ide> )
<ide>
<ide> // save a repo using gz compression and try to load it using stdout
<ide><path>integration-cli/registry/registry.go
<ide> import (
<ide> "os/exec"
<ide> "path/filepath"
<ide>
<del> "github.com/docker/distribution/digest"
<add> "github.com/opencontainers/go-digest"
<ide> )
<ide>
<ide> const (
<ide><path>layer/empty_test.go
<ide> import (
<ide> "io"
<ide> "testing"
<ide>
<del> "github.com/docker/distribution/digest"
<add> "github.com/opencontainers/go-digest"
<ide> )
<ide>
<ide> func TestEmptyLayer(t *testing.T) {
<ide> func TestEmptyLayer(t *testing.T) {
<ide> t.Fatalf("error streaming tar for empty layer: %v", err)
<ide> }
<ide>
<del> digester := digest.Canonical.New()
<add> digester := digest.Canonical.Digester()
<ide> _, err = io.Copy(digester.Hash(), tarStream)
<ide>
<ide> if err != nil {
<ide><path>layer/filestore.go
<ide> import (
<ide>
<ide> "github.com/Sirupsen/logrus"
<ide> "github.com/docker/distribution"
<del> "github.com/docker/distribution/digest"
<ide> "github.com/docker/docker/pkg/ioutils"
<add> "github.com/opencontainers/go-digest"
<ide> )
<ide>
<ide> var (
<ide> func (fms *fileMetadataStore) GetParent(layer ChainID) (ChainID, error) {
<ide> return "", err
<ide> }
<ide>
<del> dgst, err := digest.ParseDigest(strings.TrimSpace(string(content)))
<add> dgst, err := digest.Parse(strings.TrimSpace(string(content)))
<ide> if err != nil {
<ide> return "", err
<ide> }
<ide> func (fms *fileMetadataStore) GetDiffID(layer ChainID) (DiffID, error) {
<ide> return "", err
<ide> }
<ide>
<del> dgst, err := digest.ParseDigest(strings.TrimSpace(string(content)))
<add> dgst, err := digest.Parse(strings.TrimSpace(string(content)))
<ide> if err != nil {
<ide> return "", err
<ide> }
<ide> func (fms *fileMetadataStore) GetMountParent(mount string) (ChainID, error) {
<ide> return "", err
<ide> }
<ide>
<del> dgst, err := digest.ParseDigest(strings.TrimSpace(string(content)))
<add> dgst, err := digest.Parse(strings.TrimSpace(string(content)))
<ide> if err != nil {
<ide> return "", err
<ide> }
<ide><path>layer/filestore_test.go
<ide> import (
<ide> "syscall"
<ide> "testing"
<ide>
<del> "github.com/docker/distribution/digest"
<add> "github.com/opencontainers/go-digest"
<ide> )
<ide>
<ide> func randomLayerID(seed int64) ChainID {
<ide><path>layer/layer.go
<ide> import (
<ide>
<ide> "github.com/Sirupsen/logrus"
<ide> "github.com/docker/distribution"
<del> "github.com/docker/distribution/digest"
<ide> "github.com/docker/docker/pkg/archive"
<add> "github.com/opencontainers/go-digest"
<ide> )
<ide>
<ide> var (
<ide><path>layer/layer_store.go
<ide> import (
<ide>
<ide> "github.com/Sirupsen/logrus"
<ide> "github.com/docker/distribution"
<del> "github.com/docker/distribution/digest"
<ide> "github.com/docker/docker/daemon/graphdriver"
<ide> "github.com/docker/docker/pkg/idtools"
<ide> "github.com/docker/docker/pkg/plugingetter"
<ide> "github.com/docker/docker/pkg/stringid"
<add> "github.com/opencontainers/go-digest"
<ide> "github.com/vbatts/tar-split/tar/asm"
<ide> "github.com/vbatts/tar-split/tar/storage"
<ide> )
<ide> func (ls *layerStore) loadMount(mount string) error {
<ide> }
<ide>
<ide> func (ls *layerStore) applyTar(tx MetadataTransaction, ts io.Reader, parent string, layer *roLayer) error {
<del> digester := digest.Canonical.New()
<add> digester := digest.Canonical.Digester()
<ide> tr := io.TeeReader(ts, digester.Hash())
<ide>
<ide> tsw, err := tx.TarSplitWriter(true)
<ide><path>layer/layer_test.go
<ide> import (
<ide> "strings"
<ide> "testing"
<ide>
<del> "github.com/docker/distribution/digest"
<ide> "github.com/docker/docker/daemon/graphdriver"
<ide> "github.com/docker/docker/daemon/graphdriver/vfs"
<ide> "github.com/docker/docker/pkg/archive"
<ide> "github.com/docker/docker/pkg/idtools"
<ide> "github.com/docker/docker/pkg/stringid"
<add> "github.com/opencontainers/go-digest"
<ide> )
<ide>
<ide> func init() {
<ide><path>layer/layer_windows.go
<ide> import (
<ide> "fmt"
<ide>
<ide> "github.com/Sirupsen/logrus"
<del> "github.com/docker/distribution/digest"
<ide> "github.com/docker/docker/daemon/graphdriver"
<add> "github.com/opencontainers/go-digest"
<ide> )
<ide>
<ide> // GetLayerPath returns the path to a layer
<ide><path>layer/migration.go
<ide> import (
<ide> "os"
<ide>
<ide> "github.com/Sirupsen/logrus"
<del> "github.com/docker/distribution/digest"
<add> "github.com/opencontainers/go-digest"
<ide> "github.com/vbatts/tar-split/tar/asm"
<ide> "github.com/vbatts/tar-split/tar/storage"
<ide> )
<ide> func (ls *layerStore) ChecksumForGraphID(id, parent, oldTarDataPath, newTarDataP
<ide> return
<ide> }
<ide>
<del> dgst := digest.Canonical.New()
<add> dgst := digest.Canonical.Digester()
<ide> err = ls.assembleTarTo(id, uncompressed, &size, dgst.Hash())
<ide> if err != nil {
<ide> return
<ide><path>layer/ro_layer.go
<ide> import (
<ide> "io"
<ide>
<ide> "github.com/docker/distribution"
<del> "github.com/docker/distribution/digest"
<add> "github.com/opencontainers/go-digest"
<ide> )
<ide>
<ide> type roLayer struct {
<ide> func storeLayer(tx MetadataTransaction, layer *roLayer) error {
<ide> }
<ide>
<ide> func newVerifiedReadCloser(rc io.ReadCloser, dgst digest.Digest) (io.ReadCloser, error) {
<del> verifier, err := digest.NewDigestVerifier(dgst)
<del> if err != nil {
<del> return nil, err
<del> }
<ide> return &verifiedReadCloser{
<ide> rc: rc,
<ide> dgst: dgst,
<del> verifier: verifier,
<add> verifier: dgst.Verifier(),
<ide> }, nil
<ide> }
<ide>
<ide><path>migrate/v1/migratev1.go
<ide> import (
<ide> "encoding/json"
<ide>
<ide> "github.com/Sirupsen/logrus"
<del> "github.com/docker/distribution/digest"
<ide> "github.com/docker/docker/distribution/metadata"
<ide> "github.com/docker/docker/image"
<ide> imagev1 "github.com/docker/docker/image/v1"
<ide> "github.com/docker/docker/layer"
<ide> "github.com/docker/docker/pkg/ioutils"
<ide> "github.com/docker/docker/reference"
<add> "github.com/opencontainers/go-digest"
<ide> )
<ide>
<ide> type graphIDRegistrar interface {
<ide> func migrateRefs(root, driverName string, rs refAdder, mappings map[string]image
<ide> logrus.Errorf("migrate tags: invalid name %q, %q", name, err)
<ide> continue
<ide> }
<del> if dgst, err := digest.ParseDigest(tag); err == nil {
<add> if dgst, err := digest.Parse(tag); err == nil {
<ide> canonical, err := reference.WithDigest(reference.TrimNamed(ref), dgst)
<ide> if err != nil {
<ide> logrus.Errorf("migrate tags: invalid digest %q, %q", dgst, err)
<ide> func migrateImage(id, root string, ls graphIDRegistrar, is image.Store, ms metad
<ide> if err != nil {
<ide> return err
<ide> }
<del> diffID, err := digest.ParseDigest(string(diffIDData))
<add> diffID, err := digest.Parse(string(diffIDData))
<ide> if err != nil {
<ide> return err
<ide> }
<ide> func migrateImage(id, root string, ls graphIDRegistrar, is image.Store, ms metad
<ide>
<ide> checksum, err := ioutil.ReadFile(filepath.Join(root, graphDirName, id, "checksum"))
<ide> if err == nil { // best effort
<del> dgst, err := digest.ParseDigest(string(checksum))
<add> dgst, err := digest.Parse(string(checksum))
<ide> if err == nil {
<ide> V2MetadataService := metadata.NewV2MetadataService(ms)
<ide> V2MetadataService.Add(layer.DiffID(), metadata.V2Metadata{Digest: dgst})
<ide><path>migrate/v1/migratev1_test.go
<ide> import (
<ide> "runtime"
<ide> "testing"
<ide>
<del> "github.com/docker/distribution/digest"
<ide> "github.com/docker/docker/distribution/metadata"
<ide> "github.com/docker/docker/image"
<ide> "github.com/docker/docker/layer"
<ide> "github.com/docker/docker/reference"
<add> "github.com/opencontainers/go-digest"
<ide> )
<ide>
<ide> func TestMigrateRefs(t *testing.T) {
<ide><path>plugin/backend_linux.go
<ide> import (
<ide> "strings"
<ide>
<ide> "github.com/Sirupsen/logrus"
<del> "github.com/docker/distribution/digest"
<ide> "github.com/docker/distribution/manifest/schema2"
<ide> "github.com/docker/docker/api/types"
<ide> "github.com/docker/docker/distribution"
<ide> import (
<ide> "github.com/docker/docker/pkg/progress"
<ide> "github.com/docker/docker/plugin/v2"
<ide> "github.com/docker/docker/reference"
<add> "github.com/opencontainers/go-digest"
<ide> "github.com/pkg/errors"
<ide> "golang.org/x/net/context"
<ide> )
<ide> func (s *tempConfigStore) Put(c []byte) (digest.Digest, error) {
<ide>
<ide> func (s *tempConfigStore) Get(d digest.Digest) ([]byte, error) {
<ide> if d != s.configDigest {
<del> return nil, digest.ErrDigestNotFound
<add> return nil, fmt.Errorf("digest not found")
<ide> }
<ide> return s.config, nil
<ide> }
<ide> func (pm *Manager) CreateFromContext(ctx context.Context, tarCtx io.ReadCloser,
<ide> }
<ide> defer rootFSBlob.Close()
<ide> gzw := gzip.NewWriter(rootFSBlob)
<del> layerDigester := digest.Canonical.New()
<add> layerDigester := digest.Canonical.Digester()
<ide> rootFSReader := io.TeeReader(rootFS, io.MultiWriter(gzw, layerDigester.Hash()))
<ide>
<ide> if err := chrootarchive.Untar(rootFSReader, tmpRootFSDir, nil); err != nil {
<ide><path>plugin/blobstore.go
<ide> package plugin
<ide>
<ide> import (
<add> "fmt"
<ide> "io"
<ide> "io/ioutil"
<ide> "os"
<ide> "path/filepath"
<ide>
<ide> "github.com/Sirupsen/logrus"
<del> "github.com/docker/distribution/digest"
<ide> "github.com/docker/docker/distribution/xfer"
<ide> "github.com/docker/docker/image"
<ide> "github.com/docker/docker/layer"
<ide> "github.com/docker/docker/pkg/archive"
<ide> "github.com/docker/docker/pkg/progress"
<add> "github.com/opencontainers/go-digest"
<ide> "github.com/pkg/errors"
<ide> "golang.org/x/net/context"
<ide> )
<ide> type insertion struct {
<ide> }
<ide>
<ide> func newInsertion(tempFile *os.File) *insertion {
<del> digester := digest.Canonical.New()
<add> digester := digest.Canonical.Digester()
<ide> return &insertion{f: tempFile, digester: digester, Writer: io.MultiWriter(tempFile, digester.Hash())}
<ide> }
<ide>
<ide> func (dm *downloadManager) Download(ctx context.Context, initialRootFS image.Roo
<ide> if err != nil {
<ide> return initialRootFS, nil, err
<ide> }
<del> digester := digest.Canonical.New()
<add> digester := digest.Canonical.Digester()
<ide> if _, err := archive.ApplyLayer(dm.tmpDir, io.TeeReader(inflatedLayerData, digester.Hash())); err != nil {
<ide> return initialRootFS, nil, err
<ide> }
<ide> func (dm *downloadManager) Put(dt []byte) (digest.Digest, error) {
<ide> }
<ide>
<ide> func (dm *downloadManager) Get(d digest.Digest) ([]byte, error) {
<del> return nil, digest.ErrDigestNotFound
<add> return nil, fmt.Errorf("digest not found")
<ide> }
<ide> func (dm *downloadManager) RootFSFromConfig(c []byte) (*image.RootFS, error) {
<ide> return configToRootFS(c)
<ide><path>plugin/manager.go
<ide> import (
<ide> "sync"
<ide>
<ide> "github.com/Sirupsen/logrus"
<del> "github.com/docker/distribution/digest"
<ide> "github.com/docker/docker/api/types"
<ide> "github.com/docker/docker/image"
<ide> "github.com/docker/docker/layer"
<ide> import (
<ide> "github.com/docker/docker/plugin/v2"
<ide> "github.com/docker/docker/reference"
<ide> "github.com/docker/docker/registry"
<add> "github.com/opencontainers/go-digest"
<ide> "github.com/pkg/errors"
<ide> )
<ide>
<ide><path>plugin/manager_linux.go
<ide> import (
<ide> "time"
<ide>
<ide> "github.com/Sirupsen/logrus"
<del> "github.com/docker/distribution/digest"
<ide> "github.com/docker/docker/api/types"
<ide> "github.com/docker/docker/daemon/initlayer"
<ide> "github.com/docker/docker/libcontainerd"
<ide> "github.com/docker/docker/pkg/mount"
<ide> "github.com/docker/docker/pkg/plugins"
<ide> "github.com/docker/docker/pkg/stringid"
<ide> "github.com/docker/docker/plugin/v2"
<add> "github.com/opencontainers/go-digest"
<ide> specs "github.com/opencontainers/runtime-spec/specs-go"
<ide> "github.com/pkg/errors"
<ide> )
<ide><path>plugin/v2/plugin.go
<ide> import (
<ide> "strings"
<ide> "sync"
<ide>
<del> "github.com/docker/distribution/digest"
<ide> "github.com/docker/docker/api/types"
<ide> "github.com/docker/docker/pkg/plugingetter"
<ide> "github.com/docker/docker/pkg/plugins"
<add> "github.com/opencontainers/go-digest"
<ide> )
<ide>
<ide> // Plugin represents an individual plugin.
<ide><path>reference/reference.go
<ide> import (
<ide> "fmt"
<ide> "strings"
<ide>
<del> "github.com/docker/distribution/digest"
<ide> distreference "github.com/docker/distribution/reference"
<ide> "github.com/docker/docker/pkg/stringid"
<add> "github.com/opencontainers/go-digest"
<ide> )
<ide>
<ide> const (
<ide> func ParseIDOrReference(idOrRef string) (digest.Digest, Named, error) {
<ide> if err := stringid.ValidateID(idOrRef); err == nil {
<ide> idOrRef = "sha256:" + idOrRef
<ide> }
<del> if dgst, err := digest.ParseDigest(idOrRef); err == nil {
<add> if dgst, err := digest.Parse(idOrRef); err == nil {
<ide> return dgst, nil, nil
<ide> }
<ide> ref, err := ParseNamed(idOrRef)
<ide><path>reference/reference_test.go
<ide> package reference
<ide> import (
<ide> "testing"
<ide>
<del> "github.com/docker/distribution/digest"
<add> "github.com/opencontainers/go-digest"
<ide> )
<ide>
<ide> func TestValidateReferenceName(t *testing.T) {
<ide><path>reference/store.go
<ide> import (
<ide> "sort"
<ide> "sync"
<ide>
<del> "github.com/docker/distribution/digest"
<ide> "github.com/docker/docker/pkg/ioutils"
<add> "github.com/opencontainers/go-digest"
<ide> )
<ide>
<ide> var (
<ide><path>reference/store_test.go
<ide> import (
<ide> "strings"
<ide> "testing"
<ide>
<del> "github.com/docker/distribution/digest"
<add> "github.com/opencontainers/go-digest"
<ide> )
<ide>
<ide> var ( | 53 |
Python | Python | fix scalar case for corrcoef | 9003457feff1a215d296a300dc8e2682fa797592 | <ide><path>numpy/lib/function_base.py
<ide> def corrcoef(x, y=None):
<ide> """The correlation coefficients
<ide> """
<ide> c = cov(x, y)
<del> d = diag(c)
<add> try:
<add> d = diag(c)
<add> except ValueError: # scalar covariance
<add> return 1
<ide> return c/sqrt(multiply.outer(d,d))
<ide>
<ide> def blackman(M): | 1 |
Ruby | Ruby | use -= in place of the expanded form | 132f3e2d37057eb4de13e32bf52c13097183c622 | <ide><path>actionpack/lib/action_view/helpers/number_helper.rb
<ide> def number_with_precision(number, options = {})
<ide> digits = (Math.log10(number.abs) + 1).floor
<ide> rounded_number = BigDecimal.new((number / 10 ** (digits - precision)).to_s).round.to_f * 10 ** (digits - precision)
<ide> end
<del> precision = precision - digits
<add> precision -= digits
<ide> precision = precision > 0 ? precision : 0 #don't let it be negative
<ide> else
<ide> rounded_number = BigDecimal.new((number * (10 ** precision)).to_s).round.to_f / 10 ** precision | 1 |
Python | Python | remove an annoying line feed from skipif decorator | 9d849ede86cdb156a124722ce14abf1e985e6d0b | <ide><path>numpy/testing/decorators.py
<ide> def get_msg(func,msg=None):
<ide> if msg is None:
<ide> out = 'Test skipped due to test condition'
<ide> else:
<del> out = '\n'+msg
<add> out = msg
<ide>
<del> return "Skipping test: %s%s" % (func.__name__,out)
<add> return "Skipping test: %s: %s" % (func.__name__, out)
<ide>
<ide> # We need to define *two* skippers because Python doesn't allow both
<ide> # return with value and yield inside the same function. | 1 |
Text | Text | update readme with escape sequence | 2e6a5bc7ee932b3d723ca4b0a319477310b12c34 | <ide><path>README.md
<ide> docker pull base
<ide> docker run -i -t base /bin/bash
<ide> ```
<ide>
<add>Detaching from the interactive shell
<add>------------------------------------
<add>```
<add># In order to detach without killing the shell, you can use the escape sequence Ctrl-p + Ctrl-q
<add># Note: this works only in tty mode (run with -t option).
<add>```
<ide>
<ide> Starting a long-running worker process
<ide> -------------------------------------- | 1 |
Ruby | Ruby | remove unused event_object_subscriber method | c816006a73dda484a9cd043f5d9e276e5dfc3213 | <ide><path>activesupport/lib/active_support/notifications/fanout.rb
<ide> def self.new(pattern, listener, monotonic)
<ide> wrap_all pattern, subscriber_class.new(pattern, listener)
<ide> end
<ide>
<del> def self.event_object_subscriber(pattern, block)
<del> wrap_all pattern, EventObject.new(pattern, block)
<del> end
<del>
<ide> def self.wrap_all(pattern, subscriber)
<ide> unless pattern
<ide> AllMessages.new(subscriber) | 1 |
Javascript | Javascript | fix apm bin path on appveyor | 748e61c88f4e688ec139dfe6d9e984f17fdbfcb0 | <ide><path>script/config.js
<ide> function isBuildingPR () {
<ide>
<ide> function getApmBinPath () {
<ide> const apmBinName = process.platform === 'win32' ? 'apm.cmd' : 'apm'
<del> return path.join(apmRootPath, 'node_modules', '.bin', apmBinName)
<add> return path.join(apmRootPath, 'node_modules', 'atom-package-manager', 'bin', apmBinName)
<ide> }
<ide>
<ide> function getNpmBinPath () { | 1 |
Python | Python | make openmp on windows optional | 5f699883dd5b68998846443d9d34faba79758ff6 | <ide><path>setup.py
<ide> # which is really known only after finalize_options
<ide> # http://stackoverflow.com/questions/724664/python-distutils-how-to-get-a-compiler-that-is-going-to-be-used
<ide> compile_options = {
<del> 'msvc': ['/Ox', '/EHsc', '/openmp'],
<add> 'msvc': ['/Ox', '/EHsc'],
<ide> 'mingw32' : ['-O3', '-Wno-strict-prototypes', '-Wno-unused-function'],
<ide> 'other' : ['-O3', '-Wno-strict-prototypes', '-Wno-unused-function']
<ide> }
<ide> }
<ide>
<ide>
<add>if os.environ.get('USE_OPENMP') == '1':
<add> compile_options['msvc'].append('/openmp')
<add>
<add>
<ide> if not sys.platform.startswith('darwin'):
<ide> compile_options['other'].append('-fopenmp')
<ide> link_options['other'].append('-fopenmp') | 1 |
Javascript | Javascript | move no hits case into own component | 00e0f574dfbd431d714a0086e3803982e5a9a846 | <ide><path>client/src/components/search/searchBar/NoHitsSuggestion.js
<add>import React from 'react';
<add>import PropTypes from 'prop-types';
<add>
<add>const NoHitsSuggestion = ({ title, handleMouseEnter, handleMouseLeave }) => {
<add> return (
<add> <div
<add> className={'no-hits-footer fcc_suggestion_item'}
<add> onMouseEnter={handleMouseEnter}
<add> onMouseLeave={handleMouseLeave}
<add> >
<add> <span className='hit-name'>{title}</span>
<add> </div>
<add> );
<add>};
<add>
<add>NoHitsSuggestion.propTypes = {
<add> handleMouseEnter: PropTypes.func.isRequired,
<add> handleMouseLeave: PropTypes.func.isRequired,
<add> title: PropTypes.string
<add>};
<add>
<add>export default NoHitsSuggestion;
<ide><path>client/src/components/search/searchBar/SearchHits.js
<ide> import PropTypes from 'prop-types';
<ide> import { connectStateResults, connectHits } from 'react-instantsearch-dom';
<ide> import isEmpty from 'lodash/isEmpty';
<ide> import Suggestion from './SearchSuggestion';
<add>import NoHitsSuggestion from './NoHitsSuggestion';
<ide>
<ide> const CustomHits = connectHits(
<ide> ({
<ide> const CustomHits = connectHits(
<ide> handleHits
<ide> }) => {
<ide> const noHits = isEmpty(hits);
<add> const noHitsTitle = 'No tutorials found';
<ide> const footer = [
<ide> {
<ide> objectID: `footer-${searchQuery}`,
<ide> const CustomHits = connectHits(
<ide> : `https://www.freecodecamp.org/news/search/?query=${encodeURIComponent(
<ide> searchQuery
<ide> )}`,
<del> title: noHits
<del> ? 'No tutorials found'
<del> : `See all results for ${searchQuery}`,
<add> title: noHits ? noHitsTitle : `See all results for ${searchQuery}`,
<ide> _highlightResult: {
<ide> query: {
<ide> value: noHits
<del> ? 'No tutorials found'
<add> ? noHitsTitle
<ide> : `
<ide> <ais-highlight-0000000000>
<ide> See all results for
<ide> const CustomHits = connectHits(
<ide> data-fccobjectid={hit.objectID}
<ide> key={hit.objectID}
<ide> >
<del> <Suggestion
<del> handleMouseEnter={handleMouseEnter}
<del> handleMouseLeave={handleMouseLeave}
<del> hit={hit}
<del> />
<add> {noHits ? (
<add> <NoHitsSuggestion
<add> handleMouseEnter={handleMouseEnter}
<add> handleMouseLeave={handleMouseLeave}
<add> title={noHitsTitle}
<add> />
<add> ) : (
<add> <Suggestion
<add> handleMouseEnter={handleMouseEnter}
<add> handleMouseLeave={handleMouseLeave}
<add> hit={hit}
<add> />
<add> )}
<ide> </li>
<ide> ))}
<ide> </ul>
<ide><path>client/src/components/search/searchBar/SearchSuggestion.js
<ide> import { Highlight } from 'react-instantsearch-dom';
<ide>
<ide> const Suggestion = ({ hit, handleMouseEnter, handleMouseLeave }) => {
<ide> const dropdownFooter = hit.objectID.includes('footer-');
<del> const noHits = hit.title === 'No tutorials found';
<del> return noHits ? (
<del> <div
<del> className={'no-hits-footer fcc_suggestion_item'}
<del> onMouseEnter={handleMouseEnter}
<del> onMouseLeave={handleMouseLeave}
<del> >
<del> <span className='hit-name'>{hit.title}</span>
<del> </div>
<del> ) : (
<add> return (
<ide> <a
<ide> className={
<ide> dropdownFooter | 3 |
PHP | PHP | fix import order | 01a79b4a9695999b5f7a24de1553916e70e233e5 | <ide><path>tests/TestCase/Auth/FallbackPasswordHasherTest.php
<ide> use Cake\Auth\DefaultPasswordHasher;
<ide> use Cake\Auth\FallbackPasswordHasher;
<ide> use Cake\Auth\WeakPasswordHasher;
<del>use Cake\Utility\Security;
<ide> use Cake\TestSuite\TestCase;
<add>use Cake\Utility\Security;
<ide>
<ide> /**
<ide> * Test case for FallbackPasswordHasher | 1 |
Java | Java | fix non-deterministic test | a5d885d15164eda588d63170e1340a74579f0d83 | <ide><path>rxjava-core/src/test/java/rx/internal/util/RxRingBufferWithoutUnsafeTest.java
<ide> protected RxRingBuffer createRingBuffer() {
<ide> /**
<ide> * Single producer, 2 consumers. The request() ensures it gets scheduled back on the same Producer thread.
<ide> */
<del> @Test(timeout = 2000)
<add> @Test
<ide> public void testConcurrency() throws InterruptedException {
<ide> final RxRingBuffer b = createRingBuffer();
<del> final CountDownLatch latch = new CountDownLatch(255);
<add> final CountDownLatch emitLatch = new CountDownLatch(255);
<add> final CountDownLatch drainLatch = new CountDownLatch(2);
<ide>
<ide> final Scheduler.Worker w1 = Schedulers.newThread().createWorker();
<ide> Scheduler.Worker w2 = Schedulers.newThread().createWorker();
<ide> public void testConcurrency() throws InterruptedException {
<ide>
<ide> @Override
<ide> public void request(final long n) {
<del> System.out.println("request[" + c.incrementAndGet() + "]: " + n + " Thread: " + Thread.currentThread());
<add> // System.out.println("request[" + c.incrementAndGet() + "]: " + n + " Thread: " + Thread.currentThread());
<ide> w1.schedule(new Action0() {
<ide>
<ide> @Override
<ide> public void call() {
<del> if (latch.getCount() == 0) {
<add> if (emitLatch.getCount() == 0) {
<ide> return;
<ide> }
<ide> for (int i = 0; i < n; i++) {
<ide> try {
<del> emit.incrementAndGet();
<ide> b.onNext("one");
<add> emit.incrementAndGet();
<ide> } catch (MissingBackpressureException e) {
<ide> System.out.println("BackpressureException => item: " + i + " requested: " + n + " emit: " + emit.get() + " poll: " + poll.get());
<ide> backpressureExceptions.incrementAndGet();
<ide> }
<ide> }
<ide> // we'll release after n batches
<del> latch.countDown();
<add> emitLatch.countDown();
<ide> }
<ide>
<ide> });
<ide> }
<ide>
<ide> };
<ide> final TestSubscriber<String> ts = new TestSubscriber<String>();
<del>
<ide> w1.schedule(new Action0() {
<ide>
<ide> @Override
<ide> public void call() {
<ide>
<ide> });
<ide>
<del> w2.schedule(new Action0() {
<add> Action0 drainer = new Action0() {
<ide>
<ide> @Override
<ide> public void call() {
<ide> public void call() {
<ide> if (emitted > 0) {
<ide> ts.requestMore(emitted);
<ide> emitted = 0;
<add> } else {
<add> if (emitLatch.getCount() == 0) {
<add> // this works with SynchronizedQueue, if changing to a non-blocking Queue
<add> // then this will likely need to change like the SpmcTest version
<add> drainLatch.countDown();
<add> return;
<add> }
<ide> }
<ide> }
<ide> }
<ide>
<ide> }
<ide>
<del> });
<del>
<del> w3.schedule(new Action0() {
<add> };
<ide>
<del> @Override
<del> public void call() {
<del> int emitted = 0;
<del> while (true) {
<del> Object o = b.poll();
<del> if (o != null) {
<del> emitted++;
<del> poll.incrementAndGet();
<del> } else {
<del> if (emitted > 0) {
<del> ts.requestMore(emitted);
<del> emitted = 0;
<del> }
<del> }
<del> }
<del> }
<add> w2.schedule(drainer);
<add> w3.schedule(drainer);
<ide>
<del> });
<add> emitLatch.await();
<add> drainLatch.await();
<ide>
<del> latch.await();
<del> w1.unsubscribe();
<ide> w2.unsubscribe();
<ide> w3.unsubscribe();
<add> w1.unsubscribe(); // put this one last as unsubscribing from it can cause Exceptions to be throw in w2/w3
<ide>
<ide> System.out.println("emit: " + emit.get() + " poll: " + poll.get());
<ide> assertEquals(0, backpressureExceptions.get()); | 1 |
Ruby | Ruby | add which method | cc78050dc5607ac827429fe27686f71a7e77716c | <ide><path>Library/Homebrew/utils.rb
<ide> def puts_columns items, star_items=[]
<ide> end
<ide> end
<ide>
<add>def which cmd
<add> path = `/usr/bin/which #{cmd}`.chomp
<add> if path.empty?
<add> nil
<add> else
<add> Pathname.new(path)
<add> end
<add>end
<add>
<ide> def which_editor
<ide> editor = ENV['HOMEBREW_EDITOR'] || ENV['EDITOR']
<ide> # If an editor wasn't set, try to pick a sane default | 1 |
Python | Python | remove duplicate entry in __all__ | 6a1c30344b42d3fb49fa80e5a4e0d04493166f25 | <ide><path>celery/worker/state.py
<ide>
<ide> __all__ = ['SOFTWARE_INFO', 'reserved_requests', 'active_requests',
<ide> 'total_count', 'revoked', 'task_reserved', 'maybe_shutdown',
<del> 'task_accepted', 'task_ready', 'task_reserved', 'task_ready',
<del> 'Persistent']
<add> 'task_accepted', 'task_reserved', 'task_ready', 'Persistent']
<ide>
<ide> #: Worker software/platform information.
<ide> SOFTWARE_INFO = {'sw_ident': 'py-celery', | 1 |
Javascript | Javascript | move check into parse arc function | 139866dd3ff4b6a57074cd9c691aa4846393d3d6 | <ide><path>examples/jsm/loaders/SVGLoader.js
<ide> SVGLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
<ide> }
<ide>
<ide> break;
<del>
<add>
<ide> case 'L':
<ide> var numbers = parseFloats( data );
<ide>
<ide> SVGLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
<ide> var numbers = parseFloats( data );
<ide>
<ide> for ( var j = 0, jl = numbers.length; j < jl; j += 7 ) {
<add>
<ide> // skip command if start point == end point
<del> if( numbers[ j + 5 ] == point.x && numbers[ j + 6 ] == point.y ) continue
<add> if( numbers[ j + 5 ] == point.x && numbers[ j + 6 ] == point.y ) continue;
<ide>
<del>
<ide> var start = point.clone();
<ide> point.x = numbers[ j + 5 ];
<ide> point.y = numbers[ j + 6 ];
<ide> control.x = point.x;
<ide> control.y = point.y;
<del>
<del> if( numbers[ j ] == 0 || numbers[ j + 1 ] == 0 ) {
<del> // draw a line if either of the radii == 0
<del> path.lineTo( point.x, point.y );
<del> }
<del> else {
<del> parseArcCommand(
<del> path, numbers[ j ], numbers[ j + 1 ], numbers[ j + 2 ], numbers[ j + 3 ], numbers[ j + 4 ], start, point
<del> );
<del> }
<add> parseArcCommand(
<add> path, numbers[ j ], numbers[ j + 1 ], numbers[ j + 2 ], numbers[ j + 3 ], numbers[ j + 4 ], start, point
<add> );
<ide>
<ide> if ( j === 0 && doSetFirstPoint === true ) firstPoint.copy( point );
<ide>
<ide> SVGLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
<ide>
<ide> case 'a':
<ide> var numbers = parseFloats( data );
<add>
<ide> for ( var j = 0, jl = numbers.length; j < jl; j += 7 ) {
<del> // skip command if start point == end point
<del> if( numbers[ j + 5 ] == 0 && numbers[ j + 6 ] == 0 ) continue
<add>
<add> // skip command if no displacement
<add> if( numbers[ j + 5 ] == 0 && numbers[ j + 6 ] == 0 ) continue;
<ide>
<ide> var start = point.clone();
<ide> point.x += numbers[ j + 5 ];
<ide> point.y += numbers[ j + 6 ];
<ide> control.x = point.x;
<ide> control.y = point.y;
<del>
<del> if( numbers[ j ] == 0 || numbers[ j + 1 ] == 0 ) {
<del> // draw a line if either of the radii == 0
<del> path.lineTo( point.x, point.y );
<del> }
<del> else {
<del> parseArcCommand(
<del> path, numbers[ j ], numbers[ j + 1 ], numbers[ j + 2 ], numbers[ j + 3 ], numbers[ j + 4 ], start, point
<del> );
<del> }
<add> parseArcCommand(
<add> path, numbers[ j ], numbers[ j + 1 ], numbers[ j + 2 ], numbers[ j + 3 ], numbers[ j + 4 ], start, point
<add> );
<ide>
<ide> if ( j === 0 && doSetFirstPoint === true ) firstPoint.copy( point );
<add>
<ide> }
<ide>
<ide> break;
<ide> SVGLoader.prototype = Object.assign( Object.create( Loader.prototype ), {
<ide>
<ide> function parseArcCommand( path, rx, ry, x_axis_rotation, large_arc_flag, sweep_flag, start, end ) {
<ide>
<add> if( rx == 0 || ry == 0 ) {
<add> // draw a line if either of the radii == 0
<add> path.lineTo( end.x, end.y );
<add> return;
<add> }
<add>
<ide> x_axis_rotation = x_axis_rotation * Math.PI / 180;
<ide>
<ide> // Ensure radii are positive | 1 |
Python | Python | fix mape objective | 32f483fe33fc9ff5474067b4a284616def479d8f | <ide><path>keras/objectives.py
<ide> def mean_absolute_error(y_true, y_pred):
<ide> return T.abs_(y_pred - y_true).mean(axis=-1)
<ide>
<ide> def mean_absolute_percentage_error(y_true, y_pred):
<del> return T.abs_((y_true - y_pred) / y_true).mean() * 100
<add> return T.abs_((y_true - y_pred) / y_true).mean(axis=-1) * 100
<ide>
<ide> def mean_squared_logarithmic_error(y_true, y_pred):
<ide> return T.sqr(T.log(T.clip(y_pred, epsilon, np.inf) + 1.) - T.log(T.clip(y_true, epsilon, np.inf) + 1.)).mean(axis=-1) | 1 |
Text | Text | add docs for catch all routes | 23a04e466c9c82ad6a6ef0fd9c46102d7512bbe3 | <ide><path>docs/routing/dynamic-routes.md
<ide> Multiple dynamic route segments work the same way. The page `pages/post/[pid]/[c
<ide>
<ide> Client-side navigations to a dynamic route can be handled with [`next/link`](/docs/api-reference/next/link.md#dynamic-routes).
<ide>
<add>### Catch all routes
<add>
<add>Dynamic routes can be extended to catch all paths by adding three dots (`...`) inside the brackets. For example:
<add>
<add>- `pages/post/[...slug]` matches `/post/a`, but also `post/a/b`, `post/a/b/c` and so on.
<add>
<add>Matched parameters will be sent as a query parameter (`slug` in the example) to the page, and it will always be an array, so, the path `/post/a` will have the following `query` object:
<add>
<add>```json
<add>{ "slug": ["a"] }
<add>```
<add>
<add>And in the case of `post/a/b`, and any other matching path, new parameters will be added to the array, like so:
<add>
<add>```json
<add>{ "slug": ["a", "b"] }
<add>```
<add>
<add>> A good example of catch all routes is the Next.js docs, a single page called [pages/docs/[...slug].js](https://github.com/zeit/next-site/blob/master/pages/docs/%5B...slug%5D.js) takes care of all the docs you're currently looking at.
<add>
<ide> ## Caveats
<ide>
<ide> - Predefined routes take precedence over dynamic routes. Take a look at the following examples:
<ide><path>docs/routing/introduction.md
<ide> To match a dynamic segment you can use the bracket syntax. This allows you to ma
<ide>
<ide> - `pages/blog/[slug].js` → `/blog/:slug` (`/blog/hello-world`)
<ide> - `pages/[username]/settings.js` → `/:username/settings` (`/foo/settings`)
<add>- `pages/post/[...all]` → `/post/*` (`/post/2020/id/title`)
<ide>
<ide> ## Linking between pages
<ide> | 2 |
Mixed | Python | fix reversions plus a few improvements | 50ec3371b10a6eaafe73b903760eb43c869ea285 | <ide><path>slim/README.md
<ide> Model | TF-Slim File | Checkpoint | Top-1 Accuracy| Top-5 Accuracy |
<ide> [Inception V2](http://arxiv.org/abs/1502.03167)|[Code](https://github.com/tensorflow/models/blob/master/slim/nets/inception_v2.py)|[inception_v2_2016_08_28.tar.gz](http://download.tensorflow.org/models/inception_v2_2016_08_28.tar.gz)|73.9|91.8|
<ide> [Inception V3](http://arxiv.org/abs/1512.00567)|[Code](https://github.com/tensorflow/models/blob/master/slim/nets/inception_v3.py)|[inception_v3_2016_08_28.tar.gz](http://download.tensorflow.org/models/inception_v3_2016_08_28.tar.gz)|78.0|93.9|
<ide> [Inception V4](http://arxiv.org/abs/1602.07261)|[Code](https://github.com/tensorflow/models/blob/master/slim/nets/inception_v4.py)|[inception_v4_2016_09_09.tar.gz](http://download.tensorflow.org/models/inception_v4_2016_09_09.tar.gz)|80.2|95.2|
<del>[Inception-ResNet-v2](http://arxiv.org/abs/1602.07261)|[Code](https://github.com/tensorflow/models/blob/master/slim/nets/inception_resnet_v2.py)|[inception_resnet_v2_2016_08_30.tar.gz](http://download.tensorflow.org/models/inception_resnet_v2_2016_08_30.tar.gz)|80.4|95.3|
<del>[ResNet V1 50](https://arxiv.org/abs/1512.03385)|[Code](https://github.com/tensorflow/models/blob/master/slim/nets/resnet_v1.py)|[resnet_v1_50_2016_08_28.tar.gz](http://download.tensorflow.org/models/resnet_v1_50_2016_08_28.tar.gz)|75.2|92.2|
<del>[ResNet V1 101](https://arxiv.org/abs/1512.03385)|[Code](https://github.com/tensorflow/models/blob/master/slim/nets/resnet_v1.py)|[resnet_v1_101_2016_08_28.tar.gz](http://download.tensorflow.org/models/resnet_v1_101_2016_08_28.tar.gz)|76.4|92.9|
<del>[ResNet V1 152](https://arxiv.org/abs/1512.03385)|[Code](https://github.com/tensorflow/models/blob/master/slim/nets/resnet_v1.py)|[resnet_v1_152_2016_08_28.tar.gz](http://download.tensorflow.org/models/resnet_v1_152_2016_08_28.tar.gz)|76.8|93.2|
<del>[ResNet V2 50](https://arxiv.org/abs/1603.05027)^|[Code](https://github.com/tensorflow/models/blob/master/slim/nets/resnet_v2.py)|[resnet_v2_50_2017_04_14.tar.gz](http://download.tensorflow.org/models/resnet_v2_50_2017_04_14.tar.gz)|75.6|92.8|
<del>[ResNet V2 101](https://arxiv.org/abs/1603.05027)^|[Code](https://github.com/tensorflow/models/blob/master/slim/nets/resnet_v2.py)|[resnet_v2_101_2017_04_14.tar.gz](http://download.tensorflow.org/models/resnet_v2_101_2017_04_14.tar.gz)|77.0|93.7|
<del>[ResNet V2 152](https://arxiv.org/abs/1603.05027)^|[Code](https://github.com/tensorflow/models/blob/master/slim/nets/resnet_v2.py)|[resnet_v2_152_2017_04_14.tar.gz](http://download.tensorflow.org/models/resnet_v2_152_2017_04_14.tar.gz)|77.8|94.1|
<del>[ResNet V2 200](https://arxiv.org/abs/1603.05027)|[Code](https://github.com/tensorflow/models/blob/master/slim/nets/resnet_v2.py)|[TBA]()|79.9\*|95.2\*|
<del>[VGG 16](http://arxiv.org/abs/1409.1556.pdf)|[Code](https://github.com/tensorflow/models/blob/master/slim/nets/vgg.py)|[vgg_16_2016_08_28.tar.gz](http://download.tensorflow.org/models/vgg_16_2016_08_28.tar.gz)|71.5|89.8|
<del>[VGG 19](http://arxiv.org/abs/1409.1556.pdf)|[Code](https://github.com/tensorflow/models/blob/master/slim/nets/vgg.py)|[vgg_19_2016_08_28.tar.gz](http://download.tensorflow.org/models/vgg_19_2016_08_28.tar.gz)|71.1|89.8|
<del>[MobileNet_v1_1.0_224](https://arxiv.org/pdf/1704.04861.pdf)|[Code](https://github.com/tensorflow/models/blob/master/slim/nets/mobilenet_v1.py)|[mobilenet_v1_1.0_224_2017_06_14.tar.gz](http://download.tensorflow.org/models/mobilenet_v1_1.0_224_2017_06_14.tar.gz)|70.7|89.5|
<del>[MobileNet_v1_0.50_160](https://arxiv.org/pdf/1704.04861.pdf)|[Code](https://github.com/tensorflow/models/blob/master/slim/nets/mobilenet_v1.py)|[mobilenet_v1_0.50_160_2017_06_14.tar.gz](http://download.tensorflow.org/models/mobilenet_v1_0.50_160_2017_06_14.tar.gz)|59.9|82.5|
<del>[MobileNet_v1_0.25_128](https://arxiv.org/pdf/1704.04861.pdf)|[Code](https://github.com/tensorflow/models/blob/master/slim/nets/mobilenet_v1.py)|[mobilenet_v1_0.25_128_2017_06_14.tar.gz](http://download.tensorflow.org/models/mobilenet_v1_0.25_128_2017_06_14.tar.gz)|41.3|66.2|
<add>[Inception-ResNet-v2](http://arxiv.org/abs/1602.07261)|[Code](https://github.com/tensorflow/models/blob/master/slim/nets/inception_resnet_v2.py)|[inception_resnet_v2.tar.gz](http://download.tensorflow.org/models/inception_resnet_v2_2016_08_30.tar.gz)|80.4|95.3|
<add>[ResNet V1 50](https://arxiv.org/abs/1512.03385)|[Code](https://github.com/tensorflow/models/blob/master/slim/nets/resnet_v1.py)|[resnet_v1_50.tar.gz](http://download.tensorflow.org/models/resnet_v1_50_2016_08_28.tar.gz)|75.2|92.2|
<add>[ResNet V1 101](https://arxiv.org/abs/1512.03385)|[Code](https://github.com/tensorflow/models/blob/master/slim/nets/resnet_v1.py)|[resnet_v1_101.tar.gz](http://download.tensorflow.org/models/resnet_v1_101_2016_08_28.tar.gz)|76.4|92.9|
<add>[ResNet V1 152](https://arxiv.org/abs/1512.03385)|[Code](https://github.com/tensorflow/models/blob/master/slim/nets/resnet_v1.py)|[resnet_v1_152.tar.gz](http://download.tensorflow.org/models/resnet_v1_152_2016_08_28.tar.gz)|76.8|93.2|
<add>[ResNet V2 50](https://arxiv.org/abs/1603.05027)^|[Code](https://github.com/tensorflow/models/blob/master/slim/nets/resnet_v2.py)|[resnet_v2_50.tar.gz](http://download.tensorflow.org/models/resnet_v2_50_2017_04_14.tar.gz)|75.6|92.8|
<add>[ResNet V2 101](https://arxiv.org/abs/1603.05027)^|[Code](https://github.com/tensorflow/models/blob/master/slim/nets/resnet_v2.py)|[resnet_v2_101.tar.gz](http://download.tensorflow.org/models/resnet_v2_101_2017_04_14.tar.gz)|77.0|93.7|
<add>[ResNet V2 152](https://arxiv.org/abs/1603.05027)^|[Code](https://github.com/tensorflow/models/blob/master/slim/nets/resnet_v2.py)|[resnet_v2_152.tar.gz](http://download.tensorflow.org/models/resnet_v2_152_2017_04_14.tar.gz)|77.8|94.1|
<add>[VGG 16](http://arxiv.org/abs/1409.1556.pdf)|[Code](https://github.com/tensorflow/models/blob/master/slim/nets/vgg.py)|[vgg_16.tar.gz](http://download.tensorflow.org/models/vgg_16_2016_08_28.tar.gz)|71.5|89.8|
<add>[VGG 19](http://arxiv.org/abs/1409.1556.pdf)|[Code](https://github.com/tensorflow/models/blob/master/slim/nets/vgg.py)|[vgg_19.tar.gz](http://download.tensorflow.org/models/vgg_19_2016_08_28.tar.gz)|71.1|89.8|
<ide>
<ide> ^ ResNet V2 models use Inception pre-processing and input image size of 299 (use
<ide> `--preprocessing_name inception --eval_image_size 299` when using
<ide> `eval_image_classifier.py`). Performance numbers for ResNet V2 models are
<del>reported on ImageNet valdiation set.
<add>reported on the ImageNet validation set.
<ide>
<ide> All 16 MobileNet Models reported in the [MobileNet Paper](https://arxiv.org/abs/1704.04861) can be found [here](https://github.com/tensorflow/models/tree/master/slim/nets/mobilenet_v1.md).
<ide>
<ide><path>slim/nets/nets_factory.py
<ide> def get_network_fn(name, num_classes, weight_decay=0.0, is_training=False):
<ide> """
<ide> if name not in networks_map:
<ide> raise ValueError('Name of network unknown %s' % name)
<del> arg_scope = arg_scopes_map[name](weight_decay=weight_decay)
<ide> func = networks_map[name]
<ide> @functools.wraps(func)
<ide> def network_fn(images):
<add> arg_scope = arg_scopes_map[name](weight_decay=weight_decay)
<ide> with slim.arg_scope(arg_scope):
<ide> return func(images, num_classes, is_training=is_training)
<ide> if hasattr(func, 'default_image_size'):
<ide><path>slim/preprocessing/preprocessing_factory.py
<ide> def get_preprocessing(name, is_training=False):
<ide> 'resnet_v1_50': vgg_preprocessing,
<ide> 'resnet_v1_101': vgg_preprocessing,
<ide> 'resnet_v1_152': vgg_preprocessing,
<add> 'resnet_v1_200': vgg_preprocessing,
<add> 'resnet_v2_50': vgg_preprocessing,
<add> 'resnet_v2_101': vgg_preprocessing,
<add> 'resnet_v2_152': vgg_preprocessing,
<add> 'resnet_v2_200': vgg_preprocessing,
<ide> 'vgg': vgg_preprocessing,
<ide> 'vgg_a': vgg_preprocessing,
<ide> 'vgg_16': vgg_preprocessing, | 3 |
PHP | PHP | fix typos and cs | d0cbb90a310b6842c80867c9ab3d83be5b866c8f | <ide><path>src/Database/Type/DateTimeType.php
<ide> class DateTimeType extends \Cake\Database\Type
<ide> protected $_useLocaleParser = false;
<ide>
<ide> /**
<del> * The date formate to use for parsing incoming dates for marshalling.
<add> * The date format to use for parsing incoming dates for marshalling.
<ide> *
<ide> * @var string|array|int
<ide> */
<ide> public function marshal($value)
<ide> }
<ide>
<ide> /**
<del> * Sets whether or not to pase dates passed to the marshal() function
<add> * Sets whether or not to parse dates passed to the marshal() function
<ide> * by using a locale aware parser.
<ide> *
<ide> * @param bool $enable Whether or not to enable
<ide> * @return $this
<ide> */
<del> public function useLocaleParser($enable = true)
<del> {
<add> public function useLocaleParser($enable = true)
<add> {
<ide> if ($enable === false) {
<ide> $this->_useLocaleParser = $enable;
<ide> return $this;
<ide><path>src/I18n/Time.php
<ide> public static function setToStringFormat($format)
<ide> /**
<ide> * Returns a new Time object after parsing the provided time string based on
<ide> * the passed or configured date time format. This method is locale dependent,
<del> * Any string that is passed to this function will be intepreted as a locale
<add> * Any string that is passed to this function will be interpreted as a locale
<ide> * dependent string.
<ide> *
<ide> * When no $format is provided, the `toString` format will be used.
<ide> public static function setToStringFormat($format)
<ide> *
<ide> * Example:
<ide> *
<del> * {{{
<add> * ```
<ide> * $time = Time::parseDateTime('10/13/2013 12:54am');
<ide> * $time = Time::parseDateTime('13 Oct, 2013 13:54', 'dd MMM, y H:mm');
<ide> * $time = Time::parseDateTime('10/10/2015', [IntlDateFormatter::SHORT, -1]);
<del> * }}}
<add> * ```
<ide> *
<ide> * @param string $time The time string to parse.
<ide> * @param string|array $format Any format accepted by IntlDateFormatter.
<ide> public static function parseDateTime($time, $format = null)
<ide> /**
<ide> * Returns a new Time object after parsing the provided $date string based on
<ide> * the passed or configured date time format. This method is locale dependent,
<del> * Any string that is passed to this function will be intepreted as a locale
<add> * Any string that is passed to this function will be interpreted as a locale
<ide> * dependent string.
<ide> *
<ide> * When no $format is provided, the `wordFormat` format will be used.
<ide> public static function parseDateTime($time, $format = null)
<ide> *
<ide> * Example:
<ide> *
<del> * {{{
<add> * ```
<ide> * $time = Time::parseDate('10/13/2013');
<ide> * $time = Time::parseDate('13 Oct, 2013', 'dd MMM, y');
<ide> * $time = Time::parseDate('13 Oct, 2013', IntlDateFormatter::SHORT);
<del> * }}}
<add> * ```
<ide> *
<ide> * @param string $date The date string to parse.
<ide> * @param string|int $format Any format accepted by IntlDateFormatter.
<ide> public static function parseDate($date, $format = null)
<ide> /**
<ide> * Returns a new Time object after parsing the provided $time string based on
<ide> * the passed or configured date time format. This method is locale dependent,
<del> * Any string that is passed to this function will be intepreted as a locale
<add> * Any string that is passed to this function will be interpreted as a locale
<ide> * dependent string.
<ide> *
<ide> * When no $format is provided, the IntlDateFormatter::SHORT format will be used.
<ide> public static function parseDate($date, $format = null)
<ide> *
<ide> * Example:
<ide> *
<del> * {{{
<add> * ```
<ide> * $time = Time::parseDate('11:23pm');
<del> * }}}
<add> * ```
<ide> *
<ide> * @param string $time The time string to parse.
<ide> * @param string|int $format Any format accepted by IntlDateFormatter. | 2 |
Ruby | Ruby | mind the order of things | 74b7bfa6d2c5c777b11cb6ea8687c0461b579f7e | <ide><path>activerecord/lib/active_record/connection_adapters/frontbase_adapter.rb
<ide> def native_database_types #:nodoc
<ide> # Quotes the column value to help prevent
<ide> # {SQL injection attacks}[http://en.wikipedia.org/wiki/SQL_injection].
<ide> def quote(value, column = nil)
<add> return value.quoted_id if value.respond_to?(:quoted_id)
<add>
<ide> retvalue = "<INVALID>"
<del>
<add>
<ide> puts "quote(#{value.inspect}(#{value.class}),#{column.type.inspect})" if FB_TRACE
<ide> # If a column was passed in, use column type information
<ide> unless value.nil?
<ide><path>activerecord/lib/active_record/connection_adapters/oracle_adapter.rb
<ide> def quote_string(string) #:nodoc:
<ide> end
<ide>
<ide> def quote(value, column = nil) #:nodoc:
<add> return value.quoted_id if value.respond_to?(:quoted_id)
<add>
<ide> if column && [:text, :binary].include?(column.type)
<ide> %Q{empty_#{ column.sql_type rescue 'blob' }()}
<ide> else
<ide><path>activerecord/lib/active_record/connection_adapters/sqlserver_adapter.rb
<ide> def rollback_db_transaction
<ide> end
<ide>
<ide> def quote(value, column = nil)
<add> return value.quoted_id if value.respond_to?(:quoted_id)
<add>
<ide> case value
<ide> when String
<ide> if column && column.type == :binary && column.class.respond_to?(:string_to_binary)
<ide><path>activerecord/lib/active_record/connection_adapters/sybase_adapter.rb
<ide> def quoted_false
<ide> end
<ide>
<ide> def quote(value, column = nil)
<add> return value.quoted_id if value.respond_to?(:quoted_id)
<add>
<ide> case value
<ide> when String
<ide> if column && column.type == :binary && column.class.respond_to?(:string_to_binary)
<ide><path>activerecord/test/finder_test.rb
<ide> def test_find_by_empty_in_condition
<ide> end
<ide>
<ide> def test_find_by_records
<del> p1, p2 = Post.find(1, 2)
<del> assert_equal [p1, p2], Post.find(:all, :conditions => ['id in (?)', [p1, p2]]).sort_by { |p| p.id }
<del> end
<del>
<del> def test_find_by_records_and_ids
<del> p1, p2 = Post.find(1, 2)
<del> assert_equal [p1, p2], Post.find(:all, :conditions => ['id in (?)', [p1, p2.id]]).sort_by { |p| p.id }
<add> p1, p2 = Post.find(:all, :limit => 2, :order => 'id asc')
<add> assert_equal [p1, p2], Post.find(:all, :conditions => ['id in (?)', [p1, p2]], :order => 'id asc')
<add> assert_equal [p1, p2], Post.find(:all, :conditions => ['id in (?)', [p1, p2.id]], :order => 'id asc')
<ide> end
<ide>
<ide> def test_select_value | 5 |
Text | Text | add changelog entry for precompile config | a3913ca3ba0141d58e2e8c89dad268245e19ddb7 | <ide><path>actionpack/CHANGELOG.md
<ide> ## Rails 4.0.0 (unreleased) ##
<ide>
<add>* Only non-js/css under app/assets path will be included in default config.assets.precompile.
<add>
<add> *Josh Peek*
<add>
<ide> * Remove support for the RAILS_ASSET_ID environment configuration
<ide> (no longer needed now that we have the asset pipeline).
<ide> | 1 |
Python | Python | add more info to the docstrings | dd804f25dac715c399ad805e116d36c952612207 | <ide><path>libcloud/storage/drivers/s3.py
<ide> def delete_object(self, obj):
<ide> def ex_iterate_multipart_uploads(self, container, prefix=None,
<ide> delimiter=None):
<ide> """
<del> Extension method for listing all S3 multipart uploads.
<add> Extension method for listing all in-progress S3 multipart uploads.
<add>
<add> Each multipart upload which has not been committed or aborted is
<add> considered in-progress.
<ide>
<ide> @param container: The container holding the uploads
<ide> @type container: L{Container}
<ide> def ex_iterate_multipart_uploads(self, container, prefix=None,
<ide>
<ide> def ex_cleanup_all_multipart_uploads(self, container, prefix=None):
<ide> """
<del> Extension method for removing S3 multipart uploads.
<add> Extension method for removing all partially completed S3 multipart
<add> uploads.
<ide>
<ide> @param container: The container holding the uploads
<ide> @type container: L{Container} | 1 |
Text | Text | add a new blog post about mixins | b0136b37c5dc7ea25fff139dd74397a6ef140b35 | <ide><path>docs/_posts/2016-07-13-mixins-considered-harmful.md
<add>---
<add>title: "Mixins Considered Harmful"
<add>author: gaearon
<add>---
<add>
<add>“How do I share the code between several components?” is one of the first questions that people ask when they learn React. Our answer has always been to use component composition for code reuse. You can define a component and use it in several other components.
<add>
<add>It is not always obvious how a certain pattern can be solved with composition. React is influenced by functional programming but it came into the field that was dominated by object-oriented libraries. It was hard for engineers both inside and outside of Facebook to give up on the patterns they were used to.
<add>
<add>To ease the initial adoption and learning, we included certain escape hatches into React. The mixin system was one of those escape hatches, and its goal was to give you a way to reuse code between components when you aren’t sure how to solve the same problem with composition.
<add>
<add>Three years passed since React was released. The landscape has changed. Multiple view libraries now adopt a component model similar to React. Using composition over inheritance to build declarative user interfaces is no longer a novelty. We are also more confident in the React component model, and we have seen many creative uses of it both internally and in the community.
<add>
<add>In this post, we will consider the problems commonly caused by mixins. Then we will suggest several alternative patterns for the same use cases. We have found those patterns to scale better with the complexity of the codebase than mixins.
<add>
<add>## Why Mixins are Broken
<add>
<add>At Facebook, React usage has grown from a few components to thousands of them. This gives us a window into how people use React. Thanks to declarative rendering and top-down data flow, many teams were able to fix a bunch of bugs while shipping new features as they adopted React.
<add>
<add>However it’s inevitable that some of our code using React gradually became incomprehensible. Occasionally, the React team would see groups of components in different projects that people were afraid to touch. These components were too easy to break accidentally, were confusing to new developers, and eventually became just as confusing to the people who wrote them in the first place. Much of this confusion was caused by mixins. At the time, I wasn’t working at Facebook but I came to the [same conclusions](https://medium.com/@dan_abramov/mixins-are-dead-long-live-higher-order-components-94a0d2f9e750) after writing my fair share of terrible mixins.
<add>
<add>This doesn’t mean that mixins themselves are bad. People successfully employ them in different languages and paradigms, including some functional languages. At Facebook, we extensively use traits in Hack which are fairly similar to mixins. Nevertheless, we think that mixins are unnecessary and problematic in React codebases. Here’s why.
<add>
<add>### Mixins introduce implicit dependencies
<add>
<add>Sometimes a component relies on a certain method defined in the mixin, such as `getClassName()`. Sometimes it’s the other way around, and mixin calls a method like `renderHeader()` on the component. JavaScript is a dynamic language so it’s hard to enforce or document these dependencies.
<add>
<add>Mixins break the common and usually safe assumption that you can rename a state key or a method by searching for its occurrences in the component file. You might write a stateful component and then your coworker might add a mixin that reads this state. In a few months, you might want to move that state up to the parent component so it can be shared with a sibling. Will you remember to update the mixin to read a prop instead? What if, by now, other components also use this mixin?
<add>
<add>These implicit dependencies make it hard for new team members to contribute to a codebase. A component’s `render()` method might reference some method that isn’t defined on the class. Is it safe to remove? Perhaps it’s defined in one of the mixins. But which one of them? You need to scroll up to the mixin list, open each of those files, and look for this method. Worse, mixins can specify their own mixins, so the search can be deep.
<add>
<add>Often, mixins come to depend on other mixins, and removing one of them breaks the other. In these situations it is very tricky to tell how the data flows in and out of mixins, and what their dependency graph looks like. Unlike components, mixins don’t form a hierarchy: they are flattened and operate in the same namespace.
<add>
<add>### Mixins cause name clashes
<add>
<add>There is no guarantee that two particular mixins can be used together. For example, if `FluxListenerMixin` defines `handleChange()` and `WindowSizeMixin` defines `handleChange()`, you can’t use them together. You also can’t define a method with this name on your own component.
<add>
<add>It’s not a big deal if you control the mixin code. When you have a conflict, you can rename that method on one of the mixins. However it’s tricky because some components or other mixins may already be calling this method directly, and you need to find and fix those calls as well.
<add>
<add>If you have a name conflict with a mixin from a third party package, you can’t just rename a method on it. Instead, you have to use awkward method names on your component to avoid clashes.
<add>
<add>The situation is no better for mixin authors. Even adding a new method to a mixin is always a potentially breaking change because a method with the same name might already exist on some of the components using it, either directly or through another mixin. Once written, mixins are hard to remove or change. Bad ideas don’t get refactored away because refactoring is too risky.
<add>
<add>### Mixins cause snowballing complexity
<add>
<add>Even when mixins start out simple, they tend to become complex over time. The example below is based on a real scenario I’ve seen play out in a codebase.
<add>
<add>A component needs some state to track mouse hover. To keep this logic reusable, you might extract `handleMouseEnter()`, `handleMouseLeave()` and `isHovering()` into a `HoverMixin`. Next, somebody needs to implement a tooltip. They don’t want to duplicate the logic in `HoverMixin` so they create a `TooltipMixin` that uses `HoverMixin`. `TooltipMixin` reads `isHovering()` provided by `HoverMixin` in its `componentDidUpdate()` and either shows or hides the tooltip.
<add>
<add>A few months later, somebody wants to make the tooltip direction configurable. In an effort to avoid code duplication, they add support for a new optional method called `getTooltipOptions()` to `TooltipMixin`. By this time, components that show popovers also use `HoverMixin`. However popovers need a different hover delay. To solve this, somebody adds support for an optional `getHoverOptions()` method and implements it in `TooltipMixin`. Those mixins are now tightly coupled.
<add>
<add>This is fine while there are no new requirements. However this solution doesn’t scale well. What if you want to support displaying multiple tooltips in a single component? You can’t define the same mixin twice in a component. What if the tooltips need to be displayed automatically in a guided tour instead of on hover? Good luck decoupling `TooltipMixin` from `HoverMixin`. What if you need to support the case where the hover area and the tooltip anchor are located in different components? You can’t easily hoist the state used by mixin up into the parent component. Unlike components, mixins don’t lend themselves naturally to such changes.
<add>
<add>Every new requirement makes the mixins harder to understand. Components using the same mixin become increasingly coupled with time. Any new capability gets added to all of the components using that mixin. There is no way to split a “simpler” part of the mixin without either duplicating the code or introducing more dependencies and indirection between mixins. Gradually, the encapsulation boundaries erode, and since it’s hard to change or remove the existing mixins, they keep getting more abstract until nobody understands how they work.
<add>
<add>These are the same problems we faced building apps before React. We found that they are solved by declarative rendering, top-down data flow, and encapsulated components. At Facebook, we have been migrating our code to use alternative patterns to mixins, and we are generally happy with the results. You can read about those patterns below.
<add>
<add>## Migrating from Mixins
<add>
<add>Let’s make it clear that mixins are not technically deprecated. If you use `React.createClass()`, you may keep using them. We only say that they didn’t work well for us, and so we won’t recommend using them in the future.
<add>
<add>Every section below corresponds to a mixin usage pattern that we found in the Facebook codebase. For each of them, we describe the problem and a solution that we think works better than mixins. The examples are written in ES5 but once you don’t need mixins, you can switch to ES6 classes if you’d like.
<add>
<add>We hope that you find this list helpful. Please let us know if we missed important use cases so we can either amend the list or be proven wrong!
<add>
<add>### Performance Optimizations
<add>
<add>One of the most commonly used mixins is [`PureRenderMixin`](/react/docs/pure-render-mixin.html). You might be using it in some components to [prevent unnecessary re-renders](/react/docs/advanced-performance.html#shouldcomponentupdate-in-action) when the props and state are shallowly equal to the previous props and state:
<add>
<add>```javascript
<add>var PureRenderMixin = require('react-addons-pure-render-mixin');
<add>
<add>var Button = React.createClass({
<add> mixins: [PureRenderMixin],
<add>
<add> // ...
<add>
<add>});
<add>```
<add>
<add>#### Solution
<add>
<add>To express the same without mixins, you can use the [`shallowCompare`](/react/docs/shallow-compare.html) function directly instead:
<add>
<add>```js
<add>var shallowCompare = require('react-addons-shallow-compare');
<add>
<add>var Button = React.createClass({
<add> shouldComponentUpdate: function(nextProps, nextState) {
<add> return shallowCompare(this, nextProps, nextState);
<add> },
<add>
<add> // ...
<add>
<add>});
<add>```
<add>
<add>If you use a custom mixin implementing a `shouldComponentUpdate` function with different algorithm, we suggest exporting just that single function from a module and calling it directly from your components.
<add>
<add>We understand that more typing can be annoying. For the most common case, we plan to [introduce a new base class](https://github.com/facebook/react/pull/7195) called `React.PureComponent` in the next minor release. It uses the same shallow comparison as `PureRenderMixin` does today.
<add>
<add>### Subscriptions and Side Effects
<add>
<add>The second most common type of mixins that we encountered are mixins that subscribe a React component to a third-party data source. Whether this data source is a Flux Store, an Rx Observable, or something else, the pattern is very similar: the subscription is created in `componentDidMount`, destroyed in `componentWillUnmount`, and the change handler calls `this.setState()`.
<add>
<add>```javascript
<add>var SubscriptionMixin = {
<add> getInitialState: function() {
<add> return {
<add> comments: DataSource.getComments()
<add> };
<add> },
<add>
<add> componentDidMount: function() {
<add> DataSource.addChangeListener(this.handleChange);
<add> },
<add>
<add> componentWillUnmount: function() {
<add> DataSource.removeChangeListener(this.handleChange);
<add> },
<add>
<add> handleChange: function() {
<add> this.setState({
<add> comments: DataSource.getComments()
<add> });
<add> }
<add>};
<add>
<add>var CommentList = React.createClass({
<add> mixins: [SubscriptionMixin],
<add>
<add> render: function() {
<add> // Reading comments from state managed by mixin.
<add> var comments = this.state.comments;
<add> return (
<add> <div>
<add> {comments.map(function(comment) {
<add> return <Comment comment={comment} key={comment.id} />
<add> })}
<add> </div>
<add> )
<add> }
<add>});
<add>
<add>module.exports = CommentList;
<add>```
<add>
<add>#### Solution
<add>
<add>If there is just one component subscribed to this data source, it is fine to embed the subscription logic right into the component. Avoid premature abstractions.
<add>
<add>If several components used this mixin to subscribe to a data source, a nice way to avoid repetition is to use a pattern called [“higher-order components”](https://medium.com/@dan_abramov/mixins-are-dead-long-live-higher-order-components-94a0d2f9e750). It can sound intimidating so we will take a closer look at how this pattern naturally emerges from the component model.
<add>
<add>#### Higher-Order Components Explained
<add>
<add>Let’s forget about React for a second. Consider these two functions that add and multiply numbers, logging the results as they do that:
<add>
<add>```js
<add>function addAndLog(x, y) {
<add> var result = x + y;
<add> console.log('result:', result);
<add> return result;
<add>}
<add>
<add>function multiplyAndLog(x, y) {
<add> var result = x * y;
<add> console.log('result:', result);
<add> return result;
<add>}
<add>```
<add>
<add>These two functions are not very useful but they help us demonstrate a pattern that we can later apply to components.
<add>
<add>Let’s say that we want to extract the logging logic out of these functions without changing their signatures. How can we do this? An elegant solution is to write a [higher-order function](https://en.wikipedia.org/wiki/Higher-order_function), that is, a function that takes a function as an argument and returns a function.
<add>
<add>Again, it sounds more intimidating than it really is:
<add>
<add>```js
<add>function withLogging(wrappedFunction) {
<add> // Return a function with the same API...
<add> return function(x, y) {
<add> // ... that calls the original function
<add> var result = wrappedFunction(x, y);
<add> // ... but also logs its result!
<add> console.log('result:', result);
<add> return result;
<add> };
<add>}
<add>```
<add>
<add>The `withLogging` higher-order function lets us write `add` and `multiply` without the logging statements, and later wrap them to get `addAndLog` and `multiplyAndLog` with exactly the same signatures as before:
<add>
<add>```js
<add>function add(x, y) {
<add> return x + y;
<add>}
<add>
<add>function multiply(x, y) {
<add> return x * y;
<add>}
<add>
<add>function withLogging(wrappedFunction) {
<add> return function(x, y) {
<add> var result = wrappedFunction(x, y);
<add> console.log('result:', result);
<add> return result;
<add> };
<add>}
<add>
<add>// Equivalent to writing addAndLog by hand:
<add>var addAndLog = withLogging(add);
<add>
<add>// Equivalent to writing multiplyAndLog by hand:
<add>var multiplyAndLog = withLogging(multiply);
<add>```
<add>
<add>Higher-order components are a very similar pattern, but applied to components in React. We will apply this transformation from mixins in two steps.
<add>
<add>As a first step, we will split our `CommentList` component in two, a child and a parent. The child will be only concerned with rendering the comments. The parent will set up the subscription and pass the up-to-date data to the child via props.
<add>
<add>```js
<add>// This is a child component.
<add>// It only renders the comments it receives as props.
<add>var CommentList = React.createClass({
<add> render: function() {
<add> // Note: now reading from props rather than state.
<add> var comments = this.props.comments;
<add> return (
<add> <div>
<add> {comments.map(function(comment) {
<add> return <Comment comment={comment} key={comment.id} />
<add> })}
<add> </div>
<add> )
<add> }
<add>});
<add>
<add>// This is a parent component.
<add>// It subscribes to the data source and renders <CommentList />.
<add>var CommentListWithSubscription = React.createClass({
<add> getInitialState: function() {
<add> return {
<add> comments: DataSource.getComments()
<add> };
<add> },
<add>
<add> componentDidMount: function() {
<add> DataSource.addChangeListener(this.handleChange);
<add> },
<add>
<add> componentWillUnmount: function() {
<add> DataSource.removeChangeListener(this.handleChange);
<add> },
<add>
<add> handleChange: function() {
<add> this.setState({
<add> comments: DataSource.getComments()
<add> });
<add> },
<add>
<add> render: function() {
<add> // We pass the current state as props to CommentList.
<add> return <CommentList comments={this.state.comments} />;
<add> }
<add>});
<add>
<add>module.exports = CommentListWithSubscription;
<add>```
<add>
<add>There is just one final step left to do.
<add>
<add>Remember how we made `withLogging()` take a function and return another function wrapping it? We can apply a similar pattern to React components.
<add>
<add>We will write a new function called `withSubscription(WrappedComponent)`. Its argument could be any React component. We will pass `CommentList` as `WrappedComponent`, but we could also apply `withSubscription()` to any other component in our codebase.
<add>
<add>This function would return another component. The returned component would manage the subscription and render `<WrappedComponent />` with the current data.
<add>
<add>We call this pattern a “higher-order component”.
<add>
<add>The composition happens at React rendering level rather than with a direct function call. This is why it doesn’t matter whether the wrapped component is defined with `createClass()`, as an ES6 class or a function. If `WrappedComponent` is a React component, the component created by `withSubscription()` can render it.
<add>
<add>```js
<add>// This function takes a component...
<add>function withSubscription(WrappedComponent) {
<add> // ...and returns another component...
<add> return React.createClass({
<add> getInitialState: function() {
<add> return {
<add> comments: DataSource.getComments()
<add> };
<add> },
<add>
<add> componentDidMount: function() {
<add> // ... that takes care of the subscription...
<add> DataSource.addChangeListener(this.handleChange);
<add> },
<add>
<add> componentWillUnmount: function() {
<add> DataSource.removeChangeListener(this.handleChange);
<add> },
<add>
<add> handleChange: function() {
<add> this.setState({
<add> comments: DataSource.getComments()
<add> });
<add> },
<add>
<add> render: function() {
<add> // ... and renders the wrapped component with the fresh data!
<add> return <WrappedComponent comments={this.state.comments} />;
<add> }
<add> });
<add>}
<add>```
<add>
<add>Now we can declare `CommentListWithSubscription` by applying `withSubscription` to `CommentList`:
<add>
<add>```js
<add>var CommentList = React.createClass({
<add> render: function() {
<add> var comments = this.props.comments;
<add> return (
<add> <div>
<add> {comments.map(function(comment) {
<add> return <Comment comment={comment} key={comment.id} />
<add> })}
<add> </div>
<add> )
<add> }
<add>});
<add>
<add>// withSubscription() returns a new component that
<add>// is subscribed to the data source and renders
<add>// <CommentList /> with up-to-date data.
<add>var CommentListWithSubscription = withSubscription(CommentList);
<add>
<add>// The rest of the app is interested in the subscribed component
<add>// so we export it instead of the original unwrapped CommentList.
<add>module.exports = CommentListWithSubscription;
<add>```
<add>
<add>#### Solution, Revisited
<add>
<add>Now that we understand higher-order components better, let’s take another look at the complete solution that doesn’t involve mixins. There are a few minor changes that are annotated with inline comments:
<add>
<add>```js
<add>function withSubscription(WrappedComponent) {
<add> return React.createClass({
<add> getInitialState: function() {
<add> return {
<add> comments: DataSource.getComments()
<add> };
<add> },
<add>
<add> componentDidMount: function() {
<add> DataSource.addChangeListener(this.handleChange);
<add> },
<add>
<add> componentWillUnmount: function() {
<add> DataSource.removeChangeListener(this.handleChange);
<add> },
<add>
<add> handleChange: function() {
<add> this.setState({
<add> comments: DataSource.getComments()
<add> });
<add> },
<add>
<add> render: function() {
<add> // Use JSX spread syntax to pass all props and state down automatically.
<add> return <WrappedComponent {...this.props} {...this.state} />;
<add> }
<add> });
<add>}
<add>
<add>// Optional change: convert CommentList to a functional component
<add>// because it doesn't use lifecycle hooks or state.
<add>function CommentList(props) {
<add> var comments = props.comments;
<add> return (
<add> <div>
<add> {comments.map(function(comment) {
<add> return <Comment comment={comment} key={comment.id} />
<add> })}
<add> </div>
<add> )
<add>}
<add>
<add>// Instead of declaring CommentListWithSubscription,
<add>// we export the wrapped component right away.
<add>module.exports = withSubscription(CommentList);
<add>```
<add>
<add>Higher-order components are a powerful pattern. You can pass additional arguments to them if you want to further customize their behavior. After all, they are not even a feature of React. They are just functions that receive components and return components that wrap them.
<add>
<add>Like any solution, higher-order components have their own pitfalls. For example, if you heavily use [refs](/react/docs/more-about-refs.html), you might notice that wrapping something into a higher-order component changes the ref to point to the wrapping component. In practice we discourage using refs for component communication so we don’t think it’s a big issue. In the future, we might consider adding [ref forwarding](https://github.com/facebook/react/issues/4213) to React to solve this annoyance.
<add>
<add>### Rendering Logic
<add>
<add>The next most common use case for mixins that we discovered in our codebase is sharing rendering logic between components.
<add>
<add>Here is a typical example of this pattern:
<add>
<add>```js
<add>var RowMixin = {
<add> // Called by components from render()
<add> renderHeader: function() {
<add> return (
<add> <div className='row-header'>
<add> <h1>
<add> {this.getHeaderText() /* Defined by components */}
<add> </h1>
<add> </div>
<add> );
<add> }
<add>};
<add>
<add>var UserRow = React.createClass({
<add> mixins: [RowMixin],
<add>
<add> // Called by RowMixin.renderHeader()
<add> getHeaderText: function() {
<add> return this.props.user.fullName;
<add> },
<add>
<add> render: function() {
<add> return (
<add> <div>
<add> {this.renderHeader() /* Defined by RowMixin */}
<add> <h2>{this.props.user.biography}
<add> </div>
<add> )
<add> }
<add>});
<add>```
<add>
<add>Multiple components may be sharing `RowMixin` to render the header, and each of them would need to define `getHeaderText()`.
<add>
<add>#### Solution
<add>
<add>If you see rendering logic inside a mixin, it’s time to extract a component!
<add>
<add>Instead of `RowMixin`, we will define a `<Row>` component. We will also replace the convention of defining a `getHeaderText()` method with the standard mechanism of top-data flow in React: passing props.
<add>
<add>Finally, since neither of those components currently need lifecycle hooks or state, we can declare them as simple functions:
<add>
<add>```js
<add>function RowHeader(props) {
<add> return (
<add> <div className='row-header'>
<add> <h1>{props.text}</h1>
<add> </div>
<add> );
<add>}
<add>
<add>function UserRow(props) {
<add> return (
<add> <div>
<add> <RowHeader text={props.user.fullName} />
<add> <h2>{props.user.biography}
<add> </div>
<add> );
<add>}
<add>```
<add>
<add>Props keep component dependencies explicit, easy to replace, and enforceable with tools like [Flow](https://flowtype.org/) and [TypeScript](https://www.typescriptlang.org/).
<add>
<add>> **Note:**
<add>>
<add>> Defining components as functions is not required. There is also nothing wrong with using lifecycle hooks and state—they are first-class React features. We use functional components in this example because they are easier to read and we didn’t need those extra features, but classes would work just as fine.
<add>
<add>### Context
<add>
<add>Another group of mixins we discovered were helpers for providing and consuming [React context](/react/docs/context.html). Context is an experimental unstable feature, has [certain issues](https://github.com/facebook/react/issues/2517), and will likely change its API in the future. We don’t recommend using it unless you’re confident there is no other way of solving your problem.
<add>
<add>Nevertheless, if you already use context today, you might have been hiding its usage with mixins like this:
<add>
<add>```js
<add>var RouterMixin = {
<add> contextTypes: {
<add> router: React.PropTypes.object.isRequired
<add> },
<add>
<add> // The mixin provides a method so that components
<add> // don't have to use the context API directly.
<add> push: function(path) {
<add> this.context.router.push(path)
<add> }
<add>};
<add>
<add>var Link = React.createClass({
<add> handleClick: function(e) {
<add> e.stopPropagation();
<add>
<add> // This method is defined in RouterMixin.
<add> this.push(this.props.to);
<add> },
<add>
<add> render: function() {
<add> return (
<add> <a onClick={this.handleClick}>
<add> {this.props.children}
<add> </a>
<add> );
<add> }
<add>});
<add>
<add>module.exports = Link;
<add>```
<add>
<add>#### Solution
<add>
<add>We agree that hiding context usage from consuming components is a good idea until the context API stabilizes. However, we recommend using higher-order components instead of mixins for this.
<add>
<add>Let the wrapping component grab something from the context, and pass it down with props to the wrapped component:
<add>
<add>```js
<add>function withRouter(WrappedComponent) {
<add> return React.createClass({
<add> contextTypes: {
<add> router: React.PropTypes.object.isRequired
<add> },
<add>
<add> render: function() {
<add> // The wrapper component reads something from the context
<add> // and passes it down as a prop to the wrapped component.
<add> var router = this.context.router;
<add> return <WrappedComponent {...this.props} router={router} />;
<add> }
<add> });
<add>};
<add>
<add>var Link = React.createClass({
<add> handleClick: function(e) {
<add> e.stopPropagation();
<add>
<add> // The wrapped component uses props instead of context.
<add> this.props.router.push(this.props.to);
<add> },
<add>
<add> render: function() {
<add> return (
<add> <a onClick={this.handleClick}>
<add> {this.props.children}
<add> </a>
<add> );
<add> }
<add>});
<add>
<add>// Don't forget to wrap the component!
<add>module.exports = withRouter(Link);
<add>```
<add>
<add>If you’re using a third party library that only provides a mixin, we encourage you to file an issue with them linking to this post so that they can provide a higher-order component instead. In the meantime, you can create a higher-order component around it yourself in exactly the same way.
<add>
<add>### Utility Methods
<add>
<add>Sometimes, mixins are used solely to share utility functions between components:
<add>
<add>```js
<add>var ColorMixin = {
<add> getLuminance(color) {
<add> var c = parseInt(color, 16);
<add> var r = (c & 0xFF0000) >> 16;
<add> var g = (c & 0x00FF00) >> 8;
<add> var b = (c & 0x0000FF);
<add> return (0.299 * r + 0.587 * g + 0.114 * b);
<add> }
<add>};
<add>
<add>var Button = React.createClass({
<add> mixins: [ColorMixin],
<add>
<add> render: function () {
<add> var theme = this.getLuminance(this.props.color) > 160 ? 'dark' : 'light';
<add> return (
<add> <div className={theme}>
<add> {this.props.children}
<add> </div>
<add> )
<add> }
<add>});
<add>```
<add>
<add>#### Solution
<add>
<add>Put utility functions into regular JavaScript modules and import them. This also makes it easier to test them or use them outside of your components:
<add>
<add>```js
<add>var getLuminance = require('../utils/getLuminance');
<add>
<add>var Button = React.createClass({
<add> render: function () {
<add> var theme = getLuminance(this.props.color) > 160 ? 'dark' : 'light';
<add> return (
<add> <div className={theme}>
<add> {this.props.children}
<add> </div>
<add> )
<add> }
<add>});
<add>```
<add>
<add>### Other Use Cases
<add>
<add>Sometimes people use mixins to selectively add logging to lifecycle hooks in some components. In the future, we intend to provide an [official DevTools API](https://github.com/facebook/react/issues/5306) that would let you implement something similar without touching the components. However it’s still very much a work in progress. If you heavily depend on logging mixins for debugging, you might want to keep using those mixins for a little longer.
<add>
<add>If you can’t accomplish something with a component, a higher-order component, or a utility module, it could be mean that React should provide this out of the box. [File an issue](https://github.com/facebook/react/issues/new) to tell us about your use case for mixins, and we’ll help you consider alternatives or perhaps implement your feature request.
<add>
<add>Mixins are not deprecated in the traditional sense. You can keep using them with `React.createClass()`, as we won’t be changing it further. Eventually, as ES6 classes gain more adoption and their usability problems in React are solved, we might split `React.createClass()` into a separate package because most people wouldn’t need it. Even in that case, your old mixins would keep working.
<add>
<add>We believe that the alternatives above are better for the vast majority of cases, and we invite you to try writing React apps without using mixins. | 1 |
PHP | PHP | fix wrong docblock | 5797f2f41dba876f1597bb552eda8be9e4e4bc97 | <ide><path>src/TestSuite/Constraint/Console/ContentsBase.php
<ide> abstract class ContentsBase extends Constraint
<ide> /**
<ide> * Constructor
<ide> *
<del> * @param int $contents Contents
<add> * @param array $contents Contents
<ide> * @param string $output Output type
<ide> */
<ide> public function __construct($contents, $output) | 1 |
Javascript | Javascript | provide support for staggering animations with css | 74848307443c00ab07552336c56ddfa1e9ef6eff | <ide><path>src/ngAnimate/animate.js
<ide> * immediately resulting in a DOM element that is at its final state. This final state is when the DOM element
<ide> * has no CSS transition/animation classes applied to it.
<ide> *
<add> * <h3>CSS Staggering Animations</h3>
<add> * A Staggering animation is a collection of animations that are issued with a slight delay in between each successive operation resulting in a
<add> * curtain-like effect. The ngAnimate module, as of 1.2.0, supports staggering animations and the stagger effect can be
<add> * performed by creating a **ng-EVENT-stagger** CSS class and attaching that class to the base CSS class used for
<add> * the animation. The style property expected within the stagger class can either be a **transition-delay** or an
<add> * **animation-delay** property (or both if your animation contains both transitions and keyframe animations).
<add> *
<add> * <pre>
<add> * .my-animation.ng-enter {
<add> * /* standard transition code */
<add> * }
<add> * .my-animation.ng-enter-stagger {
<add> * /* this will have a 100ms delay between each successive leave animation */
<add> * -webkit-transition-delay: 0.1s;
<add> * transition-delay: 0.1s;
<add> *
<add> * /* in case the stagger doesn't work then these two values
<add> * must be set to 0 to avoid an accidental CSS inheritance */
<add> * -webkit-transition-duration: 0s;
<add> * transition-duration: 0s;
<add> * }
<add> * .my-animation.ng-enter.ng-enter-active {
<add> * /* standard transition styles */
<add> * }
<add> * </pre>
<add> *
<add> * Staggering animations work by default in ngRepeat (so long as the CSS class is defiend). Outside of ngRepeat, to use staggering animations
<add> * on your own, they can be triggered by firing multiple calls to the same event on $animate. However, the restrictions surrounding this
<add> * are that each of the elements must have the same CSS className value as well as the same parent element. A stagger operation
<add> * will also be reset if more than 10ms has passed after the last animation has been fired.
<add> *
<add> * The following code will issue the **ng-leave-stagger** event on the element provided:
<add> *
<add> * <pre>
<add> * var kids = parent.children();
<add> *
<add> * $animate.leave(kids[0]); //stagger index=0
<add> * $animate.leave(kids[1]); //stagger index=1
<add> * $animate.leave(kids[2]); //stagger index=2
<add> * $animate.leave(kids[3]); //stagger index=3
<add> * $animate.leave(kids[4]); //stagger index=4
<add> *
<add> * $timeout(function() {
<add> * //stagger has reset itself
<add> * $animate.leave(kids[5]); //stagger index=0
<add> * $animate.leave(kids[6]); //stagger index=1
<add> * }, 100, false);
<add> * </pre>
<add> *
<add> * Stagger animations are currently only supported within CSS-defined animations.
<add> *
<ide> * <h2>JavaScript-defined Animations</h2>
<ide> * In the event that you do not want to use CSS3 transitions or CSS3 animations or if you wish to offer animations on browsers that do not
<ide> * yet support CSS transitions/animations, then you can make use of JavaScript animations defined inside of your AngularJS module.
<ide> angular.module('ngAnimate', ['ng'])
<ide> var forEach = angular.forEach;
<ide>
<ide> // Detect proper transitionend/animationend event names.
<del> var transitionProp, transitionendEvent, animationProp, animationendEvent;
<add> var prefix = '', transitionProp, transitionendEvent, animationProp, animationendEvent;
<ide>
<ide> // If unprefixed events are not supported but webkit-prefixed are, use the latter.
<ide> // Otherwise, just use W3C names, browsers not supporting them at all will just ignore them.
<ide> angular.module('ngAnimate', ['ng'])
<ide> // Also, the only modern browser that uses vendor prefixes for transitions/keyframes is webkit
<ide> // therefore there is no reason to test anymore for other vendor prefixes: http://caniuse.com/#search=transition
<ide> if (window.ontransitionend === undefined && window.onwebkittransitionend !== undefined) {
<add> prefix = '-webkit-';
<ide> transitionProp = 'WebkitTransition';
<ide> transitionendEvent = 'webkitTransitionEnd transitionend';
<ide> } else {
<ide> angular.module('ngAnimate', ['ng'])
<ide> }
<ide>
<ide> if (window.onanimationend === undefined && window.onwebkitanimationend !== undefined) {
<add> prefix = '-webkit-';
<ide> animationProp = 'WebkitAnimation';
<ide> animationendEvent = 'webkitAnimationEnd animationend';
<ide> } else {
<ide> angular.module('ngAnimate', ['ng'])
<ide> }, 10, false);
<ide> }
<ide>
<add> function applyStyle(node, style) {
<add> var oldStyle = node.getAttribute('style') || '';
<add> var newStyle = (oldStyle.length > 0 ? '; ' : '') + style;
<add> node.setAttribute('style', newStyle);
<add> return oldStyle;
<add> }
<add>
<ide> function getElementAnimationDetails(element, cacheKey, onlyCheckTransition) {
<ide> var data = cacheKey ? lookupCache[cacheKey] : null;
<ide> if(!data) {
<ide> angular.module('ngAnimate', ['ng'])
<ide> }
<ide> });
<ide> data = {
<add> total : 0,
<ide> transitionDelay : transitionDelay,
<ide> animationDelay : animationDelay,
<ide> transitionDuration : transitionDuration,
<ide> angular.module('ngAnimate', ['ng'])
<ide> }
<ide>
<ide> function animate(element, className, done) {
<del>
<ide> var cacheKey = getCacheKey(element);
<ide> if(getElementAnimationDetails(element, cacheKey, true).transitionDuration > 0) {
<ide>
<ide> done();
<ide> return;
<ide> }
<ide>
<add> var eventCacheKey = cacheKey + ' ' + className;
<add> var ii = lookupCache[eventCacheKey] ? ++lookupCache[eventCacheKey].total : 0;
<add>
<add> var stagger = {};
<add> if(ii > 0) {
<add> var staggerClassName = className + '-stagger';
<add> var staggerCacheKey = cacheKey + ' ' + staggerClassName;
<add> var applyClasses = !lookupCache[staggerCacheKey];
<add>
<add> applyClasses && element.addClass(staggerClassName);
<add>
<add> stagger = getElementAnimationDetails(element, staggerCacheKey);
<add>
<add> applyClasses && element.removeClass(staggerClassName);
<add> }
<add>
<ide> element.addClass(className);
<ide>
<del> var timings = getElementAnimationDetails(element, cacheKey + ' ' + className);
<add> var timings = getElementAnimationDetails(element, eventCacheKey);
<ide>
<ide> /* there is no point in performing a reflow if the animation
<ide> timeout is empty (this would cause a flicker bug normally
<ide> angular.module('ngAnimate', ['ng'])
<ide> activeClassName += (i > 0 ? ' ' : '') + klass + '-active';
<ide> });
<ide>
<del> // This triggers a reflow which allows for the transition animation to kick in.
<del> var css3AnimationEvents = animationendEvent + ' ' + transitionendEvent;
<add> var formerStyle, css3AnimationEvents = animationendEvent + ' ' + transitionendEvent;
<ide>
<add> // This triggers a reflow which allows for the transition animation to kick in.
<ide> afterReflow(function() {
<ide> if(timings.transitionDuration > 0) {
<ide> node.style[transitionProp + propertyKey] = '';
<add> if(ii > 0 && stagger.transitionDelay > 0 && stagger.transitionDuration === 0) {
<add> formerStyle = applyStyle(node, prefix + 'transition-delay: ' +
<add> (ii * stagger.transitionDelay + timings.transitionDelay) + 's');
<add> }
<add> }
<add>
<add> if(ii > 0 && stagger.animationDelay > 0 && stagger.animationDuration === 0) {
<add> formerStyle = applyStyle(node, prefix + 'animation-delay: ' +
<add> (ii * stagger.animationDelay + timings.animationDelay) + 's');
<ide> }
<ide> element.addClass(activeClassName);
<ide> });
<ide> angular.module('ngAnimate', ['ng'])
<ide> element.removeClass(className);
<ide> element.removeClass(activeClassName);
<ide> element.removeData(NG_ANIMATE_CLASS_KEY);
<add> if(formerStyle != null) {
<add> formerStyle.length > 0 ?
<add> node.setAttribute('style', formerStyle) :
<add> node.removeAttribute('style');
<add> }
<ide>
<ide> // Only when the animation is cancelled is the done()
<ide> // function not called for this animation therefore
<ide><path>test/ngAnimate/animateSpec.js
<ide> describe("ngAnimate", function() {
<ide>
<ide> expect(element.hasClass('ng-hide-remove-active')).toBe(false);
<ide> }));
<add>
<add> it("should stagger the items when the correct CSS class is provided",
<add> inject(function($animate, $rootScope, $compile, $sniffer, $timeout, $document, $rootElement) {
<add>
<add> if(!$sniffer.animations) return;
<add>
<add> $animate.enabled(true);
<add>
<add> ss.addRule('.ani.ng-enter, .ani.ng-leave, .ani-fake.ng-enter, .ani-fake.ng-leave',
<add> '-webkit-animation:1s my_animation;' +
<add> 'transition:1s my_animation;');
<add>
<add> ss.addRule('.ani.ng-enter-stagger, .ani.ng-leave-stagger',
<add> '-webkit-animation-delay:0.1s;' +
<add> '-webkit-animation-duration:0s;' +
<add> 'animation-delay:0.1s;' +
<add> 'animation-duration:0s;');
<add>
<add> ss.addRule('.ani-fake.ng-enter-stagger, .ani-fake.ng-leave-stagger',
<add> '-webkit-animation-delay:0.1s;' +
<add> '-webkit-animation-duration:1s;' +
<add> 'animation-delay:0.1s;' +
<add> 'animation-duration:1s;');
<add>
<add> var container = $compile(html('<div></div>'))($rootScope);
<add>
<add> var elements = [];
<add> for(var i = 0; i < 5; i++) {
<add> var newScope = $rootScope.$new();
<add> var element = $compile('<div class="ani"></div>')(newScope);
<add> $animate.enter(element, container);
<add> elements.push(element);
<add> };
<add>
<add> $rootScope.$digest();
<add> $timeout.flush();
<add>
<add> expect(elements[0].attr('style')).toBeFalsy();
<add> expect(elements[1].attr('style')).toMatch(/animation-delay: 0\.1\d*s/);
<add> expect(elements[2].attr('style')).toMatch(/animation-delay: 0\.2\d*s/);
<add> expect(elements[3].attr('style')).toMatch(/animation-delay: 0\.3\d*s/);
<add> expect(elements[4].attr('style')).toMatch(/animation-delay: 0\.4\d*s/);
<add>
<add> for(var i = 0; i < 5; i++) {
<add> dealoc(elements[i]);
<add> var newScope = $rootScope.$new();
<add> var element = $compile('<div class="ani-fake"></div>')(newScope);
<add> $animate.enter(element, container);
<add> elements[i] = element;
<add> };
<add>
<add> $rootScope.$digest();
<add> $timeout.flush();
<add>
<add> expect(elements[0].attr('style')).toBeFalsy();
<add> expect(elements[1].attr('style')).not.toMatch(/animation-delay: 0\.1\d*s/);
<add> expect(elements[2].attr('style')).not.toMatch(/animation-delay: 0\.2\d*s/);
<add> expect(elements[3].attr('style')).not.toMatch(/animation-delay: 0\.3\d*s/);
<add> expect(elements[4].attr('style')).not.toMatch(/animation-delay: 0\.4\d*s/);
<add> }));
<ide> });
<ide>
<ide> describe("Transitions", function() {
<ide> describe("ngAnimate", function() {
<ide> expect(element.hasClass('ng-hide-add-active')).toBe(true);
<ide> }
<ide> }));
<add>
<add> it("should stagger the items when the correct CSS class is provided",
<add> inject(function($animate, $rootScope, $compile, $sniffer, $timeout, $document, $rootElement) {
<add>
<add> if(!$sniffer.transitions) return;
<add>
<add> $animate.enabled(true);
<add>
<add> ss.addRule('.ani.ng-enter, .ani.ng-leave, .ani-fake.ng-enter, .ani-fake.ng-leave',
<add> '-webkit-transition:1s linear all;' +
<add> 'transition:1s linear all;');
<add>
<add> ss.addRule('.ani.ng-enter-stagger, .ani.ng-leave-stagger',
<add> '-webkit-transition-delay:0.1s;' +
<add> '-webkit-transition-duration:0s;' +
<add> 'transition-delay:0.1s;' +
<add> 'transition-duration:0s;');
<add>
<add> ss.addRule('.ani-fake.ng-enter-stagger, .ani-fake.ng-leave-stagger',
<add> '-webkit-transition-delay:0.1s;' +
<add> '-webkit-transition-duration:1s;' +
<add> 'transition-delay:0.1s;' +
<add> 'transition-duration:1s;');
<add>
<add> var container = $compile(html('<div></div>'))($rootScope);
<add>
<add> var elements = [];
<add> for(var i = 0; i < 5; i++) {
<add> var newScope = $rootScope.$new();
<add> var element = $compile('<div class="ani"></div>')(newScope);
<add> $animate.enter(element, container);
<add> elements.push(element);
<add> };
<add>
<add> $rootScope.$digest();
<add> $timeout.flush();
<add>
<add> expect(elements[0].attr('style')).toBeFalsy();
<add> expect(elements[1].attr('style')).toMatch(/transition-delay: 0\.1\d*s/);
<add> expect(elements[2].attr('style')).toMatch(/transition-delay: 0\.2\d*s/);
<add> expect(elements[3].attr('style')).toMatch(/transition-delay: 0\.3\d*s/);
<add> expect(elements[4].attr('style')).toMatch(/transition-delay: 0\.4\d*s/);
<add>
<add> for(var i = 0; i < 5; i++) {
<add> dealoc(elements[i]);
<add> var newScope = $rootScope.$new();
<add> var element = $compile('<div class="ani-fake"></div>')(newScope);
<add> $animate.enter(element, container);
<add> elements[i] = element;
<add> };
<add>
<add> $rootScope.$digest();
<add> $timeout.flush();
<add>
<add> expect(elements[0].attr('style')).toBeFalsy();
<add> expect(elements[1].attr('style')).not.toMatch(/transition-delay: 0\.1\d*s/);
<add> expect(elements[2].attr('style')).not.toMatch(/transition-delay: 0\.2\d*s/);
<add> expect(elements[3].attr('style')).not.toMatch(/transition-delay: 0\.3\d*s/);
<add> expect(elements[4].attr('style')).not.toMatch(/transition-delay: 0\.4\d*s/);
<add> }));
<ide> });
<ide> });
<ide>
<ide> describe("ngAnimate", function() {
<ide> $rootScope.$digest();
<ide> $timeout.flush();
<ide>
<del> expect(count).toBe(2);
<add> //called three times since the classname is the same
<add> expect(count).toBe(3);
<ide>
<ide> dealoc(element);
<ide> count = 0; | 2 |
PHP | PHP | unskip test for sqlserver and belongstomany | a12b65dd77426495313087c7442180927c22a15d | <ide><path>tests/TestCase/ORM/Association/BelongsToManyTest.php
<ide> public function testReplaceLinkFailingDomainRules()
<ide> */
<ide> public function testReplaceLinkBinaryUuid()
<ide> {
<del> $this->skipIf(
<del> ConnectionManager::get('test')->getDriver() instanceof \Cake\Database\Driver\Sqlserver,
<del> 'This test is failing in SQLServer and needs to be revisited.'
<del> );
<ide> $items = $this->getTableLocator()->get('BinaryUuidItems');
<ide> $tags = $this->getTableLocator()->get('BinaryUuidTags');
<ide>
<ide> public function testReplaceLinkBinaryUuid()
<ide> ]);
<ide> $itemName = 'Item 1';
<ide> $item = $items->find()->where(['BinaryUuidItems.name' => $itemName])->firstOrFail();
<add> $existingTag = $tags->find()->where(['BinaryUuidTags.name' => 'Defect'])->firstOrFail();
<ide>
<ide> // 1=existing, 2=new tag
<ide> $item->binary_uuid_tags = [
<del> new Entity(['id' => '481fc6d0-b920-43e0-a40d-111111111111'], ['markNew' => false]),
<add> $existingTag,
<ide> new Entity(['name' => 'net new']),
<ide> ];
<ide> $item->name = 'Updated'; | 1 |
Python | Python | fix resnet lr comment | e7957b7f4a51b871578cc96808b5568ce8ce78b5 | <ide><path>official/resnet/resnet_run_loop.py
<ide> def learning_rate_with_decay(
<ide> initial_learning_rate = 0.1 * batch_size / batch_denom
<ide> batches_per_epoch = num_images / batch_size
<ide>
<del> # Multiply the learning rate by 0.1 at 100, 150, and 200 epochs.
<add> # Reduce the learning rate at certain epochs.
<add> # CIFAR-10: divide by 10 at epoch 100, 150, and 200
<add> # ImageNet: divide by 10 at epoch 30, 60, 80, and 90
<ide> boundaries = [int(batches_per_epoch * epoch) for epoch in boundary_epochs]
<ide> vals = [initial_learning_rate * decay for decay in decay_rates]
<ide> | 1 |
PHP | PHP | update behavior generation and some internals | eea446891576259802cdb7aaed8001abfde0eabd | <ide><path>src/Console/Command/Task/ModelTask.php
<ide> public function generate($name) {
<ide> $displayField = $this->getDisplayField($model);
<ide> $fields = $this->getFields($model);
<ide> $validation = $this->getValidation($model);
<add> $behaviors = $this->getBehaviors($model);
<ide>
<del> $this->bake($object, false);
<del> $this->bakeFixture($model, $useTable);
<add> $data = compact('associations', 'primaryKey', 'displayField',
<add> 'fields', 'validation', 'behaviors');
<add> $this->bakeEntity($object);
<add> $this->bakeTable($object);
<add> $this->bakeFixture($model, $table);
<ide> $this->bakeTest($model);
<ide> }
<ide>
<ide> public function all() {
<ide> }
<ide> $modelClass = Inflector::classify($table);
<ide> $this->out(__d('cake_console', 'Baking %s', $modelClass));
<del> $object = $this->_getModelObject($modelClass, $table);
<del> if ($this->bake($object, false) && $unitTestExists) {
<del> $this->bakeFixture($modelClass, $table);
<del> $this->bakeTest($modelClass);
<del> }
<add> $this->generate($table);
<ide> }
<ide> }
<ide>
<ide> public function fieldValidation($fieldName, $metaData, $primaryKey) {
<ide> ];
<ide> }
<ide>
<add>/**
<add> * Get behaviors
<add> *
<add> * @param Cake\ORM\Table $model
<add> * @return array Behaviors
<add> */
<add> public function getBehaviors($model) {
<add> $behaviors = [];
<add> $schema = $model->schema();
<add> $fields = $schema->columns();
<add> if (empty($fields)) {
<add> return [];
<add> }
<add> if (in_array('created', $fields) || in_array('modified', $fields)) {
<add> $behaviors[] = 'Timestamp';
<add> }
<add>
<add> if (in_array('lft', $fields) && $schema->columnType('lft') === 'integer' &&
<add> in_array('rght', $fields) && $schema->columnType('rght') === 'integer' &&
<add> in_array('parent_id', $fields)
<add> ) {
<add> $behaviors[] = 'Tree';
<add> }
<add> return $behaviors;
<add> }
<ide>
<ide> /**
<ide> * Generate a key value list of options and a prompt.
<ide> protected function _interactive() {
<ide> }
<ide> }
<ide>
<del>/**
<del> * Handles behaviors
<del> *
<del> * @param Model $model
<del> * @return array Behaviors
<del> */
<del> public function doActsAs($model) {
<del> if (!$model instanceof Model) {
<del> return false;
<del> }
<del> $behaviors = [];
<del> $fields = $model->schema(true);
<del> if (empty($fields)) {
<del> return [];
<del> }
<del>
<del> if (isset($fields['lft']) && $fields['lft']['type'] === 'integer' &&
<del> isset($fields['rght']) && $fields['rght']['type'] === 'integer' &&
<del> isset($fields['parent_id'])) {
<del> $behaviors[] = 'Tree';
<del> }
<del> return $behaviors;
<del> }
<del>
<ide> /**
<ide> * Assembles and writes a Model file.
<ide> *
<ide><path>tests/TestCase/Console/Command/Task/ModelTaskTest.php
<ide> public function testGetValidation() {
<ide> *
<ide> * @return void
<ide> */
<del> public function testDoActsAs() {
<del> $this->markTestIncomplete('Not done here yet');
<del> $this->Task->connection = 'test';
<del> $this->Task->interactive = false;
<del> $model = new Model(array('ds' => 'test', 'name' => 'NumberTree'));
<del> $result = $this->Task->doActsAs($model);
<add> public function testGetBehaviors() {
<add> $model = TableRegistry::get('NumberTrees');
<add> $result = $this->Task->getBehaviors($model);
<add> $this->assertEquals(['Tree'], $result);
<ide>
<del> $this->assertEquals(array('Tree'), $result);
<add> $model = TableRegistry::get('BakeArticles');
<add> $result = $this->Task->getBehaviors($model);
<add> $this->assertEquals(['Timestamp'], $result);
<ide> }
<ide>
<ide> /** | 2 |
Python | Python | use string name in setup.py | a8416c46f74a6a9d159090ad6dbc644cd6d40e92 | <ide><path>setup.py
<ide> def setup_package():
<ide> generate_cython(root, "spacy")
<ide>
<ide> setup(
<del> name=about["__title__"],
<add> name="spacy",
<ide> zip_safe=False,
<ide> packages=PACKAGES,
<ide> package_data=PACKAGE_DATA, | 1 |
Javascript | Javascript | use local bootstrap css | cfe7b0e9efbcbc4d6dafef568895be265842c071 | <ide><path>src/ng/directive/ngShowHide.js
<ide> var NG_HIDE_IN_PROGRESS_CLASS = 'ng-hide-animate';
<ide> </div>
<ide> </file>
<ide> <file name="glyphicons.css">
<del> @import url(//netdna.bootstrapcdn.com/bootstrap/3.0.0/css/bootstrap-glyphicons.css);
<add> @import url(../../components/bootstrap-3.1.1/css/bootstrap.css);
<ide> </file>
<ide> <file name="animations.css">
<ide> .animate-show {
<ide> var ngShowDirective = ['$animate', function($animate) {
<ide> </div>
<ide> </file>
<ide> <file name="glyphicons.css">
<del> @import url(//netdna.bootstrapcdn.com/bootstrap/3.0.0/css/bootstrap-glyphicons.css);
<add> @import url(../../components/bootstrap-3.1.1/css/bootstrap.css);
<ide> </file>
<ide> <file name="animations.css">
<ide> .animate-hide { | 1 |
Javascript | Javascript | hoist exports declarations too | a44694aa4e6369892f2fa455b15c1d19a13d1f56 | <ide><path>lib/Parser.js
<ide> Parser.prototype.walkStatements = function walkStatements(statements) {
<ide> Parser.prototype.isHoistedStatement = function isHoistedStatement(statement) {
<ide> switch(statement.type) {
<ide> case "ImportDeclaration":
<add> case "ExportAllDeclaration":
<add> case "ExportNamedDeclaration":
<ide> return true;
<ide> }
<ide> return false;
<ide> Parser.prototype.walkExportDefaultDeclaration = function walkExportDefaultDeclar
<ide> Parser.prototype.walkExportAllDeclaration = function walkExportAllDeclaration(statement) {
<ide> var source = statement.source.value;
<ide> this.applyPluginsBailResult("export import", statement, source);
<del> this.applyPluginsBailResult("export import specifier", statement, source, null, null);
<add> this.applyPluginsBailResult("export import specifier", statement, source, null, null, 0);
<ide> };
<ide>
<ide> Parser.prototype.walkVariableDeclaration = function walkVariableDeclaration(statement) {
<ide><path>test/cases/parsing/harmony-commonjs/e.js
<add>exports.a = "a";
<add>exports.b = "b";
<ide><path>test/cases/parsing/harmony-commonjs/index.js
<ide> it("should double reexport from non-harmony modules correctly", function() {
<ide> y.should.be.eql("y");
<ide> x.should.be.eql("x");
<ide> });
<add>
<add>
<add>import { a, b } from "./reexport"
<add>
<add>it("should be possible to reexport a module with unknown exports", function() {
<add> a.should.be.eql("a");
<add> b.should.be.eql("b");
<add>});
<ide><path>test/cases/parsing/harmony-commonjs/reexport.js
<add>export * from "./e";
<add>import "./e"; | 4 |
Text | Text | add a note about hover options | 81e28c9895ad685b2b567a5de4b70ea591d275ae | <ide><path>docs/docs/general/interactions/index.md
<ide> The interaction configuration is passed into the `options.interaction` namespace
<ide> | `mode` | `string` | `'nearest'` | Sets which elements appear in the tooltip. See [Interaction Modes](./modes.md#interaction-modes) for details.
<ide> | `intersect` | `boolean` | `true` | if true, the hover mode only applies when the mouse position intersects an item on the chart.
<ide> | `axis` | `string` | `'x'` | Can be set to `'x'`, `'y'`, or `'xy'` to define which directions are used in calculating distances. Defaults to `'x'` for `'index'` mode and `'xy'` in `dataset` and `'nearest'` modes.
<add>
<add>The same options can be set into the `options.hover` namespace, in which case they will only affect the hover effect and the tooltip configuration will be kept independent. | 1 |
PHP | PHP | filter optional | b38bb6bc6d0dd831a334f8bbde6bd757bee694cb | <ide><path>src/Illuminate/Support/Collection.php
<ide> public function fetch($key)
<ide> /**
<ide> * Run a filter over each of the items.
<ide> *
<del> * @param callable $callback
<add> * @param callable|null $callback
<ide> * @return static
<ide> */
<del> public function filter(callable $callback)
<add> public function filter(callable $callback = null)
<ide> {
<del> return new static(array_filter($this->items, $callback));
<add> if ($callback) {
<add> return new static(array_filter($this->items, $callback));
<add> }
<add>
<add> return new static(array_filter($this->items));
<ide> }
<ide>
<ide> /**
<ide><path>tests/Support/SupportCollectionTest.php
<ide> public function testFilter()
<ide> $this->assertEquals([1 => ['id' => 2, 'name' => 'World']], $c->filter(function ($item) {
<ide> return $item['id'] == 2;
<ide> })->all());
<add>
<add> $c = new Collection(['', 'Hello', '', 'World']);
<add> $this->assertEquals(['Hello', 'World'], $c->filter()->values()->toArray());
<ide> }
<ide>
<ide> public function testWhere() | 2 |
Javascript | Javascript | use dasherizedmodulename for test description | 06e86935339148ecc18d91fd76484d3b3a90100f | <ide><path>blueprints/controller-test/index.js
<ide> 'use strict';
<ide>
<ide> const stringUtil = require('ember-cli-string-utils');
<del>const testInfo = require('ember-cli-test-info');
<ide>
<ide> const useTestFrameworkDetector = require('../test-framework-detector');
<ide>
<ide> module.exports = useTestFrameworkDetector({
<ide> let controllerPathName = dasherizedModuleName;
<ide> return {
<ide> controllerPathName: controllerPathName,
<del> friendlyTestDescription: testInfo.description(options.entity.name, 'Unit', 'Controller')
<add> friendlyTestDescription: ['Unit', 'Controller', dasherizedModuleName].join(' | ')
<ide> };
<ide> }
<ide> }); | 1 |
PHP | PHP | throw exception for unsafe order() usage | 4a04aedeed9cab7532eb5f3bde1ab2faaeaee87e | <ide><path>src/Database/Expression/OrderByExpression.php
<ide>
<ide> use Cake\Database\ExpressionInterface;
<ide> use Cake\Database\ValueBinder;
<add>use RuntimeException;
<ide>
<ide> /**
<ide> * An expression object for ORDER BY clauses
<ide> public function sql(ValueBinder $generator): string
<ide> */
<ide> protected function _addConditions(array $orders, array $types): void
<ide> {
<add> foreach ($orders as $key => $val) {
<add> if (is_string($key) &&
<add> is_string($val) &&
<add> !in_array(strtoupper($val), ['ASC', 'DESC'], true)
<add> ) {
<add> throw new RuntimeException(
<add> 'Passing extra expressions by associative array is not ' .
<add> 'allowed to avoid potential SQL injection. ' .
<add> 'Use QueryExpression or numeric array instead.'
<add> );
<add> }
<add> }
<add>
<ide> $this->_conditions = array_merge($this->_conditions, $orders);
<ide> }
<ide> }
<ide><path>tests/TestCase/Database/QueryTest.php
<ide> public function testSelectOrderByString()
<ide> }
<ide>
<ide> /**
<del> * Test that order() works with an associative array which contains extra values.
<add> * Test exception for order() with an associative array which contains extra values.
<ide> *
<ide> * @return void
<ide> */
<ide> public function testSelectOrderByAssociativeArrayContainingExtraExpressions()
<ide> {
<add> $this->expectException('RuntimeException');
<add> $this->expectExceptionMessage(
<add> 'Passing extra expressions by associative array is not ' .
<add> 'allowed to avoid potential SQL injection. ' .
<add> 'Use QueryExpression or numeric array instead.'
<add> );
<add>
<ide> $this->loadFixtures('Articles');
<ide> $query = new Query($this->connection);
<ide> $query->select(['id'])
<ide> ->from('articles')
<ide> ->order([
<ide> 'id' => 'desc -- Comment',
<ide> ]);
<del> $result = $query->execute();
<del> $this->assertEquals(['id' => 3], $result->fetch('assoc'));
<del> $this->assertEquals(['id' => 2], $result->fetch('assoc'));
<del> $this->assertEquals(['id' => 1], $result->fetch('assoc'));
<del> $result->closeCursor();
<ide> }
<ide>
<ide> /** | 2 |
Python | Python | add example to compare relu with selu | 8d5b2ce60c21a0c18c00610dda41687b42fd5c13 | <ide><path>examples/reuters_mlp_relu_vs_selu.py
<add>'''Compares self-normalizing MLPs with regular MLPs.
<add>
<add>Compares the performance of a simple MLP using two
<add>different activation functions: RELU and SELU
<add>on the Reuters newswire topic classification task.
<add>
<add># Reference:
<add> Klambauer, G., Unterthiner, T., Mayr, A., & Hochreiter, S. (2017).
<add> Self-Normalizing Neural Networks. arXiv preprint arXiv:1706.02515.
<add> https://arxiv.org/abs/1706.02515
<add>'''
<add>from __future__ import print_function
<add>
<add>import numpy as np
<add>import matplotlib.pyplot as plt
<add>import keras
<add>from keras.datasets import reuters
<add>from keras.models import Sequential
<add>from keras.layers import Dense, Activation, Dropout
<add>from keras.layers.noise import AlphaDropout
<add>from keras.preprocessing.text import Tokenizer
<add>
<add>max_words = 1000
<add>batch_size = 16
<add>epochs = 40
<add>plot = True
<add>
<add>
<add>def create_network(n_dense=6,
<add> dense_units=16,
<add> activation='selu',
<add> dropout=AlphaDropout,
<add> dropout_rate=0.1,
<add> kernel_initializer='lecun_normal',
<add> optimizer='adam',
<add> num_classes=1,
<add> max_words=max_words):
<add> """Generic function to create a fully-connected neural network.
<add>
<add> # Arguments
<add> n_dense: int > 0. Number of dense layers.
<add> dense_units: int > 0. Number of dense units per layer.
<add> dropout: keras.layers.Layer. A dropout layer to apply.
<add> dropout_rate: 0 <= float <= 1. The rate of dropout.
<add> kernel_initializer: str. The initializer for the weights.
<add> optimizer: str/keras.optimizers.Optimizer. The optimizer to use.
<add> num_classes: int > 0. The number of classes to predict.
<add> max_words: int > 0. The maximum number of words per data point.
<add>
<add> # Returns
<add> A Keras model instance (compiled).
<add> """
<add> model = Sequential()
<add> model.add(Dense(dense_units, input_shape=(max_words,),
<add> kernel_initializer=kernel_initializer))
<add> model.add(Activation(activation))
<add> model.add(dropout(dropout_rate))
<add>
<add> for i in range(n_dense - 1):
<add> model.add(Dense(dense_units, kernel_initializer=kernel_initializer))
<add> model.add(Activation(activation))
<add> model.add(dropout(dropout_rate))
<add>
<add> model.add(Dense(num_classes))
<add> model.add(Activation('softmax'))
<add> model.compile(loss='categorical_crossentropy',
<add> optimizer=optimizer,
<add> metrics=['accuracy'])
<add> return model
<add>
<add>
<add>network1 = {
<add> 'n_dense': 6,
<add> 'dense_units': 16,
<add> 'activation': 'relu',
<add> 'dropout': Dropout,
<add> 'dropout_rate': 0.5,
<add> 'kernel_initializer': 'glorot_uniform',
<add> 'optimizer': 'sgd'
<add>}
<add>
<add>network2 = {
<add> 'n_dense': 6,
<add> 'dense_units': 16,
<add> 'activation': 'selu',
<add> 'dropout': AlphaDropout,
<add> 'dropout_rate': 0.1,
<add> 'kernel_initializer': 'lecun_normal',
<add> 'optimizer': 'sgd'
<add>}
<add>
<add>print('Loading data...')
<add>(x_train, y_train), (x_test, y_test) = reuters.load_data(num_words=max_words,
<add> test_split=0.2)
<add>print(len(x_train), 'train sequences')
<add>print(len(x_test), 'test sequences')
<add>
<add>num_classes = np.max(y_train) + 1
<add>print(num_classes, 'classes')
<add>
<add>print('Vectorizing sequence data...')
<add>tokenizer = Tokenizer(num_words=max_words)
<add>x_train = tokenizer.sequences_to_matrix(x_train, mode='binary')
<add>x_test = tokenizer.sequences_to_matrix(x_test, mode='binary')
<add>print('x_train shape:', x_train.shape)
<add>print('x_test shape:', x_test.shape)
<add>
<add>print('Convert class vector to binary class matrix '
<add> '(for use with categorical_crossentropy)')
<add>y_train = keras.utils.to_categorical(y_train, num_classes)
<add>y_test = keras.utils.to_categorical(y_test, num_classes)
<add>print('y_train shape:', y_train.shape)
<add>print('y_test shape:', y_test.shape)
<add>
<add>print('\nBuilding network 1...')
<add>
<add>model1 = create_network(num_classes=num_classes, **network1)
<add>history_model1 = model1.fit(x_train,
<add> y_train,
<add> batch_size=batch_size,
<add> epochs=epochs,
<add> verbose=1,
<add> validation_split=0.1)
<add>
<add>score_model1 = model1.evaluate(x_test,
<add> y_test,
<add> batch_size=batch_size,
<add> verbose=1)
<add>
<add>
<add>print('\nBuilding network 2...')
<add>model2 = create_network(num_classes=num_classes, **network2)
<add>
<add>history_model2 = model2.fit(x_train,
<add> y_train,
<add> batch_size=batch_size,
<add> epochs=epochs,
<add> verbose=1,
<add> validation_split=0.1)
<add>
<add>score_model2 = model2.evaluate(x_test,
<add> y_test,
<add> batch_size=batch_size,
<add> verbose=1)
<add>
<add>print('\nNetwork 1 results')
<add>print('Hyperparameters:', network1)
<add>print('Test score:', score_model1[0])
<add>print('Test accuracy:', score_model1[1])
<add>print('Network 2 results')
<add>print('Hyperparameters:', network2)
<add>print('Test score:', score_model2[0])
<add>print('Test accuracy:', score_model2[1])
<add>
<add>plt.plot(range(epochs),
<add> history_model1.history['val_loss'],
<add> 'g-',
<add> label='Network 1 Val Loss')
<add>plt.plot(range(epochs),
<add> history_model2.history['val_loss'],
<add> 'r-',
<add> label='Network 2 Val Loss')
<add>plt.plot(range(epochs),
<add> history_model1.history['loss'],
<add> 'g--',
<add> label='Network 1 Loss')
<add>plt.plot(range(epochs),
<add> history_model2.history['loss'],
<add> 'r--',
<add> label='Network 2 Loss')
<add>plt.xlabel('Epochs')
<add>plt.ylabel('Loss')
<add>plt.legend()
<add>plt.savefig('comparison_of_networks.png') | 1 |
Python | Python | catch eof error while readin glances version file | faa7edfd64c09889c34b4a439b9455e9f0f120fd | <ide><path>glances/outdated.py
<ide> def _load_cache(self):
<ide> try:
<ide> with open(os.path.join(self._cache_path(), 'glances-version.db'), 'rb') as f:
<ide> cached_data = pickle.load(f)
<del> except IOError as e:
<add> except (EOFError, IOError) as e:
<ide> logger.debug("Can not read the version cache file ({0})".format(e))
<ide> else:
<ide> logger.debug("Read the version cache file") | 1 |
Go | Go | ensure timestamps set for metadata commands | 1a2bd3cf7dab6f4e2e96a44fe2084bc06044802b | <ide><path>builder/builder-next/exporter/writer.go
<ide> func normalizeLayersAndHistory(diffs []digest.Digest, history []ocispec.History,
<ide> history[i] = h
<ide> }
<ide>
<add> // Find the first new layer time. Otherwise, the history item for a first
<add> // metadata command would be the creation time of a base image layer.
<add> // If there is no such then the last layer with timestamp.
<add> var created *time.Time
<add> var noCreatedTime bool
<add> for _, h := range history {
<add> if h.Created != nil {
<add> created = h.Created
<add> if noCreatedTime {
<add> break
<add> }
<add> } else {
<add> noCreatedTime = true
<add> }
<add> }
<add>
<add> // Fill in created times for all history items to be either the first new
<add> // layer time or the previous layer.
<add> noCreatedTime = false
<add> for i, h := range history {
<add> if h.Created != nil {
<add> if noCreatedTime {
<add> created = h.Created
<add> }
<add> } else {
<add> noCreatedTime = true
<add> h.Created = created
<add> }
<add> history[i] = h
<add> }
<add>
<ide> return diffs, history
<ide> }
<ide> | 1 |
PHP | PHP | fix code standards | fd5a8a8d6b1557df4fc0c2cd1256e874bdb2dd21 | <ide><path>lib/Cake/Test/Case/Utility/CakeTimeTest.php
<ide> public function testTimeAgoInWords() {
<ide> $result = $this->Time->timeAgoInWords('+1 week');
<ide> $this->assertEquals('On ' . date('l d/m, H:i', strtotime('+1 week')), $result);
<ide>
<del> $result = $this->Time->timeAgoInWords(strtotime('+4 months +2 weeks +3 days'), array('end' => '8 years'), true);
<add> $result = $this->Time->timeAgoInWords(
<add> strtotime('+4 months +2 weeks +3 days'),
<add> array('end' => '8 years'),
<add> true
<add> );
<ide> $this->assertEquals('4 months, 2 weeks, 3 days', $result);
<ide>
<del> $result = $this->Time->timeAgoInWords(strtotime('+4 months +2 weeks +2 days'), array('end' => '8 years'), true);
<add> $result = $this->Time->timeAgoInWords(
<add> strtotime('+4 months +2 weeks +2 days'),
<add> array('end' => '8 years'),
<add> true
<add> );
<ide> $this->assertEquals('4 months, 2 weeks, 2 days', $result);
<ide>
<del> $result = $this->Time->timeAgoInWords(strtotime('+4 months +2 weeks +1 day'), array('end' => '8 years'), true);
<add> $result = $this->Time->timeAgoInWords(
<add> strtotime('+4 months +2 weeks +1 day'),
<add> array('end' => '8 years'),
<add> true
<add> );
<ide> $this->assertEquals('4 months, 2 weeks, 1 day', $result);
<ide>
<del> $result = $this->Time->timeAgoInWords(strtotime('+3 months +2 weeks +1 day'), array('end' => '8 years'), true);
<add> $result = $this->Time->timeAgoInWords(
<add> strtotime('+3 months +2 weeks +1 day'),
<add> array('end' => '8 years'),
<add> true
<add> );
<ide> $this->assertEquals('3 months, 2 weeks, 1 day', $result);
<ide>
<del> $result = $this->Time->timeAgoInWords(strtotime('+3 months +2 weeks'), array('end' => '8 years'), true);
<add> $result = $this->Time->timeAgoInWords(
<add> strtotime('+3 months +2 weeks'),
<add> array('end' => '8 years'),
<add> true
<add> );
<ide> $this->assertEquals('3 months, 2 weeks', $result);
<ide>
<del> $result = $this->Time->timeAgoInWords(strtotime('+3 months +1 week +6 days'), array('end' => '8 years'), true);
<add> $result = $this->Time->timeAgoInWords(
<add> strtotime('+3 months +1 week +6 days'),
<add> array('end' => '8 years'),
<add> true
<add> );
<ide> $this->assertEquals('3 months, 1 week, 6 days', $result);
<ide>
<del> $result = $this->Time->timeAgoInWords(strtotime('+2 months +2 weeks +1 day'), array('end' => '8 years'), true);
<add> $result = $this->Time->timeAgoInWords(
<add> strtotime('+2 months +2 weeks +1 day'),
<add> array('end' => '8 years'),
<add> true
<add> );
<ide> $this->assertEquals('2 months, 2 weeks, 1 day', $result);
<ide>
<del> $result = $this->Time->timeAgoInWords(strtotime('+2 months +2 weeks'), array('end' => '8 years'), true);
<add> $result = $this->Time->timeAgoInWords(
<add> strtotime('+2 months +2 weeks'),
<add> array('end' => '8 years'),
<add> true
<add> );
<ide> $this->assertEquals('2 months, 2 weeks', $result);
<ide>
<del> $result = $this->Time->timeAgoInWords(strtotime('+2 months +1 week +6 days'), array('end' => '8 years'), true);
<add> $result = $this->Time->timeAgoInWords(
<add> strtotime('+2 months +1 week +6 days'),
<add> array('end' => '8 years'),
<add> true
<add> );
<ide> $this->assertEquals('2 months, 1 week, 6 days', $result);
<ide>
<del> $result = $this->Time->timeAgoInWords(strtotime('+1 month +1 week +6 days'), array('end' => '8 years'), true);
<add> $result = $this->Time->timeAgoInWords(
<add> strtotime('+1 month +1 week +6 days'),
<add> array('end' => '8 years'),
<add> true
<add> );
<ide> $this->assertEquals('1 month, 1 week, 6 days', $result);
<del>
<del> $result = $this->Time->timeAgoInWords(strtotime('+8 years, +4 months +2 weeks +3 days'), array('end' => '1 years', 'element' => 'span'), true);
<del> $expected = '<span title="'.strtotime('+8 years, +4 months +2 weeks +3 days').'" class="timeAgoInWords">on '.date('j/n/y', strtotime('+8 years, +4 months +2 weeks +3 days')).'</span>';
<del> $this->assertEquals($expected, $result);
<del>
<del> $result = $this->Time->timeAgoInWords(strtotime('+2 weeks'), array('end' => '1 years', 'element' => 'div'), true);
<del> $expected = '<div title="'.strtotime('+2 weeks').'" class="timeAgoInWords">2 weeks</div>';
<del> $this->assertEquals($expected, $result);
<del>
<del> $result = $this->Time->timeAgoInWords(strtotime('+8 years +4 months +2 weeks +3 days'), array('accuracy' => array('year' => 'year'), 'end' => '+10 years'), true);
<add>
<add> $result = $this->Time->timeAgoInWords(
<add> strtotime('+8 years +4 months +2 weeks +3 days'),
<add> array('accuracy' => array('year' => 'year'), 'end' => '+10 years'),
<add> true
<add> );
<ide> $expected = '8 years';
<ide> $this->assertEquals($expected, $result);
<del>
<del> $result = $this->Time->timeAgoInWords(strtotime('+8 years +4 months +2 weeks +3 days'), array('accuracy' => array('year' => 'month'), 'end' => '+10 years'), true);
<add>
<add> $result = $this->Time->timeAgoInWords(
<add> strtotime('+8 years +4 months +2 weeks +3 days'),
<add> array('accuracy' => array('year' => 'month'), 'end' => '+10 years'),
<add> true
<add> );
<ide> $expected = '8 years, 4 months';
<ide> $this->assertEquals($expected, $result);
<del>
<del> $result = $this->Time->timeAgoInWords(strtotime('+8 years +4 months +2 weeks +3 days'), array('accuracy' => array('year' => 'week'), 'end' => '+10 years'), true);
<add>
<add> $result = $this->Time->timeAgoInWords(
<add> strtotime('+8 years +4 months +2 weeks +3 days'),
<add> array('accuracy' => array('year' => 'week'), 'end' => '+10 years'),
<add> true
<add> );
<ide> $expected = '8 years, 4 months, 2 weeks';
<ide> $this->assertEquals($expected, $result);
<del>
<del> $result = $this->Time->timeAgoInWords(strtotime('+8 years +4 months +2 weeks +3 days'), array('accuracy' => array('year' => 'day'), 'end' => '+10 years'), true);
<add>
<add> $result = $this->Time->timeAgoInWords(
<add> strtotime('+8 years +4 months +2 weeks +3 days'),
<add> array('accuracy' => array('year' => 'day'), 'end' => '+10 years'),
<add> true);
<ide> $expected = '8 years, 4 months, 2 weeks, 3 days';
<ide> $this->assertEquals($expected, $result);
<del>
<del> $result = $this->Time->timeAgoInWords(strtotime('+1 years +5 weeks'), array('accuracy' => array('year' => 'year'), 'end' => '+10 years'), true);
<add>
<add> $result = $this->Time->timeAgoInWords(
<add> strtotime('+1 years +5 weeks'),
<add> array('accuracy' => array('year' => 'year'), 'end' => '+10 years'),
<add> true
<add> );
<ide> $expected = '1 year';
<ide> $this->assertEquals($expected, $result);
<ide>
<ide> public function testTimeAgoInWords() {
<ide> $this->assertEquals($expected, $result);
<ide> }
<ide> }
<del>
<del> $result = $this->Time->timeAgoInWords(strtotime('-2 years -5 months -2 days'), array('end' => '3 years'), true);
<add>
<add> $result = $this->Time->timeAgoInWords(
<add> strtotime('-2 years -5 months -2 days'),
<add> array('end' => '3 years'),
<add> true
<add> );
<ide> $this->assertEquals('2 years, 5 months, 2 days ago', $result);
<ide>
<ide> $result = $this->Time->timeAgoInWords('2007-9-25');
<ide> public function testTimeAgoInWords() {
<ide> $result = $this->Time->timeAgoInWords('2007-9-25', 'Y-m-d', true);
<ide> $this->assertEquals('on 2007-09-25', $result);
<ide>
<del> $result = $this->Time->timeAgoInWords(strtotime('-2 weeks -2 days'), 'Y-m-d', false);
<add> $result = $this->Time->timeAgoInWords(
<add> strtotime('-2 weeks -2 days'),
<add> 'Y-m-d',
<add> false
<add> );
<ide> $this->assertEquals('2 weeks, 2 days ago', $result);
<ide>
<del> $result = $this->Time->timeAgoInWords(strtotime('+2 weeks +2 days'), 'Y-m-d', true);
<add> $result = $this->Time->timeAgoInWords(
<add> strtotime('+2 weeks +2 days'),
<add> 'Y-m-d',
<add> true
<add> );
<ide> $this->assertRegExp('/^2 weeks, [1|2] day(s)?$/', $result);
<ide>
<del> $result = $this->Time->timeAgoInWords(strtotime('+2 months +2 days'), array('end' => '1 month'));
<del> $this->assertEquals('on ' . date('j/n/y', strtotime('+2 months +2 days')), $result);
<add> $result = $this->Time->timeAgoInWords(
<add> strtotime('+2 months +2 days'),
<add> array('end' => '1 month')
<add> );
<add> $this->assertEquals(
<add> 'on ' . date('j/n/y', strtotime('+2 months +2 days')),
<add> $result
<add> );
<ide>
<del> $result = $this->Time->timeAgoInWords(strtotime('+2 months +2 days'), array('end' => '3 month'));
<add> $result = $this->Time->timeAgoInWords(
<add> strtotime('+2 months +2 days'),
<add> array('end' => '3 month')
<add> );
<ide> $this->assertRegExp('/2 months/', $result);
<ide>
<del> $result = $this->Time->timeAgoInWords(strtotime('+2 months +12 days'), array('end' => '3 month'));
<add> $result = $this->Time->timeAgoInWords(
<add> strtotime('+2 months +12 days'),
<add> array('end' => '3 month')
<add> );
<ide> $this->assertRegExp('/2 months, 1 week/', $result);
<ide>
<del> $result = $this->Time->timeAgoInWords(strtotime('+3 months +5 days'), array('end' => '4 month'));
<add> $result = $this->Time->timeAgoInWords(
<add> strtotime('+3 months +5 days'),
<add> array('end' => '4 month')
<add> );
<ide> $this->assertEquals('3 months, 5 days', $result);
<ide>
<del> $result = $this->Time->timeAgoInWords(strtotime('-2 months -2 days'), array('end' => '3 month'));
<add> $result = $this->Time->timeAgoInWords(
<add> strtotime('-2 months -2 days'),
<add> array('end' => '3 month')
<add> );
<ide> $this->assertEquals('2 months, 2 days ago', $result);
<ide>
<del> $result = $this->Time->timeAgoInWords(strtotime('-2 months -2 days'), array('end' => '3 month'));
<add> $result = $this->Time->timeAgoInWords(
<add> strtotime('-2 months -2 days'),
<add> array('end' => '3 month')
<add> );
<ide> $this->assertEquals('2 months, 2 days ago', $result);
<ide>
<del> $result = $this->Time->timeAgoInWords(strtotime('+2 months +2 days'), array('end' => '3 month'));
<add> $result = $this->Time->timeAgoInWords(
<add> strtotime('+2 months +2 days'),
<add> array('end' => '3 month')
<add> );
<ide> $this->assertRegExp('/2 months/', $result);
<ide>
<del> $result = $this->Time->timeAgoInWords(strtotime('+2 months +2 days'), array('end' => '1 month', 'format' => 'Y-m-d'));
<add> $result = $this->Time->timeAgoInWords(
<add> strtotime('+2 months +2 days'),
<add> array('end' => '1 month', 'format' => 'Y-m-d')
<add> );
<ide> $this->assertEquals('on ' . date('Y-m-d', strtotime('+2 months +2 days')), $result);
<ide>
<del> $result = $this->Time->timeAgoInWords(strtotime('-2 months -2 days'), array('end' => '1 month', 'format' => 'Y-m-d'));
<add> $result = $this->Time->timeAgoInWords(
<add> strtotime('-2 months -2 days'),
<add> array('end' => '1 month', 'format' => 'Y-m-d')
<add> );
<ide> $this->assertEquals('on ' . date('Y-m-d', strtotime('-2 months -2 days')), $result);
<ide>
<del> $result = $this->Time->timeAgoInWords(strtotime('-13 months -5 days'), array('end' => '2 years'));
<add> $result = $this->Time->timeAgoInWords(
<add> strtotime('-13 months -5 days'),
<add> array('end' => '2 years')
<add> );
<ide> $this->assertEquals('1 year, 1 month, 5 days ago', $result);
<ide> }
<ide> | 1 |
Python | Python | set version to v3.0.0.dev13 | 52338a07bba66da549a55391747d74a4a7e6aba3 | <ide><path>spacy/about.py
<ide> # fmt: off
<ide> __title__ = "spacy"
<del>__version__ = "3.0.0.dev12"
<add>__version__ = "3.0.0.dev13"
<ide> __release__ = True
<ide> __download_url__ = "https://github.com/explosion/spacy-models/releases/download"
<ide> __compatibility__ = "https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json" | 1 |
Mixed | Javascript | improve error messages | 2d9e87695e16d80ecffc8eab50e9fe1dfa7005f5 | <ide><path>doc/api/assert.md
<ide> For more information about the used equality comparisons see
<ide> <!-- YAML
<ide> added: REPLACEME
<ide> changes:
<add> - version: REPLACEME
<add> pr-url: https://github.com/nodejs/node/pull/REPLACEME
<add> description: Added error diffs to the strict mode
<ide> - version: REPLACEME
<ide> pr-url: https://github.com/nodejs/node/pull/17002
<ide> description: Added strict mode to the assert module.
<ide> When using the `strict mode`, any `assert` function will use the equality used i
<ide> the strict function mode. So [`assert.deepEqual()`][] will, for example, work the
<ide> same as [`assert.deepStrictEqual()`][].
<ide>
<add>On top of that, error messages which involve objects produce an error diff
<add>instead of displaying both objects. That is not the case for the legacy mode.
<add>
<ide> It can be accessed using:
<ide>
<ide> ```js
<ide> const assert = require('assert').strict;
<ide> ```
<ide>
<add>Example error diff (the `expected`, `actual`, and `Lines skipped` will be on a
<add>single row):
<add>
<add>```js
<add>const assert = require('assert').strict;
<add>
<add>assert.deepEqual([[[1, 2, 3]], 4, 5], [[[1, 2, '3']], 4, 5]);
<add>```
<add>
<add>```diff
<add>AssertionError [ERR_ASSERTION]: Input A expected to deepStrictEqual input B:
<add>+ expected
<add>- actual
<add>... Lines skipped
<add>
<add> [
<add> [
<add>...
<add> 2,
<add>- 3
<add>+ '3'
<add> ],
<add>...
<add> 5
<add> ]
<add>```
<add>
<ide> ## Legacy mode
<ide>
<ide> > Stability: 0 - Deprecated: Use strict mode instead.
<ide><path>lib/assert.js
<ide> const meta = [
<ide>
<ide> const escapeFn = (str) => meta[str.charCodeAt(0)];
<ide>
<add>const ERR_DIFF_DEACTIVATED = 0;
<add>const ERR_DIFF_NOT_EQUAL = 1;
<add>const ERR_DIFF_EQUAL = 2;
<add>
<ide> // The assert module provides functions that throw
<ide> // AssertionError's when particular conditions are not met. The
<ide> // assert module must conform to the following interface.
<ide> assert.deepStrictEqual = function deepStrictEqual(actual, expected, message) {
<ide> expected,
<ide> message,
<ide> operator: 'deepStrictEqual',
<del> stackStartFn: deepStrictEqual
<add> stackStartFn: deepStrictEqual,
<add> errorDiff: this === strict ? ERR_DIFF_EQUAL : ERR_DIFF_DEACTIVATED
<ide> });
<ide> }
<ide> };
<ide> function notDeepStrictEqual(actual, expected, message) {
<ide> expected,
<ide> message,
<ide> operator: 'notDeepStrictEqual',
<del> stackStartFn: notDeepStrictEqual
<add> stackStartFn: notDeepStrictEqual,
<add> errorDiff: this === strict ? ERR_DIFF_NOT_EQUAL : ERR_DIFF_DEACTIVATED
<ide> });
<ide> }
<ide> }
<ide> assert.strictEqual = function strictEqual(actual, expected, message) {
<ide> expected,
<ide> message,
<ide> operator: 'strictEqual',
<del> stackStartFn: strictEqual
<add> stackStartFn: strictEqual,
<add> errorDiff: this === strict ? ERR_DIFF_EQUAL : ERR_DIFF_DEACTIVATED
<ide> });
<ide> }
<ide> };
<ide> assert.notStrictEqual = function notStrictEqual(actual, expected, message) {
<ide> expected,
<ide> message,
<ide> operator: 'notStrictEqual',
<del> stackStartFn: notStrictEqual
<add> stackStartFn: notStrictEqual,
<add> errorDiff: this === strict ? ERR_DIFF_NOT_EQUAL : ERR_DIFF_DEACTIVATED
<ide> });
<ide> }
<ide> };
<ide><path>lib/internal/errors.js
<ide> class SystemError extends makeNodeError(Error) {
<ide> }
<ide> }
<ide>
<add>function createErrDiff(actual, expected, operator) {
<add> var other = '';
<add> var res = '';
<add> var lastPos = 0;
<add> var end = '';
<add> var skipped = false;
<add> const actualLines = util
<add> .inspect(actual, { compact: false }).split('\n');
<add> const expectedLines = util
<add> .inspect(expected, { compact: false }).split('\n');
<add> const msg = `Input A expected to ${operator} input B:\n` +
<add> '\u001b[32m+ expected\u001b[39m \u001b[31m- actual\u001b[39m';
<add> const skippedMsg = ' ... Lines skipped';
<add>
<add> // Remove all ending lines that match (this optimizes the output for
<add> // readability by reducing the number of total changed lines).
<add> var a = actualLines[actualLines.length - 1];
<add> var b = expectedLines[expectedLines.length - 1];
<add> var i = 0;
<add> while (a === b) {
<add> if (i++ < 2) {
<add> end = `\n ${a}${end}`;
<add> } else {
<add> other = a;
<add> }
<add> actualLines.pop();
<add> expectedLines.pop();
<add> a = actualLines[actualLines.length - 1];
<add> b = expectedLines[expectedLines.length - 1];
<add> }
<add> if (i > 3) {
<add> end = `\n...${end}`;
<add> skipped = true;
<add> }
<add> if (other !== '') {
<add> end = `\n ${other}${end}`;
<add> other = '';
<add> }
<add>
<add> const maxLines = Math.max(actualLines.length, expectedLines.length);
<add> var printedLines = 0;
<add> for (i = 0; i < maxLines; i++) {
<add> // Only extra expected lines exist
<add> const cur = i - lastPos;
<add> if (actualLines.length < i + 1) {
<add> if (cur > 1 && i > 2) {
<add> if (cur > 4) {
<add> res += '\n...';
<add> skipped = true;
<add> } else if (cur > 3) {
<add> res += `\n ${expectedLines[i - 2]}`;
<add> printedLines++;
<add> }
<add> res += `\n ${expectedLines[i - 1]}`;
<add> printedLines++;
<add> }
<add> lastPos = i;
<add> other += `\n\u001b[32m+\u001b[39m ${expectedLines[i]}`;
<add> printedLines++;
<add> // Only extra actual lines exist
<add> } else if (expectedLines.length < i + 1) {
<add> if (cur > 1 && i > 2) {
<add> if (cur > 4) {
<add> res += '\n...';
<add> skipped = true;
<add> } else if (cur > 3) {
<add> res += `\n ${actualLines[i - 2]}`;
<add> printedLines++;
<add> }
<add> res += `\n ${actualLines[i - 1]}`;
<add> printedLines++;
<add> }
<add> lastPos = i;
<add> res += `\n\u001b[31m-\u001b[39m ${actualLines[i]}`;
<add> printedLines++;
<add> // Lines diverge
<add> } else if (actualLines[i] !== expectedLines[i]) {
<add> if (cur > 1 && i > 2) {
<add> if (cur > 4) {
<add> res += '\n...';
<add> skipped = true;
<add> } else if (cur > 3) {
<add> res += `\n ${actualLines[i - 2]}`;
<add> printedLines++;
<add> }
<add> res += `\n ${actualLines[i - 1]}`;
<add> printedLines++;
<add> }
<add> lastPos = i;
<add> res += `\n\u001b[31m-\u001b[39m ${actualLines[i]}`;
<add> other += `\n\u001b[32m+\u001b[39m ${expectedLines[i]}`;
<add> printedLines += 2;
<add> // Lines are identical
<add> } else {
<add> res += other;
<add> other = '';
<add> if (cur === 1 || i === 0) {
<add> res += `\n ${actualLines[i]}`;
<add> printedLines++;
<add> }
<add> }
<add> // Inspected object to big (Show ~20 rows max)
<add> if (printedLines > 20 && i < maxLines - 2) {
<add> return `${msg}${skippedMsg}\n${res}\n...${other}\n...`;
<add> }
<add> }
<add> return `${msg}${skipped ? skippedMsg : ''}\n${res}${other}${end}`;
<add>}
<add>
<ide> class AssertionError extends Error {
<ide> constructor(options) {
<ide> if (typeof options !== 'object' || options === null) {
<ide> throw new exports.TypeError('ERR_INVALID_ARG_TYPE', 'options', 'Object');
<ide> }
<del> var { actual, expected, message, operator, stackStartFn } = options;
<add> var {
<add> actual,
<add> expected,
<add> message,
<add> operator,
<add> stackStartFn,
<add> errorDiff = 0
<add> } = options;
<add>
<ide> if (message != null) {
<ide> super(message);
<ide> } else {
<add> if (util === null) util = require('util');
<add>
<ide> if (actual && actual.stack && actual instanceof Error)
<ide> actual = `${actual.name}: ${actual.message}`;
<ide> if (expected && expected.stack && expected instanceof Error)
<ide> expected = `${expected.name}: ${expected.message}`;
<del> if (util === null) util = require('util');
<del> super(`${util.inspect(actual).slice(0, 128)} ` +
<del> `${operator} ${util.inspect(expected).slice(0, 128)}`);
<add>
<add> if (errorDiff === 0) {
<add> let res = util.inspect(actual);
<add> let other = util.inspect(expected);
<add> if (res.length > 128)
<add> res = `${res.slice(0, 125)}...`;
<add> if (other.length > 128)
<add> other = `${other.slice(0, 125)}...`;
<add> super(`${res} ${operator} ${other}`);
<add> } else if (errorDiff === 1) {
<add> // In case the objects are equal but the operator requires unequal, show
<add> // the first object and say A equals B
<add> const res = util
<add> .inspect(actual, { compact: false }).split('\n');
<add>
<add> if (res.length > 20) {
<add> res[19] = '...';
<add> while (res.length > 20) {
<add> res.pop();
<add> }
<add> }
<add> // Only print a single object.
<add> super(`Identical input passed to ${operator}:\n${res.join('\n')}`);
<add> } else {
<add> super(createErrDiff(actual, expected, operator));
<add> }
<ide> }
<ide>
<ide> this.generatedMessage = !message;
<ide><path>test/parallel/test-assert.js
<ide> assert.throws(() => {
<ide> assert.strictEqual('A'.repeat(1000), '');
<ide> }, common.expectsError({
<ide> code: 'ERR_ASSERTION',
<del> message: new RegExp(`^'${'A'.repeat(127)} strictEqual ''$`) }));
<add> message: /^'A{124}\.\.\. strictEqual ''$/
<add>}));
<ide>
<ide> {
<ide> // bad args to AssertionError constructor should throw TypeError
<ide> common.expectsError(
<ide> assert.equal(assert.notEqual, assert.notStrictEqual);
<ide> assert.equal(assert.notDeepEqual, assert.notDeepStrictEqual);
<ide> assert.equal(Object.keys(assert).length, Object.keys(a).length);
<del> /* eslint-enable no-restricted-properties */
<ide> assert(7);
<ide> common.expectsError(
<ide> () => assert(),
<ide> common.expectsError(
<ide> }
<ide> );
<ide> Error.stackTraceLimit = tmpLimit;
<add>
<add> // Test error diffs
<add> const start = 'Input A expected to deepStrictEqual input B:';
<add> const actExp = '\u001b[32m+ expected\u001b[39m \u001b[31m- actual\u001b[39m';
<add> const plus = '\u001b[32m+\u001b[39m';
<add> const minus = '\u001b[31m-\u001b[39m';
<add> let message = [
<add> start,
<add> `${actExp} ... Lines skipped`,
<add> '',
<add> ' [',
<add> ' [',
<add> '...',
<add> ' 2,',
<add> `${minus} 3`,
<add> `${plus} '3'`,
<add> ' ]',
<add> '...',
<add> ' 5',
<add> ' ]'].join('\n');
<add> assert.throws(
<add> () => assert.deepEqual([[[1, 2, 3]], 4, 5], [[[1, 2, '3']], 4, 5]),
<add> { message });
<add>
<add> message = [
<add> start,
<add> `${actExp} ... Lines skipped`,
<add> '',
<add> ' [',
<add> ' 1,',
<add> '...',
<add> ' 0,',
<add> `${plus} 1,`,
<add> ' 1,',
<add> '...',
<add> ' 1',
<add> ' ]'
<add> ].join('\n');
<add> assert.throws(
<add> () => assert.deepEqual(
<add> [1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1],
<add> [1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1]),
<add> { message });
<add>
<add> message = [
<add> start,
<add> `${actExp} ... Lines skipped`,
<add> '',
<add> ' [',
<add> ' 1,',
<add> '...',
<add> ' 0,',
<add> `${minus} 1,`,
<add> ' 1,',
<add> '...',
<add> ' 1',
<add> ' ]'
<add> ].join('\n');
<add> assert.throws(
<add> () => assert.deepEqual(
<add> [1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1],
<add> [1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1]),
<add> { message });
<add>
<add> message = [
<add> start,
<add> actExp,
<add> '',
<add> ' [',
<add> ' 1,',
<add> `${minus} 2,`,
<add> `${plus} 1,`,
<add> ' 1,',
<add> ' 1,',
<add> ' 0,',
<add> `${minus} 1,`,
<add> ' 1',
<add> ' ]'
<add> ].join('\n');
<add> assert.throws(
<add> () => assert.deepEqual(
<add> [1, 2, 1, 1, 0, 1, 1],
<add> [1, 1, 1, 1, 0, 1]),
<add> { message });
<add>
<add> message = [
<add> start,
<add> actExp,
<add> '',
<add> `${minus} [`,
<add> `${minus} 1,`,
<add> `${minus} 2,`,
<add> `${minus} 1`,
<add> `${minus} ]`,
<add> `${plus} undefined`,
<add> ].join('\n');
<add> assert.throws(
<add> () => assert.deepEqual([1, 2, 1]),
<add> { message });
<add>
<add> message = [
<add> start,
<add> actExp,
<add> '',
<add> ' [',
<add> `${minus} 1,`,
<add> ' 2,',
<add> ' 1',
<add> ' ]'
<add> ].join('\n');
<add> assert.throws(
<add> () => assert.deepEqual([1, 2, 1], [2, 1]),
<add> { message });
<add>
<add> message = `${start}\n` +
<add> `${actExp} ... Lines skipped\n` +
<add> '\n' +
<add> ' [\n' +
<add> `${minus} 1,\n`.repeat(10) +
<add> '...\n' +
<add> `${plus} 2,\n`.repeat(10) +
<add> '...';
<add> assert.throws(
<add> () => assert.deepEqual(Array(12).fill(1), Array(12).fill(2)),
<add> { message });
<add>
<add> // notDeepEqual tests
<add> message = 'Identical input passed to notDeepStrictEqual:\n[\n 1\n]';
<add> assert.throws(
<add> () => assert.notDeepEqual([1], [1]),
<add> { message });
<add>
<add> message = 'Identical input passed to notDeepStrictEqual:' +
<add> `\n[${'\n 1,'.repeat(18)}\n...`;
<add> const data = Array(21).fill(1);
<add> assert.throws(
<add> () => assert.notDeepEqual(data, data),
<add> { message });
<add> /* eslint-enable no-restricted-properties */
<ide> }
<ide>
<ide> common.expectsError( | 4 |
Ruby | Ruby | reduce thread locals | e4de78aec504b885d40c0244ce3222c05ca3dc2e | <ide><path>activesupport/lib/active_support/notifications.rb
<ide> module ActiveSupport
<ide> module Notifications
<ide> @instrumenters = Hash.new { |h,k| h[k] = notifier.listening?(k) }
<ide>
<add> class Registry # :nodoc:
<add> def self.instance
<add> Thread.current[name] ||= new
<add> end
<add>
<add> attr_reader :notifier, :instrumenter
<add>
<add> def initialize
<add> self.notifier = Fanout.new
<add> end
<add>
<add> def notifier=(notifier)
<add> @notifier = notifier
<add> @instrumenter = Instrumenter.new(notifier)
<add> end
<add> end
<add>
<ide> class << self
<ide> def publish(name, *args)
<ide> notifier.publish(name, *args)
<ide> def unsubscribe(args)
<ide> end
<ide>
<ide> def instrumenter
<del> Thread.current[:"instrumentation_#{notifier.object_id}"] ||= Instrumenter.new(notifier)
<add> Registry.instance.instrumenter
<ide> end
<ide>
<ide> def notifier
<del> Thread.current[:notifier] ||= Fanout.new
<add> Registry.instance.notifier
<ide> end
<ide>
<ide> def notifier=(notifier)
<del> Thread.current[:notifier] = notifier
<add> Registry.instance.notifier = notifier
<ide> end
<ide> end
<ide> end | 1 |
Python | Python | improve docs on providers and types | caeeeee2846f172c3216be9856b1f726180b331c | <ide><path>libcloud/providers.py
<ide> }
<ide>
<ide> def get_driver(provider):
<add> """ Gets a driver
<add> @param provider: Id of provider to get driver
<add> @type provider: L{libcloud.types.Provider}
<add> """
<ide> if provider in DRIVERS:
<ide> mod_name, driver_name = DRIVERS[provider]
<ide> _mod = __import__(mod_name, globals(), locals(), [driver_name])
<ide><path>libcloud/types.py
<ide> # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<ide> # See the License for the specific language governing permissions and
<ide> # limitations under the License.
<add>"""
<add>Base types used by other parts of libcloud
<add>"""
<add>
<ide> class Provider(object):
<del> """ Defines for each of the supported providers """
<del> DUMMY = 0 # Example provider
<del> EC2 = 1 # Amazon AWS
<del> EC2_EU = 2 # Amazon AWS EU
<del> RACKSPACE = 3 # Cloud Servers
<del> SLICEHOST = 4 # Cloud Servers
<del> GOGRID = 5 # GoGrid
<del> VPSNET = 6 # VPS.net
<del> LINODE = 7 # Linode.com
<del> VCLOUD = 8 # vCloud
<del> RIMUHOSTING = 9 #RimuHosting.com
<add> """
<add> Defines for each of the supported providers
<add>
<add> @cvar DUMMY: Example provider
<add> @cvar EC2: Amazon AWS
<add> @cvar EC2_EU: Amazon AWS EU
<add> @cvar RACKSPACE: Rackspace Cloud Servers
<add> @cvar SLICEHOST: Slicehost.com
<add> @cvar GOGRID: GoGrid
<add> @cvar VPSNET: VPS.net
<add> @cvar LINODE: Linode.com
<add> @cvar VCLOUD: vmware vCloud
<add> @cvar RIMUHOSTING: RimuHosting.com
<add> """
<add> DUMMY = 0
<add> EC2 = 1
<add> EC2_EU = 2
<add> RACKSPACE = 3
<add> SLICEHOST = 4
<add> GOGRID = 5
<add> VPSNET = 6
<add> LINODE = 7
<add> VCLOUD = 8
<add> RIMUHOSTING = 9
<ide>
<ide> class NodeState(object):
<del> """ Standard states for a node """
<add> """
<add> Standard states for a node
<add>
<add> @cvar RUNNING: Node is running
<add> @cvar REBOOTING: Node is rebooting
<add> @cvar TERMINATED: Node is terminated
<add> @cvar PENDING: Node is pending
<add> @cvar UNKNOWN: Node state is unknown
<add> """
<ide> RUNNING = 0
<ide> REBOOTING = 1
<ide> TERMINATED = 2
<ide> PENDING = 3
<ide> UNKNOWN = 4
<ide>
<ide> class InvalidCredsException(Exception):
<add> """Exception used when invalid credentials are used on a provider."""
<ide> def __init__(self, value='Invalid credentials with the provider'):
<ide> self.value = value
<ide> def __str__(self): | 2 |
Javascript | Javascript | add more options to threejs-lesson-utils | 65d6bb7ba09261858930ab4a19781a640739ee32 | <ide><path>threejs/lessons/resources/threejs-lesson-utils.js
<ide> window.threejsLessonUtils = {
<ide> this.init();
<ide>
<ide> const scene = new THREE.Scene();
<del> const fov = 60;
<add> let targetFOVDeg = 60;
<ide> const aspect = 1;
<ide> const zNear = 0.1;
<ide> const zFar = 50;
<del> const camera = new THREE.PerspectiveCamera(fov, aspect, zNear, zFar);
<add> const camera = new THREE.PerspectiveCamera(targetFOVDeg, aspect, zNear, zFar);
<ide> camera.position.z = 15;
<ide> scene.add(camera);
<ide>
<ide> window.threejsLessonUtils = {
<ide> const root = new THREE.Object3D();
<ide> scene.add(root);
<ide>
<del> const controls = new THREE.TrackballControls(camera, elem);
<del> controls.noZoom = true;
<del> controls.noPan = true;
<del>
<del> // add the lights as children of the camera.
<del> // this is because TrackbacllControls move the camera.
<del> // We really want to rotate the object itself but there's no
<del> // controls for that so we fake it by putting all the lights
<del> // on the camera so they move with it.
<del> camera.add(new THREE.HemisphereLight(0xaaaaaa, 0x444444, .5));
<del> const light = new THREE.DirectionalLight(0xffffff, 1);
<del> light.position.set(-1, 2, 4 - 15);
<del> camera.add(light);
<del>
<del> let updateFunction;
<add> const resizeFunctions = [];
<add> const updateFunctions = [];
<ide>
<ide> promise.then((result) => {
<ide> const info = result instanceof THREE.Object3D ? {
<ide> obj3D: result,
<ide> } : result;
<del> const { obj3D, update } = info;
<add> const { obj3D, update, trackball, lights } = info;
<ide> root.add(obj3D);
<del> updateFunction = update;
<add>
<add> targetFOVDeg = camera.fov;
<add>
<add> if (trackball !== false) {
<add> const controls = new THREE.TrackballControls(camera, elem);
<add> controls.noZoom = true;
<add> controls.noPan = true;
<add> resizeFunctions.push(controls.handleResize.bind(controls));
<add> updateFunctions.push(controls.update.bind(controls));
<add> }
<add>
<add> if (update) {
<add> updateFunctions.push(update);
<add> }
<add>
<add> // add the lights as children of the camera.
<add> // this is because TrackbacllControls move the camera.
<add> // We really want to rotate the object itself but there's no
<add> // controls for that so we fake it by putting all the lights
<add> // on the camera so they move with it.
<add> if (lights !== false) {
<add> camera.add(new THREE.HemisphereLight(0xaaaaaa, 0x444444, .5));
<add> const light = new THREE.DirectionalLight(0xffffff, 1);
<add> light.position.set(-1, 2, 4 - 15);
<add> camera.add(light);
<add> }
<add>
<ide> });
<ide>
<ide> let oldWidth = -1;
<ide> window.threejsLessonUtils = {
<ide> if (width !== oldWidth || height !== oldHeight) {
<ide> oldWidth = width;
<ide> oldHeight = height;
<del> controls.handleResize();
<del> }
<del> controls.update();
<del>
<del> if (updateFunction) {
<del> updateFunction(time);
<add> resizeFunctions.forEach(fn => fn());
<ide> }
<add> updateFunctions.forEach(fn => fn(time));
<ide>
<ide> const aspect = width / height;
<del> const targetFov = THREE.Math.degToRad(60);
<del> const fov = aspect >= 1
<del> ? targetFov
<del> : (2 * Math.atan(Math.tan(targetFov * .5) / aspect));
<add> const fovDeg = aspect >= 1
<add> ? targetFOVDeg
<add> : THREE.Math.radToDeg(2 * Math.atan(Math.tan(THREE.Math.degToRad(targetFOVDeg) * .5) / aspect));
<ide>
<del> camera.fov = THREE.Math.radToDeg(fov);
<add> camera.fov = fovDeg;
<ide> camera.aspect = aspect;
<ide> camera.updateProjectionMatrix();
<ide> | 1 |
Python | Python | use numpy version for f2py version | 8a6bcbcfe8db6cf2fd784631b480d5865077ad98 | <ide><path>numpy/f2py/__version__.py
<del>major = 2
<del>
<del>try:
<del> from __svn_version__ import version
<del> version_info = (major, version)
<del> version = '%s_%s' % version_info
<del>except (ImportError, ValueError):
<del> version = str(major)
<add>from numpy.version import version
<ide><path>numpy/f2py/capi_maps.py
<ide> Pearu Peterson
<ide>
<ide> """
<del>__version__ = "$Revision: 1.60 $"[10:-1]
<del>
<ide> from . import __version__
<ide> f2py_version = __version__.version
<ide>
<ide><path>numpy/f2py/common_rules.py
<ide> Pearu Peterson
<ide>
<ide> """
<del>__version__ = "$Revision: 1.19 $"[10:-1]
<del>
<ide> from . import __version__
<ide> f2py_version = __version__.version
<ide>
<ide><path>numpy/f2py/f2py2e.py
<ide> from . import capi_maps
<ide>
<ide> f2py_version = __version__.version
<add>numpy_version = __version__.version
<ide> errmess = sys.stderr.write
<ide> # outmess=sys.stdout.write
<ide> show = pprint.pprint
<ide> outmess = auxfuncs.outmess
<ide>
<del>try:
<del> from numpy import __version__ as numpy_version
<del>except ImportError:
<del> numpy_version = 'N/A'
<del>
<del>__usage__ = """\
<del>Usage:
<add>__usage__ =\
<add>f"""Usage:
<ide>
<ide> 1) To construct extension module sources:
<ide>
<ide> --[no-]latex-doc Create (or not) <modulename>module.tex.
<ide> Default is --no-latex-doc.
<ide> --short-latex Create 'incomplete' LaTeX document (without commands
<del> \\documentclass, \\tableofcontents, and \\begin{document},
<del> \\end{document}).
<add> \\documentclass, \\tableofcontents, and \\begin{{document}},
<add> \\end{{document}}).
<ide>
<ide> --[no-]rest-doc Create (or not) <modulename>module.rst.
<ide> Default is --no-rest-doc.
<ide> array. Integer <int> sets the threshold for array sizes when
<ide> a message should be shown.
<ide>
<del>Version: %s
<del>numpy Version: %s
<add>Version: {f2py_version}
<add>numpy Version: {numpy_version}
<ide> Requires: Python 3.5 or higher.
<ide> License: NumPy license (see LICENSE.txt in the NumPy source code)
<ide> Copyright 1999 - 2011 Pearu Peterson all rights reserved.
<del>http://cens.ioc.ee/projects/f2py2e/""" % (f2py_version, numpy_version)
<add>http://cens.ioc.ee/projects/f2py2e/"""
<ide>
<ide>
<ide> def scaninputline(inputline):
<ide><path>numpy/f2py/rules.py
<ide> Pearu Peterson
<ide>
<ide> """
<del>__version__ = "$Revision: 1.129 $"[10:-1]
<del>
<del>from . import __version__
<del>f2py_version = __version__.version
<del>
<del>from .. import version as _numpy_version
<del>numpy_version = _numpy_version.version
<del>
<ide> import os
<ide> import time
<ide> import copy
<ide>
<add># __version__.version is now the same as the NumPy version
<add>from . import __version__
<add>f2py_version = __version__.version
<add>numpy_version = __version__.version
<add>
<ide> from .auxfuncs import (
<ide> applyrules, debugcapi, dictappend, errmess, gentitle, getargs2,
<ide> hascallstatement, hasexternals, hasinitvalue, hasnote, hasresultnote,
<ide> \tif (PyErr_Occurred())
<ide> \t\t{PyErr_SetString(PyExc_ImportError, \"can't initialize module #modulename# (failed to import numpy)\"); return m;}
<ide> \td = PyModule_GetDict(m);
<del>\ts = PyUnicode_FromString(\"$R""" + """evision: $\");
<add>\ts = PyUnicode_FromString(\"#f2py_version#\");
<ide> \tPyDict_SetItemString(d, \"__version__\", s);
<ide> \tPy_DECREF(s);
<ide> \ts = PyUnicode_FromString(
<ide><path>numpy/tests/test_scripts.py
<ide> def find_f2py_commands():
<ide> def test_f2py(f2py_cmd):
<ide> # test that we can run f2py script
<ide> stdout = subprocess.check_output([f2py_cmd, '-v'])
<del> assert_equal(stdout.strip(), b'2')
<add> assert_equal(stdout.strip(), np.__version__.encode('ascii'))
<ide>
<ide>
<ide> def test_pep338():
<ide> stdout = subprocess.check_output([sys.executable, '-mnumpy.f2py', '-v'])
<del> assert_equal(stdout.strip(), b'2')
<add> assert_equal(stdout.strip(), np.__version__.encode('ascii')) | 6 |
Text | Text | create separate instruction | fab60e7faad8421be639c0887740489244079dc0 | <ide><path>docs/build-instructions/windows.md
<ide> * [Python 2.7.x](http://www.python.org/download/)
<ide> * [GitHub for Windows](http://windows.github.com/)
<ide> * Open the Windows GitHub shell (NOT the Standard PowerShell, the shortcut labeled 'Git Shell' - make sure you have logged in at least once to the GitHub for Windows GUI App)
<add> * Log in to the GitHub for Windows GUI App
<ide> * `$env:Path = $env:Path + ";C:\path\to\atom\repo\node_modules"`
<ide>
<ide> ## Instructions | 1 |
PHP | PHP | fix exception handling | e2b48c006965ccce5c0a98b11a974c97cf410198 | <ide><path>src/Illuminate/Foundation/Testing/Concerns/InteractsWithExceptionHandling.php
<ide> trait InteractsWithExceptionHandling
<ide> {
<ide> /**
<del> * The previous exception handler.
<add> * The original exception handler.
<ide> *
<ide> * @var ExceptionHandler|null
<ide> */
<del> protected $previousExceptionHandler;
<add> protected $originalExceptionHandler;
<ide>
<ide> /**
<ide> * Restore exception handling.
<ide> trait InteractsWithExceptionHandling
<ide> */
<ide> protected function withExceptionHandling()
<ide> {
<del> if ($this->previousExceptionHandler) {
<del> $this->app->instance(ExceptionHandler::class, $this->previousExceptionHandler);
<add> if ($this->originalExceptionHandler) {
<add> $this->app->instance(ExceptionHandler::class, $this->originalExceptionHandler);
<ide> }
<ide>
<ide> return $this;
<ide> protected function handleValidationExceptions()
<ide> */
<ide> protected function withoutExceptionHandling(array $except = [])
<ide> {
<del> $this->previousExceptionHandler = app(ExceptionHandler::class);
<add> if ($this->originalExceptionHandler == null) {
<add> $this->originalExceptionHandler = app(ExceptionHandler::class);
<add> }
<ide>
<del> $this->app->instance(ExceptionHandler::class, new class($this->previousExceptionHandler, $except) implements ExceptionHandler {
<add> $this->app->instance(ExceptionHandler::class, new class($this->originalExceptionHandler, $except) implements ExceptionHandler {
<ide> protected $except;
<del> protected $previousHandler;
<add> protected $originalHandler;
<ide>
<ide> /**
<ide> * Create a new class instance.
<ide> protected function withoutExceptionHandling(array $except = [])
<ide> * @param array $except
<ide> * @return void
<ide> */
<del> public function __construct($previousHandler, $except = [])
<add> public function __construct($originalHandler, $except = [])
<ide> {
<ide> $this->except = $except;
<del> $this->previousHandler = $previousHandler;
<add> $this->originalHandler = $originalHandler;
<ide> }
<ide>
<ide> /**
<ide> public function render($request, Exception $e)
<ide>
<ide> foreach ($this->except as $class) {
<ide> if ($e instanceof $class) {
<del> return $this->previousHandler->render($request, $e);
<add> return $this->originalHandler->render($request, $e);
<ide> }
<ide> }
<ide> | 1 |
Text | Text | add vsts ci badge to readme.md | 2f15551ec23cd4868b67bbccc21ff1ceebd42d71 | <ide><path>README.md
<ide> 
<ide>
<del>[](https://circleci.com/gh/atom/atom) [](https://travis-ci.org/atom/atom) [](https://ci.appveyor.com/project/Atom/atom)
<add>[](https://github.visualstudio.com/Atom/_build/latest?definitionId=32&branch=master) [](https://circleci.com/gh/atom/atom) [](https://travis-ci.org/atom/atom) [](https://ci.appveyor.com/project/Atom/atom)
<ide> [](https://david-dm.org/atom/atom)
<ide> [](https://atom-slack.herokuapp.com)
<ide> | 1 |
Javascript | Javascript | fix "super() use outside of constructor" issues | 81a375b24351dc90db40b55f4fee4ce1b75c0ec2 | <ide><path>packages/ember-glimmer/tests/integration/components/closure-components-test.js
<ide> moduleFor('@htmlbars Components test: closure components', class extends Renderi
<ide>
<ide> class ClosureComponentMutableParamsTest extends RenderingTest {
<ide> render(templateStr, context = {}) {
<del> super(`${templateStr}<span class="value">{{model.val2}}</span>`, assign(context, { model: { val2: 8 } }));
<add> super.render(`${templateStr}<span class="value">{{model.val2}}</span>`, assign(context, { model: { val2: 8 } }));
<ide> }
<ide> }
<ide>
<ide><path>packages/ember-glimmer/tests/integration/content-test.js
<ide> if (!EmberDev.runningProdBuild) {
<ide> }
<ide>
<ide> teardown() {
<del> super(...arguments);
<add> super.teardown(...arguments);
<ide> setDebugFunction('warn', originalWarn);
<ide> }
<ide>
<ide><path>packages/ember-glimmer/tests/integration/helpers/loc-test.js
<ide> moduleFor('Helpers test: {{loc}}', class extends RenderingTest {
<ide> }
<ide>
<ide> teardown() {
<del> super();
<add> super.teardown();
<ide> Ember.STRINGS = this.oldString;
<ide> }
<ide>
<ide><path>packages/ember-glimmer/tests/integration/helpers/log-test.js
<ide> moduleFor('Helpers test: {{log}}', class extends RenderingTest {
<ide> }
<ide>
<ide> teardown() {
<del> super();
<add> super.teardown();
<ide> Logger.log = this.originalLog;
<ide> }
<ide>
<ide><path>packages/ember-testing/lib/test/promise.js
<ide> export default class TestPromise extends RSVP.Promise {
<ide> }
<ide>
<ide> then(onFulfillment, ...args) {
<del> return super(result => isolate(onFulfillment, result), ...args);
<add> return super.then(result => isolate(onFulfillment, result), ...args);
<ide> }
<ide> }
<ide> | 5 |
Text | Text | remove unnecessary whitespace | e7ba7c3b95be4f46d7d27fcfd82dd4865c159393 | <ide><path>guides/source/rails_application_templates.md
<ide> CODE
<ide> These methods let you ask questions from templates and decide the flow based on the user’s answer. Lets say you want to freeze rails only if the user want to:
<ide>
<ide> ```ruby
<del>rake("rails:freeze:gems") if yes?("Freeze rails gems ?")
<add>rake("rails:freeze:gems") if yes?("Freeze rails gems?")
<ide> # no?(question) acts just the opposite.
<ide> ```
<ide> | 1 |
Javascript | Javascript | fix doc typo | 96522474290fd46353045b1655febf349843fba2 | <ide><path>packages/ember-handlebars/lib/helpers/binding.js
<ide> var helpers = EmberHandlebars.helpers;
<ide>
<ide> (function() {
<ide> // Binds a property into the DOM. This will create a hook in DOM that the
<del> // KVO system will look for and upate if the property changes.
<add> // KVO system will look for and update if the property changes.
<ide> var bind = function(property, options, preserveContext, shouldDisplay) {
<ide> var data = options.data,
<ide> fn = options.fn, | 1 |
Javascript | Javascript | update permissionsandroid docs to use new apis | 9338fbd7810e8b7b3b0cfe467ca63601b8f43d50 | <ide><path>Libraries/PermissionsAndroid/PermissionsAndroid.js
<ide> type PermissionStatus = 'granted' | 'denied' | 'never_ask_again';
<ide> * permissions.
<ide> *
<ide> * On devices before SDK version 23, the permissions are automatically granted
<del> * if they appear in the manifest, so `checkPermission` and `requestPermission`
<add> * if they appear in the manifest, so `check` and `request`
<ide> * should always be true.
<ide> *
<ide> * If a user has previously turned off a permission that you prompt for, the OS
<ide> type PermissionStatus = 'granted' | 'denied' | 'never_ask_again';
<ide> * ```
<ide> * async function requestCameraPermission() {
<ide> * try {
<del> * const granted = await PermissionsAndroid.requestPermission(
<add> * const granted = await PermissionsAndroid.request(
<ide> * PermissionsAndroid.PERMISSIONS.CAMERA,
<ide> * {
<ide> * 'title': 'Cool Photo App Camera Permission', | 1 |
Python | Python | fix 'from_native' method when rel is none | 756297ad1d07f56459471bff041828850ace0496 | <ide><path>rest_framework/fields.py
<ide> def __init__(self, *args, **kwargs):
<ide> def from_native(self, value):
<ide> try:
<ide> rel = self.model_field.rel
<add> return rel.to._meta.get_field(rel.field_name).to_python(value)
<ide> except:
<ide> return self.model_field.to_python(value)
<del> return rel.to._meta.get_field(rel.field_name).to_python(value)
<ide>
<ide> def field_to_native(self, obj, field_name):
<ide> value = self.model_field._get_val_from_obj(obj) | 1 |
Ruby | Ruby | add docs for `rails.env` and `rails.env=` | a770d7e404f994d717843035eba9c1094a6e90d8 | <ide><path>railties/lib/rails.rb
<ide> def root
<ide> application && application.config.root
<ide> end
<ide>
<add> # Returns the current Rails environment.
<add> #
<add> # Rails.env # => "development"
<add> # Rails.env.development? # => true
<add> # Rails.env.production? # => false
<ide> def env
<ide> @_env ||= ActiveSupport::StringInquirer.new(ENV["RAILS_ENV"] || ENV["RACK_ENV"] || "development")
<ide> end
<ide>
<add> # Sets the Rails environment.
<add> #
<add> # Rails.env = "staging" # => "staging"
<ide> def env=(environment)
<ide> @_env = ActiveSupport::StringInquirer.new(environment)
<ide> end | 1 |
Ruby | Ruby | remove initialize method | 70e48a6ac34615ba5ed3b807917c3b099256d67c | <ide><path>activerecord/lib/active_record/associations/preloader/through_association.rb
<ide> module ActiveRecord
<ide> module Associations
<ide> class Preloader
<ide> module ThroughAssociation #:nodoc:
<del> def initialize(klass, owners, reflection, preload_scope)
<del> super
<del> end
<del>
<ide> def through_reflection
<ide> reflection.through_reflection
<ide> end | 1 |
Mixed | Javascript | fix auto refresh for graph view | 64622929a043436b235b9fb61fb076c5d2e02124 | <ide><path>airflow/www/static/js/graph.js
<ide> function handleRefresh() {
<ide> // only refresh if the data has changed
<ide> if (prevTis !== tis) {
<ide> // eslint-disable-next-line no-global-assign
<del> taskInstances = JSON.parse(tis);
<del> updateNodesStates(taskInstances);
<add> updateNodesStates(tis);
<ide>
<ide> // Only redraw the graph if labels have changed
<del> const haveLabelsChanged = updateNodeLabels(nodes, taskInstances);
<add> const haveLabelsChanged = updateNodeLabels(nodes, tis);
<ide> if (haveLabelsChanged) draw();
<ide>
<ide> // end refresh if all states are final
<ide><path>tests/www/views/test_views_tasks.py
<ide> import urllib.parse
<ide> from datetime import timedelta
<ide>
<add>import freezegun
<ide> import pytest
<ide>
<ide> from airflow import settings
<ide> def reset_dagruns():
<ide>
<ide> @pytest.fixture(autouse=True)
<ide> def init_dagruns(app, reset_dagruns):
<del> app.dag_bag.get_dag("example_bash_operator").create_dagrun(
<del> run_id=DEFAULT_DAGRUN,
<del> run_type=DagRunType.SCHEDULED,
<del> execution_date=DEFAULT_DATE,
<del> data_interval=(DEFAULT_DATE, DEFAULT_DATE),
<del> start_date=timezone.utcnow(),
<del> state=State.RUNNING,
<del> )
<del> app.dag_bag.get_dag("example_subdag_operator").create_dagrun(
<del> run_id=DEFAULT_DAGRUN,
<del> run_type=DagRunType.SCHEDULED,
<del> execution_date=DEFAULT_DATE,
<del> data_interval=(DEFAULT_DATE, DEFAULT_DATE),
<del> start_date=timezone.utcnow(),
<del> state=State.RUNNING,
<del> )
<del> app.dag_bag.get_dag("example_xcom").create_dagrun(
<del> run_id=DEFAULT_DAGRUN,
<del> run_type=DagRunType.SCHEDULED,
<del> execution_date=DEFAULT_DATE,
<del> data_interval=(DEFAULT_DATE, DEFAULT_DATE),
<del> start_date=timezone.utcnow(),
<del> state=State.RUNNING,
<del> )
<add> with freezegun.freeze_time(DEFAULT_DATE):
<add> app.dag_bag.get_dag("example_bash_operator").create_dagrun(
<add> run_id=DEFAULT_DAGRUN,
<add> run_type=DagRunType.SCHEDULED,
<add> execution_date=DEFAULT_DATE,
<add> data_interval=(DEFAULT_DATE, DEFAULT_DATE),
<add> start_date=timezone.utcnow(),
<add> state=State.RUNNING,
<add> )
<add> app.dag_bag.get_dag("example_subdag_operator").create_dagrun(
<add> run_id=DEFAULT_DAGRUN,
<add> run_type=DagRunType.SCHEDULED,
<add> execution_date=DEFAULT_DATE,
<add> data_interval=(DEFAULT_DATE, DEFAULT_DATE),
<add> start_date=timezone.utcnow(),
<add> state=State.RUNNING,
<add> )
<add> app.dag_bag.get_dag("example_xcom").create_dagrun(
<add> run_id=DEFAULT_DAGRUN,
<add> run_type=DagRunType.SCHEDULED,
<add> execution_date=DEFAULT_DATE,
<add> data_interval=(DEFAULT_DATE, DEFAULT_DATE),
<add> start_date=timezone.utcnow(),
<add> state=State.RUNNING,
<add> )
<ide> yield
<ide> clear_db_runs()
<ide>
<ide> def test_graph_view_doesnt_fail_on_recursion_error(app, dag_maker, admin_client)
<ide> url = f'/dags/{dag.dag_id}/graph'
<ide> resp = admin_client.get(url, follow_redirects=True)
<ide> assert resp.status_code == 200
<add>
<add>
<add>def test_task_instances(admin_client):
<add> """Test task_instances view."""
<add> resp = admin_client.get(
<add> f'/object/task_instances?dag_id=example_bash_operator&execution_date={DEFAULT_DATE}',
<add> follow_redirects=True,
<add> )
<add> assert resp.status_code == 200
<add> assert resp.json == {
<add> 'also_run_this': {
<add> 'dag_id': 'example_bash_operator',
<add> 'duration': None,
<add> 'end_date': None,
<add> 'executor_config': {},
<add> 'external_executor_id': None,
<add> 'hostname': '',
<add> 'job_id': None,
<add> 'map_index': -1,
<add> 'max_tries': 0,
<add> 'next_kwargs': None,
<add> 'next_method': None,
<add> 'operator': 'BashOperator',
<add> 'pid': None,
<add> 'pool': 'default_pool',
<add> 'pool_slots': 1,
<add> 'priority_weight': 2,
<add> 'queue': 'default',
<add> 'queued_by_job_id': None,
<add> 'queued_dttm': None,
<add> 'run_id': 'TEST_DAGRUN',
<add> 'start_date': None,
<add> 'state': None,
<add> 'task_id': 'also_run_this',
<add> 'trigger_id': None,
<add> 'trigger_timeout': None,
<add> 'try_number': 1,
<add> 'unixname': 'root',
<add> 'updated_at': DEFAULT_DATE.isoformat(),
<add> },
<add> 'run_after_loop': {
<add> 'dag_id': 'example_bash_operator',
<add> 'duration': None,
<add> 'end_date': None,
<add> 'executor_config': {},
<add> 'external_executor_id': None,
<add> 'hostname': '',
<add> 'job_id': None,
<add> 'map_index': -1,
<add> 'max_tries': 0,
<add> 'next_kwargs': None,
<add> 'next_method': None,
<add> 'operator': 'BashOperator',
<add> 'pid': None,
<add> 'pool': 'default_pool',
<add> 'pool_slots': 1,
<add> 'priority_weight': 2,
<add> 'queue': 'default',
<add> 'queued_by_job_id': None,
<add> 'queued_dttm': None,
<add> 'run_id': 'TEST_DAGRUN',
<add> 'start_date': None,
<add> 'state': None,
<add> 'task_id': 'run_after_loop',
<add> 'trigger_id': None,
<add> 'trigger_timeout': None,
<add> 'try_number': 1,
<add> 'unixname': 'root',
<add> 'updated_at': DEFAULT_DATE.isoformat(),
<add> },
<add> 'run_this_last': {
<add> 'dag_id': 'example_bash_operator',
<add> 'duration': None,
<add> 'end_date': None,
<add> 'executor_config': {},
<add> 'external_executor_id': None,
<add> 'hostname': '',
<add> 'job_id': None,
<add> 'map_index': -1,
<add> 'max_tries': 0,
<add> 'next_kwargs': None,
<add> 'next_method': None,
<add> 'operator': 'EmptyOperator',
<add> 'pid': None,
<add> 'pool': 'default_pool',
<add> 'pool_slots': 1,
<add> 'priority_weight': 1,
<add> 'queue': 'default',
<add> 'queued_by_job_id': None,
<add> 'queued_dttm': None,
<add> 'run_id': 'TEST_DAGRUN',
<add> 'start_date': None,
<add> 'state': None,
<add> 'task_id': 'run_this_last',
<add> 'trigger_id': None,
<add> 'trigger_timeout': None,
<add> 'try_number': 1,
<add> 'unixname': 'root',
<add> 'updated_at': DEFAULT_DATE.isoformat(),
<add> },
<add> 'runme_0': {
<add> 'dag_id': 'example_bash_operator',
<add> 'duration': None,
<add> 'end_date': None,
<add> 'executor_config': {},
<add> 'external_executor_id': None,
<add> 'hostname': '',
<add> 'job_id': None,
<add> 'map_index': -1,
<add> 'max_tries': 0,
<add> 'next_kwargs': None,
<add> 'next_method': None,
<add> 'operator': 'BashOperator',
<add> 'pid': None,
<add> 'pool': 'default_pool',
<add> 'pool_slots': 1,
<add> 'priority_weight': 3,
<add> 'queue': 'default',
<add> 'queued_by_job_id': None,
<add> 'queued_dttm': None,
<add> 'run_id': 'TEST_DAGRUN',
<add> 'start_date': None,
<add> 'state': None,
<add> 'task_id': 'runme_0',
<add> 'trigger_id': None,
<add> 'trigger_timeout': None,
<add> 'try_number': 1,
<add> 'unixname': 'root',
<add> 'updated_at': DEFAULT_DATE.isoformat(),
<add> },
<add> 'runme_1': {
<add> 'dag_id': 'example_bash_operator',
<add> 'duration': None,
<add> 'end_date': None,
<add> 'executor_config': {},
<add> 'external_executor_id': None,
<add> 'hostname': '',
<add> 'job_id': None,
<add> 'map_index': -1,
<add> 'max_tries': 0,
<add> 'next_kwargs': None,
<add> 'next_method': None,
<add> 'operator': 'BashOperator',
<add> 'pid': None,
<add> 'pool': 'default_pool',
<add> 'pool_slots': 1,
<add> 'priority_weight': 3,
<add> 'queue': 'default',
<add> 'queued_by_job_id': None,
<add> 'queued_dttm': None,
<add> 'run_id': 'TEST_DAGRUN',
<add> 'start_date': None,
<add> 'state': None,
<add> 'task_id': 'runme_1',
<add> 'trigger_id': None,
<add> 'trigger_timeout': None,
<add> 'try_number': 1,
<add> 'unixname': 'root',
<add> 'updated_at': DEFAULT_DATE.isoformat(),
<add> },
<add> 'runme_2': {
<add> 'dag_id': 'example_bash_operator',
<add> 'duration': None,
<add> 'end_date': None,
<add> 'executor_config': {},
<add> 'external_executor_id': None,
<add> 'hostname': '',
<add> 'job_id': None,
<add> 'map_index': -1,
<add> 'max_tries': 0,
<add> 'next_kwargs': None,
<add> 'next_method': None,
<add> 'operator': 'BashOperator',
<add> 'pid': None,
<add> 'pool': 'default_pool',
<add> 'pool_slots': 1,
<add> 'priority_weight': 3,
<add> 'queue': 'default',
<add> 'queued_by_job_id': None,
<add> 'queued_dttm': None,
<add> 'run_id': 'TEST_DAGRUN',
<add> 'start_date': None,
<add> 'state': None,
<add> 'task_id': 'runme_2',
<add> 'trigger_id': None,
<add> 'trigger_timeout': None,
<add> 'try_number': 1,
<add> 'unixname': 'root',
<add> 'updated_at': DEFAULT_DATE.isoformat(),
<add> },
<add> 'this_will_skip': {
<add> 'dag_id': 'example_bash_operator',
<add> 'duration': None,
<add> 'end_date': None,
<add> 'executor_config': {},
<add> 'external_executor_id': None,
<add> 'hostname': '',
<add> 'job_id': None,
<add> 'map_index': -1,
<add> 'max_tries': 0,
<add> 'next_kwargs': None,
<add> 'next_method': None,
<add> 'operator': 'BashOperator',
<add> 'pid': None,
<add> 'pool': 'default_pool',
<add> 'pool_slots': 1,
<add> 'priority_weight': 2,
<add> 'queue': 'default',
<add> 'queued_by_job_id': None,
<add> 'queued_dttm': None,
<add> 'run_id': 'TEST_DAGRUN',
<add> 'start_date': None,
<add> 'state': None,
<add> 'task_id': 'this_will_skip',
<add> 'trigger_id': None,
<add> 'trigger_timeout': None,
<add> 'try_number': 1,
<add> 'unixname': 'root',
<add> 'updated_at': DEFAULT_DATE.isoformat(),
<add> },
<add> } | 2 |
Javascript | Javascript | add test for buffer.slice | 2f7234d89c3b7e17d6cf341ab12aaffebb337470 | <ide><path>test/simple/test-buffer-slice.js
<add>// Copyright Joyent, Inc. and other Node contributors.
<add>//
<add>// Permission is hereby granted, free of charge, to any person obtaining a
<add>// copy of this software and associated documentation files (the
<add>// "Software"), to deal in the Software without restriction, including
<add>// without limitation the rights to use, copy, modify, merge, publish,
<add>// distribute, sublicense, and/or sell copies of the Software, and to permit
<add>// persons to whom the Software is furnished to do so, subject to the
<add>// following conditions:
<add>//
<add>// The above copyright notice and this permission notice shall be included
<add>// in all copies or substantial portions of the Software.
<add>//
<add>// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
<add>// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
<add>// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
<add>// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
<add>// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
<add>// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
<add>// USE OR OTHER DEALINGS IN THE SOFTWARE.
<add>
<add>var common = require('../common');
<add>var assert = require('assert');
<add>
<add>var Buffer = require('buffer').Buffer;
<add>
<add>var buff = new Buffer(Buffer.poolSize + 1);
<add>var slicedBuffer = buff.slice();
<add>assert.equal(slicedBuffer.parent,
<add> buff,
<add> "slicedBufffer should have its parent set to the original " +
<add> " buffer"); | 1 |
Java | Java | expose prestartallcorethreads on executorservice | 2c53e9e308dc39c5988c014a12d32917f0b5528d | <ide><path>spring-context/src/main/java/org/springframework/scheduling/concurrent/ThreadPoolExecutorFactoryBean.java
<ide> public class ThreadPoolExecutorFactoryBean extends ExecutorConfigurationSupport
<ide>
<ide> private int keepAliveSeconds = 60;
<ide>
<del> private boolean allowCoreThreadTimeOut = false;
<del>
<ide> private int queueCapacity = Integer.MAX_VALUE;
<ide>
<add> private boolean allowCoreThreadTimeOut = false;
<add>
<ide> private boolean exposeUnconfigurableExecutor = false;
<ide>
<add> private boolean prestartAllCoreThreads = false;
<add>
<ide> @Nullable
<ide> private ExecutorService exposedExecutor;
<ide>
<ide> public void setQueueCapacity(int queueCapacity) {
<ide> this.queueCapacity = queueCapacity;
<ide> }
<ide>
<add> /**
<add> * Specify whether this FactoryBean should prestart all threads
<add> * for the created executor.
<add> * <p>Default is "false".
<add> * Switch this flag to "true" to prestart the threads allocated for the current executor
<add> * @see java.util.concurrent.ThreadPoolExecutor#prestartAllCoreThreads
<add> */
<add> public void setPrestartAllCoreThreads(boolean prestartAllCoreThreads) {
<add> this.prestartAllCoreThreads = prestartAllCoreThreads;
<add> }
<add>
<ide> /**
<ide> * Specify whether this FactoryBean should expose an unconfigurable
<ide> * decorator for the created executor.
<ide> protected ExecutorService initializeExecutor(
<ide> executor.allowCoreThreadTimeOut(true);
<ide> }
<ide>
<add> if (this.prestartAllCoreThreads) {
<add> executor.prestartAllCoreThreads();
<add> }
<add>
<ide> // Wrap executor with an unconfigurable decorator.
<ide> this.exposedExecutor = (this.exposeUnconfigurableExecutor ?
<ide> Executors.unconfigurableExecutorService(executor) : executor);
<ide><path>spring-context/src/test/java/org/springframework/scheduling/concurrent/ThreadPoolExecutorFactoryBeanTests.java
<ide>
<ide> package org.springframework.scheduling.concurrent;
<ide>
<add>import java.util.concurrent.BlockingQueue;
<ide> import java.util.concurrent.ExecutorService;
<ide> import java.util.concurrent.FutureTask;
<add>import java.util.concurrent.RejectedExecutionHandler;
<add>import java.util.concurrent.ThreadFactory;
<add>import java.util.concurrent.ThreadPoolExecutor;
<ide>
<ide> import org.junit.jupiter.api.Test;
<ide>
<add>import org.springframework.context.ApplicationContext;
<ide> import org.springframework.context.ConfigurableApplicationContext;
<ide> import org.springframework.context.annotation.AnnotationConfigApplicationContext;
<ide> import org.springframework.context.annotation.Bean;
<ide> import org.springframework.context.annotation.Configuration;
<ide>
<ide> import static org.assertj.core.api.Assertions.assertThat;
<add>import static org.mockito.Mockito.mock;
<add>import static org.mockito.Mockito.never;
<add>import static org.mockito.Mockito.verify;
<ide>
<ide> /**
<ide> * @author Juergen Hoeller
<ide> void defaultExecutor() throws Exception {
<ide> context.close();
<ide> }
<ide>
<add> @Test
<add> public void executorWithPreStartedThreads() throws Exception {
<add> ConfigurableApplicationContext context = new AnnotationConfigApplicationContext(ExecutorConfigWithPreStartedThreads.class);
<add> ThreadPoolExecutor executor = context.getBean("childExecutor", ThreadPoolExecutor.class);
<add>
<add> verify(executor).prestartAllCoreThreads();
<add> }
<add>
<add> @Test
<add> public void executorWithNoPreStartedThreads() throws Exception {
<add> ConfigurableApplicationContext context = new AnnotationConfigApplicationContext(ExecutorConfigWithNoPreStartedThreads.class);
<add> ThreadPoolExecutor executor = context.getBean("childExecutor", ThreadPoolExecutor.class);
<add>
<add> verify(executor, never()).prestartAllCoreThreads();
<add> }
<ide>
<ide> @Configuration
<ide> static class ExecutorConfig {
<ide> ThreadPoolExecutorFactoryBean executor() {
<ide>
<ide> }
<ide>
<add> @Configuration
<add> public static class ExecutorConfigWithPreStartedThreads {
<add>
<add> @Bean
<add> public ThreadPoolExecutorFactoryBean executorChildFactory() {
<add> ThreadPoolExecutorFactoryBeanMockingChild threadPoolExecutorFactoryBeanMockingChild = new ThreadPoolExecutorFactoryBeanMockingChild();
<add> threadPoolExecutorFactoryBeanMockingChild.setPrestartAllCoreThreads(true);
<add> return threadPoolExecutorFactoryBeanMockingChild;
<add> }
<add>
<add> @Bean
<add> public ExecutorService childExecutor() {
<add> return executorChildFactory().getObject();
<add> }
<add> }
<add>
<add> @Configuration
<add> public static class ExecutorConfigWithNoPreStartedThreads {
<add>
<add> @Bean
<add> public ThreadPoolExecutorFactoryBean executorChildFactory() {
<add> return new ThreadPoolExecutorFactoryBeanMockingChild();
<add> }
<add>
<add> @Bean
<add> public ExecutorService childExecutor() {
<add> return executorChildFactory().getObject();
<add> }
<add> }
<add>
<add> private static class ThreadPoolExecutorFactoryBeanMockingChild extends ThreadPoolExecutorFactoryBean {
<add> @Override
<add> protected ThreadPoolExecutor createExecutor(
<add> int corePoolSize, int maxPoolSize, int keepAliveSeconds, BlockingQueue<Runnable> queue,
<add> ThreadFactory threadFactory, RejectedExecutionHandler rejectedExecutionHandler) {
<add>
<add> return mock(ThreadPoolExecutor.class);
<add> }
<add> }
<add>
<add>
<ide> } | 2 |
Javascript | Javascript | add benchmark for fspromises.writefile | a97f01ea8cb3b421dfe70c0271b9b37e549b8b0b | <ide><path>benchmark/fs/writefile-promises.js
<add>// Call fs.promises.writeFile over and over again really fast.
<add>// Then see how many times it got called.
<add>// Yes, this is a silly benchmark. Most benchmarks are silly.
<add>'use strict';
<add>
<add>const path = require('path');
<add>const common = require('../common.js');
<add>const fs = require('fs');
<add>const assert = require('assert');
<add>const tmpdir = require('../../test/common/tmpdir');
<add>
<add>tmpdir.refresh();
<add>const filename = path.resolve(tmpdir.path,
<add> `.removeme-benchmark-garbage-${process.pid}`);
<add>let filesWritten = 0;
<add>const bench = common.createBenchmark(main, {
<add> duration: [5],
<add> encodingType: ['buf', 'asc', 'utf'],
<add> size: [2, 1024, 65535, 1024 * 1024],
<add> concurrent: [1, 10]
<add>});
<add>
<add>function main({ encodingType, duration, concurrent, size }) {
<add> let encoding;
<add> let chunk;
<add> switch (encodingType) {
<add> case 'buf':
<add> chunk = Buffer.alloc(size, 'b');
<add> break;
<add> case 'asc':
<add> chunk = 'a'.repeat(size);
<add> encoding = 'ascii';
<add> break;
<add> case 'utf':
<add> chunk = 'ü'.repeat(Math.ceil(size / 2));
<add> encoding = 'utf8';
<add> break;
<add> default:
<add> throw new Error(`invalid encodingType: ${encodingType}`);
<add> }
<add>
<add> let writes = 0;
<add> let benchEnded = false;
<add> bench.start();
<add> setTimeout(() => {
<add> benchEnded = true;
<add> bench.end(writes);
<add> for (let i = 0; i < filesWritten; i++) {
<add> try { fs.unlinkSync(`${filename}-${i}`); } catch { }
<add> }
<add> process.exit(0);
<add> }, duration * 1000);
<add>
<add> function write() {
<add> fs.promises.writeFile(`${filename}-${filesWritten++}`, chunk, encoding)
<add> .then(() => afterWrite())
<add> .catch((err) => afterWrite(err));
<add> }
<add>
<add> function afterWrite(er) {
<add> if (er) {
<add> if (er.code === 'ENOENT') {
<add> // Only OK if unlinked by the timer from main.
<add> assert.ok(benchEnded);
<add> return;
<add> }
<add> throw er;
<add> }
<add>
<add> writes++;
<add> if (!benchEnded)
<add> write();
<add> }
<add>
<add> while (concurrent--) write();
<add>} | 1 |
Javascript | Javascript | remove a duplicate file in angularfiles.js | 8c269883fd4353414ea5b6cf77c80bfa54a4ae2f | <ide><path>angularFiles.js
<ide> angularFiles = {
<ide> 'src/ngScenario/Describe.js',
<ide> 'src/ngScenario/Future.js',
<ide> 'src/ngScenario/ObjectModel.js',
<del> 'src/ngScenario/Describe.js',
<ide> 'src/ngScenario/Runner.js',
<ide> 'src/ngScenario/SpecRunner.js',
<ide> 'src/ngScenario/dsl.js', | 1 |
Ruby | Ruby | fix typo on method name | 3f866cbdabaa2f85c5aef754aa9a3127c99fba20 | <ide><path>activejob/lib/active_job/test_helper.rb
<ide> def assert_enqueued_with(args = {}, &_block)
<ide> serialized_args.all? { |key, value| value == job[key] }
<ide> end
<ide> assert matching_job, "No enqueued job found with #{args}"
<del> instanciate_job(matching_job)
<add> instantiate_job(matching_job)
<ide> ensure
<ide> queue_adapter.enqueued_jobs = original_enqueued_jobs + enqueued_jobs
<ide> end
<ide> def assert_performed_with(args = {}, &_block)
<ide> serialized_args.all? { |key, value| value == job[key] }
<ide> end
<ide> assert matching_job, "No performed job found with #{args}"
<del> instanciate_job(matching_job)
<add> instantiate_job(matching_job)
<ide> ensure
<ide> queue_adapter.performed_jobs = original_performed_jobs + performed_jobs
<ide> end
<ide> def serialize_args_for_assertion(args)
<ide> serialized_args
<ide> end
<ide>
<del> def instanciate_job(payload)
<add> def instantiate_job(payload)
<ide> job = payload[:job].new(*payload[:args])
<ide> job.scheduled_at = Time.at(payload[:at]) if payload.key?(:at)
<ide> job.queue_name = payload[:queue] | 1 |
Python | Python | remove two todo notes that got outdated | 3139a881346ff7ad4326ecd296e6eeddf6c268a0 | <ide><path>numpy/core/tests/test_datetime.py
<ide> def test_different_unit_comparison(self):
<ide> casting='unsafe'))
<ide>
<ide> # Shouldn't be able to compare datetime and timedelta
<del> # TODO: Changing to 'same_kind' or 'safe' casting in the ufuncs by
<del> # default is needed to properly catch this kind of thing...
<ide> a = np.array('2012-12-21', dtype='M8[D]')
<ide> b = np.array(3, dtype='m8[D]')
<del> #assert_raises(TypeError, np.less, a, b)
<del> assert_raises(TypeError, np.less, a, b, casting='same_kind')
<add> assert_raises(TypeError, np.less, a, b)
<add> # not even if "unsafe"
<add> assert_raises(TypeError, np.less, a, b, casting='unsafe')
<ide>
<ide> def test_datetime_like(self):
<ide> a = np.array([3], dtype='m8[4D]')
<ide><path>numpy/exceptions.py
<ide> """
<ide>
<ide>
<del>from ._utils import set_module as _set_module
<del>
<ide> __all__ = [
<ide> "ComplexWarning", "VisibleDeprecationWarning", "ModuleDeprecationWarning",
<ide> "TooHardError", "AxisError", "DTypePromotionError"]
<ide> _is_loaded = True
<ide>
<ide>
<del># TODO: One day, we should remove the _set_module here before removing them
<del># fully. Not doing it now, just to allow unpickling to work on older
<del># versions for a bit. (Module exists since NumPy 1.25.)
<del># This then also means that the typing stubs should be moved!
<del>
<del>
<ide> class ComplexWarning(RuntimeWarning):
<ide> """
<ide> The warning raised when casting a complex dtype to a real dtype. | 2 |
Javascript | Javascript | normalize controller lookup, fix | 532dec210363bc7f412eb2ac5bef9e8a9adc52fd | <ide><path>packages/container/lib/main.js
<ide> define("container",
<ide> register: function(type, name, factory, options) {
<ide> var fullName;
<ide>
<del>
<ide> if (type.indexOf(':') !== -1){
<ide> options = factory;
<ide> factory = name;
<ide> define("container",
<ide> fullName = type + ":" + name;
<ide> }
<ide>
<del> this.registry.set(fullName, factory);
<del> this._options.set(fullName, options || {});
<add> var normalizedName = this.normalize(fullName);
<add>
<add> this.registry.set(normalizedName, factory);
<add> this._options.set(normalizedName, options || {});
<ide> },
<ide>
<ide> resolve: function(fullName) {
<ide> return this.resolver(fullName) || this.registry.get(fullName);
<ide> },
<ide>
<add> normalize: function(fullName) {
<add> return fullName;
<add> },
<add>
<ide> lookup: function(fullName, options) {
<ide> options = options || {};
<ide>
<ide> define("container",
<ide> }
<ide>
<ide> function factoryFor(container, fullName) {
<del> return container.resolve(fullName);
<add> var name = container.normalize(fullName);
<add> return container.resolve(name);
<ide> }
<ide>
<ide> function instantiate(container, fullName) {
<ide><path>packages/container/tests/container_test.js
<ide> test("The container respect the resolver hook for `has`", function() {
<ide> ok(container.has('controller:post'), "the `has` method uses the resolver hook");
<ide> });
<ide>
<add>test("The container normalizes names before resolving", function() {
<add> var container = new Container();
<add> var PostController = factory();
<add>
<add> container.normalize = function(fullName) {
<add> return 'controller:post';
<add> };
<add>
<add> container.register('controller:post', PostController);
<add> var postController = container.lookup('wycats');
<add>
<add> ok(postController instanceof PostController, "Normalizes the name before resolving");
<add>});
<add>
<ide> test("The container can get options that should be applied to all factories for a given type", function() {
<ide> var container = new Container();
<ide> var PostView = factory();
<ide><path>packages/ember-application/lib/system/application.js
<ide> Ember.Application.reopenClass({
<ide> Ember.Container.defaultContainer = Ember.Container.defaultContainer || container;
<ide>
<ide> container.set = Ember.set;
<add> container.normalize = normalize;
<ide> container.resolver = resolverFor(namespace);
<ide> container.optionsForType('view', { singleton: false });
<ide> container.optionsForType('template', { instantiate: false });
<ide> function resolverFor(namespace) {
<ide>
<ide> if (type === 'template') {
<ide> var templateName = name.replace(/\./g, '/');
<add>
<ide> if (Ember.TEMPLATES[templateName]) {
<ide> return Ember.TEMPLATES[templateName];
<ide> }
<ide> function resolverFor(namespace) {
<ide> };
<ide> }
<ide>
<del>Ember.runLoadHooks('Ember.Application', Ember.Application);
<add>function normalize(fullName) {
<add> var split = fullName.split(':'),
<add> type = split[0],
<add> name = split[1];
<add>
<add> if (type !== 'template' && name.indexOf('.') > -1) {
<add> return type + ':' + name.replace(/\.(.)/g, function(m) { return m[1].toUpperCase(); });
<add> } else {
<add> return fullName;
<add> }
<add>}
<ide>
<add>Ember.runLoadHooks('Ember.Application', Ember.Application);
<ide><path>packages/ember-application/tests/system/application_test.js
<ide> module("Ember.Application Depedency Injection", {
<ide> application = Ember.Application.create().initialize();
<ide> });
<ide>
<del> application.Person = Ember.Object.extend({});
<del> application.Orange = Ember.Object.extend({});
<del> application.Email = Ember.Object.extend({});
<del> application.User = Ember.Object.extend({});
<add> application.Person = Ember.Object.extend({});
<add> application.Orange = Ember.Object.extend({});
<add> application.Email = Ember.Object.extend({});
<add> application.User = Ember.Object.extend({});
<add> application.PostIndexController = Ember.Object.extend({});
<ide>
<ide> application.register('model:person', application.Person, {singleton: false });
<ide> application.register('model:user', application.User, {singleton: false });
<ide> application.register('fruit:favorite', application.Orange);
<ide> application.register('communication:main', application.Email, {singleton: false});
<add> application.register('controller:postIndex', application.PostIndexController, {singleton: true});
<ide>
<ide> locator = application.__container__;
<ide>
<ide> module("Ember.Application Depedency Injection", {
<ide> }
<ide> });
<ide>
<add>test('container lookup is normalized', function() {
<add> ok(locator.lookup('controller:post.index') instanceof application.PostIndexController);
<add> ok(locator.lookup('controller:postIndex') instanceof application.PostIndexController);
<add>});
<add>
<ide> test('registered entities can be looked up later', function(){
<ide> equal(locator.resolve('model:person'), application.Person);
<ide> equal(locator.resolve('model:user'), application.User);
<ide> equal(locator.resolve('fruit:favorite'), application.Orange);
<ide> equal(locator.resolve('communication:main'), application.Email);
<add> equal(locator.resolve('controller:postIndex'), application.PostIndexController);
<ide>
<ide> equal(locator.lookup('fruit:favorite'), locator.lookup('fruit:favorite'), 'singleton lookup worked');
<ide> ok(locator.lookup('model:user') !== locator.lookup('model:user'), 'non-singleton lookup worked');
<ide> });
<ide>
<add>
<ide> test('injections', function(){
<ide> application.inject('model', 'fruit', 'fruit:favorite');
<ide> application.inject('model:user', 'communication', 'communication:main');
<ide><path>packages/ember-application/tests/system/controller_test.js
<ide> test("If a controller specifies an unavailable dependency, it raises", function(
<ide> container.lookup('controller:post');
<ide> }, /controller:posts/);
<ide> });
<del> | 5 |
PHP | PHP | add tests for gh-558 | 381e5b10bc4041038e14a4825212f340a620671b | <ide><path>lib/Cake/Test/Case/View/Helper/FormHelperTest.php
<ide> public function testFormSecurityArrayFields() {
<ide> $this->Form->create('Address');
<ide> $this->Form->input('Address.primary.1');
<ide> $this->assertEquals('Address.primary', $this->Form->fields[0]);
<add>
<add> $this->Form->input('Address.secondary.1.0');
<add> $this->assertEquals('Address.secondary', $this->Form->fields[1]);
<ide> }
<ide>
<ide> /** | 1 |
Go | Go | add parent img refcount for faster rmi | 292a1564dca2f32b9158a6886fadee1cc184f987 | <ide><path>graph/graph.go
<ide> type Graph struct {
<ide> imageMutex imageMutex // protect images in driver.
<ide> retained *retainedLayers
<ide> tarSplitDisabled bool
<add>
<add> parentRefs map[string]int
<add> parentRefsMutex sync.Mutex
<ide> }
<ide>
<ide> // file names for ./graph/<ID>/
<ide> func NewGraph(root string, driver graphdriver.Driver) (*Graph, error) {
<ide> }
<ide>
<ide> graph := &Graph{
<del> root: abspath,
<del> idIndex: truncindex.NewTruncIndex([]string{}),
<del> driver: driver,
<del> retained: &retainedLayers{layerHolders: make(map[string]map[string]struct{})},
<add> root: abspath,
<add> idIndex: truncindex.NewTruncIndex([]string{}),
<add> driver: driver,
<add> retained: &retainedLayers{layerHolders: make(map[string]map[string]struct{})},
<add> parentRefs: make(map[string]int),
<ide> }
<ide>
<ide> // Windows does not currently support tarsplit functionality.
<ide> func (graph *Graph) Register(img *image.Image, layerData io.Reader) (err error)
<ide> return err
<ide> }
<ide> graph.idIndex.Add(img.ID)
<add>
<add> graph.parentRefsMutex.Lock()
<add> if img.Parent != "" {
<add> graph.parentRefs[img.Parent]++
<add> }
<add> graph.parentRefsMutex.Unlock()
<add>
<ide> return nil
<ide> }
<ide>
<ide> func (graph *Graph) Delete(name string) error {
<ide> if err != nil {
<ide> return err
<ide> }
<add> img, err := graph.Get(id)
<add> if err != nil {
<add> return err
<add> }
<ide> tmp, err := graph.mktemp()
<ide> graph.idIndex.Delete(id)
<ide> if err == nil {
<ide> func (graph *Graph) Delete(name string) error {
<ide> }
<ide> // Remove rootfs data from the driver
<ide> graph.driver.Remove(id)
<add>
<add> graph.parentRefsMutex.Lock()
<add> if img.Parent != "" {
<add> graph.parentRefs[img.Parent]--
<add> if graph.parentRefs[img.Parent] == 0 {
<add> delete(graph.parentRefs, img.Parent)
<add> }
<add> }
<add> graph.parentRefsMutex.Unlock()
<add>
<ide> // Remove the trashed image directory
<ide> return os.RemoveAll(tmp)
<ide> }
<ide> func (graph *Graph) Map() map[string]*image.Image {
<ide> // The walking order is undetermined.
<ide> func (graph *Graph) walkAll(handler func(*image.Image)) {
<ide> graph.idIndex.Iterate(func(id string) {
<del> if img, err := graph.Get(id); err != nil {
<add> img, err := graph.Get(id)
<add> if err != nil {
<ide> return
<del> } else if handler != nil {
<add> }
<add> if handler != nil {
<ide> handler(img)
<ide> }
<ide> })
<ide> func (graph *Graph) ByParent() map[string][]*image.Image {
<ide>
<ide> // HasChildren returns whether the given image has any child images.
<ide> func (graph *Graph) HasChildren(img *image.Image) bool {
<del> return len(graph.ByParent()[img.ID]) > 0
<add> graph.parentRefsMutex.Lock()
<add> refCount := graph.parentRefs[img.ID]
<add> graph.parentRefsMutex.Unlock()
<add> return refCount > 0
<ide> }
<ide>
<ide> // Retain keeps the images and layers that are in the pulling chain so that
<ide> func (graph *Graph) Release(sessionID string, layerIDs ...string) {
<ide> // A head is an image which is not the parent of another image in the graph.
<ide> func (graph *Graph) Heads() map[string]*image.Image {
<ide> heads := make(map[string]*image.Image)
<del> byParent := graph.ByParent()
<ide> graph.walkAll(func(image *image.Image) {
<ide> // If it's not in the byParent lookup table, then
<ide> // it's not a parent -> so it's a head!
<del> if _, exists := byParent[image.ID]; !exists {
<add> graph.parentRefsMutex.Lock()
<add> if _, exists := graph.parentRefs[image.ID]; !exists {
<ide> heads[image.ID] = image
<ide> }
<add> graph.parentRefsMutex.Unlock()
<ide> })
<ide> return heads
<ide> }
<ide><path>pkg/truncindex/truncindex.go
<del>// Package truncindex package provides a general 'index tree', used by Docker
<add>// Package truncindex provides a general 'index tree', used by Docker
<ide> // in order to be able to reference containers by only a few unambiguous
<ide> // characters of their id.
<ide> package truncindex | 2 |
Javascript | Javascript | use standard for loop instead of for..of | ad6cd7fa9cd31d6997bd7505e5b77587441491c4 | <ide><path>lib/internal/streams/duplex.js
<ide> ObjectSetPrototypeOf(Duplex.prototype, Readable.prototype);
<ide> ObjectSetPrototypeOf(Duplex, Readable);
<ide>
<ide> {
<add> const keys = ObjectKeys(Writable.prototype);
<ide> // Allow the keys array to be GC'ed.
<del> for (const method of ObjectKeys(Writable.prototype)) {
<add> for (let i = 0; i < keys.length; i++) {
<add> const method = keys[i];
<ide> if (!Duplex.prototype[method])
<ide> Duplex.prototype[method] = Writable.prototype[method];
<ide> } | 1 |
PHP | PHP | add operator support to collection@where | 83c3c56df9ec1ee764134021ce055db7bdc26038 | <ide><path>src/Illuminate/Support/Collection.php
<ide> public function filter(callable $callback = null)
<ide> * Filter items by the given key value pair.
<ide> *
<ide> * @param string $key
<add> * @param mixed $operator
<ide> * @param mixed $value
<del> * @param bool $strict
<ide> * @return static
<ide> */
<del> public function where($key, $value, $strict = true)
<add> public function where($key, $operator, $value = null)
<ide> {
<del> return $this->filter(function ($item) use ($key, $value, $strict) {
<del> return $strict ? data_get($item, $key) === $value
<del> : data_get($item, $key) == $value;
<del> });
<add> if (func_num_args() == 2) {
<add> $value = $operator;
<add>
<add> $operator = '===';
<add> }
<add>
<add> return $this->filter($this->operatorChecker($key, $operator, $value));
<ide> }
<ide>
<ide> /**
<ide> public function where($key, $value, $strict = true)
<ide> */
<ide> public function whereLoose($key, $value)
<ide> {
<del> return $this->where($key, $value, false);
<add> return $this->where($key, '=', $value);
<ide> }
<ide>
<ide> /**
<ide> public function zip($items)
<ide> return new static(call_user_func_array('array_map', $params));
<ide> }
<ide>
<add> /**
<add> * Get an operator checker callback.
<add> *
<add> * @param string $key
<add> * @param string $operator
<add> * @param mixed $value
<add> * @return \Closure
<add> */
<add> protected function operatorChecker($key, $operator, $value)
<add> {
<add> return function ($item) use ($key, $operator, $value) {
<add> $retrieved = data_get($item, $key);
<add>
<add> switch ($operator) {
<add> default:
<add> case '=':
<add> case '==': return $retrieved == $value;
<add> case '===': return $retrieved === $value;
<add> case '<=': return $retrieved <= $value;
<add> case '>=': return $retrieved >= $value;
<add> case '<': return $retrieved < $value;
<add> case '>': return $retrieved > $value;
<add> case '<>':
<add> case '!=': return $retrieved != $value;
<add> case '!==': return $retrieved !== $value;
<add> }
<add> };
<add> }
<add>
<ide> /**
<ide> * Get the collection of items as a plain array.
<ide> *
<ide><path>tests/Support/SupportCollectionTest.php
<ide> public function testFilter()
<ide> public function testWhere()
<ide> {
<ide> $c = new Collection([['v' => 1], ['v' => 2], ['v' => 3], ['v' => '3'], ['v' => 4]]);
<del> $this->assertEquals([['v' => 3]], $c->where('v', 3)->values()->all());
<add>
<add> $this->assertEquals(
<add> [['v' => 3]],
<add> $c->where('v', 3)->values()->all()
<add> );
<add> $this->assertEquals(
<add> [['v' => 3], ['v' => '3']],
<add> $c->where('v', '=', 3)->values()->all()
<add> );
<add> $this->assertEquals(
<add> [['v' => 3], ['v' => '3']],
<add> $c->where('v', '==', 3)->values()->all()
<add> );
<add> $this->assertEquals(
<add> [['v' => 3], ['v' => '3']],
<add> $c->where('v', 'garbage', 3)->values()->all()
<add> );
<add> $this->assertEquals(
<add> [['v' => 3]],
<add> $c->where('v', '===', 3)->values()->all()
<add> );
<add>
<add> $this->assertEquals(
<add> [['v' => 1], ['v' => 2], ['v' => 4]],
<add> $c->where('v', '<>', 3)->values()->all()
<add> );
<add> $this->assertEquals(
<add> [['v' => 1], ['v' => 2], ['v' => 4]],
<add> $c->where('v', '!=', 3)->values()->all()
<add> );
<add> $this->assertEquals(
<add> [['v' => 1], ['v' => 2], ['v' => '3'], ['v' => 4]],
<add> $c->where('v', '!==', 3)->values()->all()
<add> );
<add> $this->assertEquals(
<add> [['v' => 1], ['v' => 2], ['v' => 3], ['v' => '3']],
<add> $c->where('v', '<=', 3)->values()->all()
<add> );
<add> $this->assertEquals(
<add> [['v' => 3], ['v' => '3'], ['v' => 4]],
<add> $c->where('v', '>=', 3)->values()->all()
<add> );
<add> $this->assertEquals(
<add> [['v' => 1], ['v' => 2]],
<add> $c->where('v', '<', 3)->values()->all()
<add> );
<add> $this->assertEquals(
<add> [['v' => 4]],
<add> $c->where('v', '>', 3)->values()->all()
<add> );
<ide> }
<ide>
<ide> public function testWhereLoose() | 2 |
Text | Text | update maintainer documentation | 1b609ceeb344512319edc50ff72e0d0026f702a7 | <ide><path>docs/Maintainer-Guidelines.md
<ide> This document is current practice. If you wish to change or discuss any of the b
<ide>
<ide> ## Mission
<ide>
<del>Homebrew aims to be the missing package manager for macOS. Its primary goal is to be useful to as many people as possible, while remaining maintainable to a professional, high standard by a small group of volunteers. Where possible and sensible, it should seek to use features of macOS to blend in with the macOS and Apple ecosystems.
<add>Homebrew aims to be the missing package manager for macOS (and Linux). Its primary goal is to be useful to as many people as possible, while remaining maintainable to a professional, high standard by a small group of volunteers. Where possible and sensible, it should seek to use features of macOS to blend in with the macOS and Apple ecosystems. On Linux and Windows, it should seek to be as self-contained as possible.
<ide>
<ide> ## Quick checklist
<ide>
<ide> Checking dependencies is important, because they will probably stick around
<ide> forever. Nobody really checks if they are necessary or not. Use the
<ide> `:optional` and `:recommended` modifiers as appropriate.
<ide>
<del>Depend on as little stuff as possible. Disable X11 functionality by default.
<del>For example, we build Wireshark, but not the heavy GTK/Qt GUI by default.
<add>Depend on as little stuff as possible. Disable X11 functionality if possible.
<add>For example, we build Wireshark, but not the heavy GUI.
<ide>
<ide> Homebrew is about Unix software. Stuff that builds to an `.app` should
<del>probably be in Homebrew Cask instead.
<add>be in Homebrew Cask instead.
<ide>
<ide> ### Naming
<add>
<ide> The name is the strictest item, because avoiding a later name change is
<ide> desirable.
<ide>
<ide> underscores and hyphens and so on.
<ide> We now accept versioned formulae as long as they [meet the requirements](Versions.md).
<ide>
<ide> ### Merging, rebasing, cherry-picking
<add>
<ide> Merging should be done in the `Homebrew/brew` repository to preserve history & GPG commit signing,
<ide> and squash/merge via GitHub should be used for formulae where those formulae
<ide> don't need bottles or the change does not require new bottles to be pulled.
<ide> the commits. Our main branch history should be useful to other people,
<ide> not confusing.
<ide>
<ide> ### Testing
<add>
<ide> We need to at least check that it builds. Use the [Brew Test Bot](Brew-Test-Bot.md) for this.
<ide>
<ide> Verify the formula works if possible. If you can’t tell (e.g. if it’s a
<ide> that bug must be fixed, or worked around in the formula to yield a passing test,
<ide> before the PR can be merged.
<ide>
<ide> ## Common “gotchas”
<del>1. [Ensure you have set your username and email address
<del> properly](https://help.github.com/articles/setting-your-email-in-git/)
<del>2. Sign off cherry-picks if you amended them ([GitX-dev](https://github.com/rowanj/gitx)
<del> can do this, otherwise there is a command-line flag for it)
<del>3. If the commit fixes a bug, use “Fixes \#104” syntax to close the bug
<del> report and link to the commit
<add>
<add>1. [Ensure you have set your username and email address properly](https://help.github.com/articles/setting-your-email-in-git/)
<add>2. Sign off cherry-picks if you amended them (use `git -s`)
<add>3. If the commit fixes a bug, use “Fixes \#104” syntax to close the bug report and link to the commit
<ide>
<ide> ### Duplicates
<add>
<ide> We now accept stuff that comes with macOS as long as it uses `keg_only :provided_by_macos` to be keg-only by default.
<ide>
<ide> ### Add comments
<add>
<ide> It may be enough to refer to an issue ticket, but make sure changes are clear so that
<ide> if you came to them unaware of the surrounding issues they would make sense
<ide> to you. Many times on other projects I’ve seen code removed because the
<ide> new guy didn’t know why it was there. Regressions suck.
<ide>
<ide> ### Don’t allow bloated diffs
<add>
<ide> Amend a cherry-pick to remove commits that are only changes in
<ide> whitespace. They are not acceptable because our history is important and
<ide> `git blame` should be useful.
<ide> of modification that is not whitespace in it. But be careful about
<ide> making changes to inline patches—make sure they still apply.
<ide>
<ide> ### Adding or updating formulae
<del>Any one maintainer is necessary to approve and merge the addition of a new or updated formula which passes CI. However, if the formula addition or update proves controversial the maintainer who adds it will be expected to answer requests and fix problems that arise with it in future.
<add>
<add>Only one maintainer is necessary to approve and merge the addition of a new or updated formula which passes CI. However, if the formula addition or update proves controversial the maintainer who adds it will be expected to answer requests and fix problems that arise with it in future.
<ide>
<ide> ### Removing formulae
<add>
<ide> Formulae that:
<ide>
<ide> - work on at least 2/3 of our supported macOS versions in the default Homebrew prefix
<ide> Formulae that:
<ide> should not be removed from Homebrew. The exception to this rule are [versioned formulae](Versions.md) for which there are higher standards of usage and a maximum number of versions for a given formula.
<ide>
<ide> ### Closing issues/PRs
<add>
<ide> Maintainers (including the lead maintainer) should not close issues or pull requests (note a merge is not considered a close in this case) opened by other maintainers unless they are stale (i.e. have seen no updates for 28 days) in which case they can be closed by any maintainer. Any maintainer is encouraged to reopen a closed issue when they wish to do additional work on the issue.
<ide>
<ide> Any maintainer can merge any PR they have carefully reviewed and is passing CI that has been opened by any other maintainer. If you do not wish to have other maintainers merge your PRs: please use the `do not merge` label to indicate that until you're ready to merge it yourself.
<ide>
<ide> ## Reverting PRs
<add>
<ide> Any maintainer can revert a PR created by another maintainer after a user submitted issue or CI failure that results. The maintainer who created the original PR should be given no less than an hour to fix the issue themselves or decide to revert the PR themselves if they would rather.
<ide>
<ide> ## Communication
<add>
<ide> Maintainers have a variety of ways to communicate with each other:
<ide>
<ide> - Homebrew's public repositories on GitHub
<ide> All maintainers (and lead maintainer) communication through any medium is bound
<ide> Maintainers should feel free to pleasantly disagree with the work and decisions of other maintainers. Healthy, friendly, technical disagreement between maintainers is actively encouraged and should occur in public on the issue tracker to make the project better. Interpersonal issues should be handled privately in Slack, ideally with moderation. If work or decisions are insufficiently documented or explained any maintainer or contributor should feel free to ask for clarification. No maintainer may ever justify a decision with e.g. "because I say so" or "it was I who did X" alone. Off-topic discussions on the issue tracker, [bike-shedding](https://en.wikipedia.org/wiki/Law_of_triviality) and personal attacks are forbidden.
<ide>
<ide> ## Project lead guidelines
<add>
<ide> There should be one project lead for Homebrew. On February 4, 2019, coinciding with the Homebrew maintainers' conference, Mike McQuaid stepped down as lead maintainer of Homebrew. He was elected by a supermajority of Homebrew maintainers into the new project lead role.
<ide>
<ide> Additional documentation for the project lead and governance are to follow.
<ide><path>docs/New-Maintainer-Checklist.md
<ide>
<ide> **This is a guide used by existing maintainers to invite new maintainers. You might find it interesting but there's nothing here users should have to know.**
<ide>
<del>There's someone who has been making consistently high-quality contributions to Homebrew for a long time and shown themselves able to make slightly more advanced contributions than just e.g. formula updates? Let's invite them to be a maintainer!
<add>There's someone who has been making consistently high-quality contributions to Homebrew and shown themselves able to make slightly more advanced contributions than just e.g. formula updates? Let's invite them to be a maintainer!
<ide>
<ide> First, send them the invitation email:
<ide>
<del>```
<add>```markdown
<ide> The Homebrew team and I really appreciate your help on issues, pull requests and
<ide> your contributions to Homebrew.
<ide>
<ide> We would like to invite you to have commit access and be a Homebrew maintainer.
<ide> If you agree to be a maintainer, you should spend a significant proportion of
<del>the time you are working on Homebrew fixing user-reported issues, resolving any
<del>issues that arise from your code in a timely fashion and reviewing user
<del>contributions. You should also be making contributions to Homebrew every month
<del>unless you are ill or on vacation (and please let another maintainer know if
<del>that's the case so we're aware you won't be able to help while you are out).
<add>the time you are working on Homebrew applying and self-merging widely used
<add>changes (e.g. version updates), triaging, fixing and debugging user-reported
<add>issues, or reviewing user pull requests. You should also be making contributions
<add>to Homebrew at least once per quarter.
<ide>
<del>You will need to watch Homebrew/brew and/or Homebrew/homebrew-core. Let us know
<del>which (or both) so we can grant you commit access appropriately.
<add>You will should watch or regularly check Homebrew/brew and/or
<add>Homebrew/homebrew-core. Let us know which (or both) so we can grant you commit
<add>access appropriately.
<ide>
<ide> If you're no longer able to perform all of these tasks, please continue to
<ide> contribute to Homebrew, but we will ask you to step down as a maintainer.
<ide> A few requests:
<ide> multiple formulae and let it auto-close issues wherever possible (it may
<ide> take ~5m). When this isn't necessary use GitHub's "Merge pull request"
<ide> button in "create a merge commit" mode for Homebrew/brew or "squash and
<del> merge" for a single formulae change. If in doubt, check with e.g. GitX that
<del> you've not accidentally added merge commits.
<add> merge" for a single formulae change. If in doubt, check with e.g. Fork.app
<add> that you've not accidentally added merge commits.
<ide> - Still create your branches on your fork rather than in the main repository.
<ide> Note GitHub's UI will create edits and reverts on the main repository if you
<ide> make edits or click "Revert" on the Homebrew/brew repository rather than your
<ide> If they accept, follow a few steps to get them set up:
<ide> - Ask them to (regularly) review remove any unneeded [GitHub personal access tokens](https://github.com/settings/tokens).
<ide> - Add them to [Homebrew/brew's README](https://github.com/Homebrew/brew/edit/master/README.md).
<ide>
<del>If they are also interested in doing system administration work:
<add>If they are interested in doing system administration work or Homebrew/brew releases:
<ide>
<ide> - Invite them to the [`homebrew-ops` private operations mailing list](https://lists.sfconservancy.org/mailman/admin/homebrew-ops/members/add).
<ide> - Invite them to the [`homebrew` private 1Password](https://homebrew.1password.com/people).
<ide>
<del>If they want to consume raw anonymous aggregate analytics data (rather than use `brew formula-analytics`):
<del>
<del>- Invite them to [Google Analytics](https://analytics.google.com/analytics/web/?authuser=1#management/Settings/a76679469w115400090p120682403/%3Fm.page%3DAccountUsers/).
<add>If they are elected to of the Homebrew's [Software Freedom Conservancy](https://sfconservancy.org) Project Leadership Committee:
<ide>
<del>Once they have been active maintainers for at least a year and had some activity on more than one Homebrew organisation repository (or one repository and helped with system administration work):
<del>
<del>- Homebrew's [Software Freedom Conservancy](https://sfconservancy.org) Project Leadership Committee can take a vote on whether to extend an offer to the maintainer to join the committee. If they accept, email their name, email and employer to [email protected], make them [owners on the Homebrew GitHub organisation](https://github.com/orgs/Homebrew/people) and add them to the relevant section of the [Homebrew/brew's README](https://github.com/Homebrew/brew/edit/master/README.md).
<add>- Email their name, email and employer to [email protected]
<add>- Make them [owners on the Homebrew GitHub organisation](https://github.com/orgs/Homebrew/people)
<add>- Invite them to the [**@Homebrew/plc** team](https://github.com/orgs/Homebrew/teams/plc/members)
<add>- Invite them to [Google Analytics](https://analytics.google.com/analytics/web/#management/Settings/a76679469w115400090p120682403/%3Fm.page%3DAccountUsers/) and add them to the relevant section of the [Homebrew/brew's README](https://github.com/Homebrew/brew/edit/master/README.md).
<add>- Invite them to the [`homebrew` private 1Password](https://homebrew.1password.com/people).
<add>- Make them owners on the [`machomebrew` private maintainers Slack](https://machomebrew.slack.com/admin)).
<ide>
<ide> If there are problems, ask them to step down as a maintainer and revoke their access to all of the above.
<ide>
<ide><path>docs/README.md
<ide> # Documentation
<ide>
<ide> ## Users
<add>
<ide> - [`brew` man-page (command documentation)](Manpage.md)
<ide> - [Troubleshooting](Troubleshooting.md)
<ide> - [Installation](Installation.md)
<ide> - [Kickstarter Supporters](Kickstarter-Supporters.md)
<ide>
<ide> ## Contributors
<add>
<ide> - [How To Open A Pull Request (and get it merged)](How-To-Open-a-Homebrew-Pull-Request.md)
<ide> - [Formula Cookbook](Formula-Cookbook.md)
<ide> - [Acceptable Formulae](Acceptable-Formulae.md)
<ide> - [Prose Style Guidelines](Prose-Style-Guidelines.md)
<ide>
<ide> ## Maintainers
<add>
<ide> - [New Maintainer Checklist](New-Maintainer-Checklist.md)
<ide> - [Maintainers: Avoiding Burnout](Maintainers-Avoiding-Burnout.md)
<ide> - [Maintainer Guidelines](Maintainer-Guidelines.md)
<ide> - [Brew Test Bot For Maintainers](Brew-Test-Bot-For-Core-Contributors.md)
<ide> - [Common Issues for Maintainers](Common-Issues-for-Core-Contributors.md)
<add>- [Releases](Releases.md)
<ide><path>docs/Releases.md
<add># Releases
<add>
<add>Since Homebrew 1.0.0 most Homebrew users (those who haven't run a `dev-cmd` or
<add>set `HOMEBREW_DEVELOPER=1`) require tags on the [Homebrew/brew repository](https://github.com/homebrew/brew)
<add>in order to get new versions of Homebrew. There are a few steps in making a new
<add>Homebrew release:
<add>
<add>1. Check the [Homebrew/brew pull requests](https://github.com/homebrew/brew/pulls)
<add> and [issues](https://github.com/homebrew/brew/issues) to see if there is
<add> anything pressing that needs to be fixed or merged before the next release.
<add> If so, fix and merge these changes.
<add>2. After no code changes have happened for at least a few hours (ideally 24 hours)
<add> and you are confident there's no major regressions on the current `master`
<add> branch you can create a new Git tag. Ideally this should be signed with your
<add> GPG key. This can then be pushed to GitHub.
<add>3. Use `brew release-notes --markdown $PREVIOUS_TAG` to generate the release
<add> notes for the release. [Create a new release on GitHub](https://github.com/Homebrew/brew/releases)
<add> based on the new tag.
<add>
<add>If this is a major or minor release (e.g. X.0.0 or X.Y.0) then there are a few more steps:
<add>
<add>1. Before creating the tag you should delete any `odisabled` code, make any
<add> `odeprecated` code `odisabled` and add any new `odeprecations` that are
<add> desired.
<add>2. Write up a release notes blog post to https://brew.sh
<add> e.g. https://github.com/Homebrew/brew.sh/pull/319.
<add> This should use `brew release-notes` as input but have the wording adjusted
<add> to be more human readable and explain not just what has changed but why.
<add>3. When the release has shipped and the blog post has been merged, tweet the
<add> blog post as the @MacHomebrew Twitter account or tweet it yourself and
<add> retweet it with the @MacHomebrew Twitter account (credentials are in
<add> 1Password).
<add>4. Send the email to the Homebrew TinyLetter email list (credentials are in
<add> 1Password).
<add>5. Consider whether to submit it to other sources e.g. Hacker News, Reddit.
<add>
<add> - Pros: gets a wider reach and user feedback
<add> - Cons: negative comments are common and people take this as a chance to
<add> complain about Homebrew (regardless of their usage) | 4 |
Mixed | Javascript | add `imagesizes` and `imagesrcset` to know props | ca106a02d1648f4f0048b07c6b88f69aac175d3c | <ide><path>fixtures/attribute-behavior/AttributeTableSnapshot.md
<ide> | `imageRendering=(null)`| (initial)| `<null>` |
<ide> | `imageRendering=(undefined)`| (initial)| `<null>` |
<ide>
<add>## `imageSizes` (on `<link>` inside `<div>`)
<add>| Test Case | Flags | Result |
<add>| --- | --- | --- |
<add>| `imageSizes=(string)`| (changed)| `"a string"` |
<add>| `imageSizes=(empty string)`| (initial)| `<empty string>` |
<add>| `imageSizes=(array with string)`| (changed)| `"string"` |
<add>| `imageSizes=(empty array)`| (initial)| `<empty string>` |
<add>| `imageSizes=(object)`| (changed)| `"result of toString()"` |
<add>| `imageSizes=(numeric string)`| (changed)| `"42"` |
<add>| `imageSizes=(-1)`| (changed)| `"-1"` |
<add>| `imageSizes=(0)`| (changed)| `"0"` |
<add>| `imageSizes=(integer)`| (changed)| `"1"` |
<add>| `imageSizes=(NaN)`| (changed, warning)| `"NaN"` |
<add>| `imageSizes=(float)`| (changed)| `"99.99"` |
<add>| `imageSizes=(true)`| (initial, warning)| `<empty string>` |
<add>| `imageSizes=(false)`| (initial, warning)| `<empty string>` |
<add>| `imageSizes=(string 'true')`| (changed)| `"true"` |
<add>| `imageSizes=(string 'false')`| (changed)| `"false"` |
<add>| `imageSizes=(string 'on')`| (changed)| `"on"` |
<add>| `imageSizes=(string 'off')`| (changed)| `"off"` |
<add>| `imageSizes=(symbol)`| (initial, warning)| `<empty string>` |
<add>| `imageSizes=(function)`| (initial, warning)| `<empty string>` |
<add>| `imageSizes=(null)`| (initial)| `<empty string>` |
<add>| `imageSizes=(undefined)`| (initial)| `<empty string>` |
<add>
<add>## `imageSrcSet` (on `<link>` inside `<div>`)
<add>| Test Case | Flags | Result |
<add>| --- | --- | --- |
<add>| `imageSrcSet=(string)`| (changed)| `"a string"` |
<add>| `imageSrcSet=(empty string)`| (initial)| `<empty string>` |
<add>| `imageSrcSet=(array with string)`| (changed)| `"string"` |
<add>| `imageSrcSet=(empty array)`| (initial)| `<empty string>` |
<add>| `imageSrcSet=(object)`| (changed)| `"result of toString()"` |
<add>| `imageSrcSet=(numeric string)`| (changed)| `"42"` |
<add>| `imageSrcSet=(-1)`| (changed)| `"-1"` |
<add>| `imageSrcSet=(0)`| (changed)| `"0"` |
<add>| `imageSrcSet=(integer)`| (changed)| `"1"` |
<add>| `imageSrcSet=(NaN)`| (changed, warning)| `"NaN"` |
<add>| `imageSrcSet=(float)`| (changed)| `"99.99"` |
<add>| `imageSrcSet=(true)`| (initial, warning)| `<empty string>` |
<add>| `imageSrcSet=(false)`| (initial, warning)| `<empty string>` |
<add>| `imageSrcSet=(string 'true')`| (changed)| `"true"` |
<add>| `imageSrcSet=(string 'false')`| (changed)| `"false"` |
<add>| `imageSrcSet=(string 'on')`| (changed)| `"on"` |
<add>| `imageSrcSet=(string 'off')`| (changed)| `"off"` |
<add>| `imageSrcSet=(symbol)`| (initial, warning)| `<empty string>` |
<add>| `imageSrcSet=(function)`| (initial, warning)| `<empty string>` |
<add>| `imageSrcSet=(null)`| (initial)| `<empty string>` |
<add>| `imageSrcSet=(undefined)`| (initial)| `<empty string>` |
<add>
<ide> ## `in` (on `<feBlend>` inside `<svg>`)
<ide> | Test Case | Flags | Result |
<ide> | --- | --- | --- |
<ide><path>fixtures/attribute-behavior/src/attributes.js
<ide> const attributes = [
<ide> tagName: 'svg',
<ide> read: getSVGAttribute('image-rendering'),
<ide> },
<add> {name: 'imageSizes', tagName: 'link', read: getProperty('imageSizes')},
<add> {name: 'imageSrcSet', tagName: 'link', read: getProperty('imageSrcset')},
<ide> {
<ide> name: 'in',
<ide> read: getSVGAttribute('in'),
<ide><path>packages/react-dom/src/shared/possibleStandardNames.js
<ide> const possibleStandardNames = {
<ide> 'http-equiv': 'httpEquiv',
<ide> icon: 'icon',
<ide> id: 'id',
<add> imagesizes: 'imageSizes',
<add> imagesrcset: 'imageSrcSet',
<ide> innerhtml: 'innerHTML',
<ide> inputmode: 'inputMode',
<ide> integrity: 'integrity', | 3 |
Python | Python | use absolute import style | 3032a06c9bd8cc98387b14f7feceb1f5d76041fd | <ide><path>rest_framework/authtoken/admin.py
<ide> from django.contrib import admin
<del>from .models import Token
<add>from rest_framework.authtoken.models import Token
<ide>
<ide>
<ide> class TokenAdmin(admin.ModelAdmin): | 1 |
Javascript | Javascript | use regular require paths | bdebe575b7d5faeafcc2b99f598904328bfa2fbd | <ide><path>static/index.js
<ide> window.onload = function() {
<del> var path = require('path');
<ide> var ipc = require('ipc');
<ide> try {
<ide> // Skip "?loadSettings=".
<ide> window.onload = function() {
<ide>
<ide> require('vm-compatibility-layer');
<ide> require('coffee-script').register();
<del> require(path.resolve(__dirname, '..', 'src', 'coffee-cache')).register();
<add> require('../src/coffee-cache')).register();
<ide>
<del> ModuleCache = require(path.resolve(__dirname, '..', 'src', 'module-cache'));
<add> ModuleCache = require('../src/module-cache');
<ide> ModuleCache.add(loadSettings.resourcePath);
<ide> ModuleCache.register();
<ide> | 1 |
Javascript | Javascript | harmonize progress bar + stderr output | 4b841cb0b6d765813a02f9b348f9ff04854ee969 | <ide><path>benchmark/_benchmark_progress.js
<ide> class BenchmarkProgress {
<ide> `| ${fraction(completedFiles, scheduledFiles)} files ` +
<ide> `| ${fraction(completedRunsForFile, runsPerFile)} runs ` +
<ide> `| ${fraction(completedConfig, scheduledConfig)} configs]` +
<del> `: ${caption}`;
<add> `: ${caption} `;
<ide> }
<ide>
<ide> updateProgress(finished) { | 1 |
Javascript | Javascript | fix datepickerios e2e tests | 8270de9c2cfad721b554201ab97d645ffbfdc84b | <ide><path>RNTester/e2e/__tests__/DatePickerIOS-test.js
<ide> describe('DatePickerIOS', () => {
<ide> it('Should change indicator with datetime picker', async () => {
<ide> await openExampleWithTitle('Date and time picker');
<ide> const testID = 'date-and-time';
<del> const indicatorID = 'date-and-time-indicator';
<ide>
<ide> const testElement = await element(
<ide> by.type('UIPickerView').withAncestor(by.id(testID)),
<ide> );
<del> const indicator = await element(by.id(indicatorID));
<add> const dateIndicator = await element(by.id('date-indicator'));
<add> const timeIndicator = await element(by.id('time-indicator'));
<ide>
<ide> await expect(testElement).toBeVisible();
<del> await expect(indicator).toBeVisible();
<add> await expect(dateIndicator).toBeVisible();
<add> await expect(timeIndicator).toBeVisible();
<ide>
<ide> await testElement.setColumnToValue(0, 'Dec 4');
<ide> await testElement.setColumnToValue(1, '4');
<ide> await testElement.setColumnToValue(2, '10');
<ide> await testElement.setColumnToValue(3, 'AM');
<ide>
<del> await expect(indicator).toHaveText('12/4/2005 4:10 AM');
<add> await expect(dateIndicator).toHaveText('12/4/2005');
<add> await expect(timeIndicator).toHaveText('4:10 AM');
<ide> });
<ide>
<ide> it('Should change indicator with date-only picker', async () => {
<del> await openExampleWithTitle('Date only');
<add> await openExampleWithTitle('Date only picker');
<ide> const testID = 'date-only';
<del> const indicatorID = 'date-and-time-indicator';
<ide>
<ide> const testElement = await element(
<ide> by.type('UIPickerView').withAncestor(by.id(testID)),
<ide> );
<del> const indicator = await element(by.id(indicatorID));
<add> const indicator = await element(by.id('date-indicator'));
<ide>
<ide> await expect(testElement).toBeVisible();
<ide> await expect(indicator).toBeVisible();
<ide> describe('DatePickerIOS', () => {
<ide> await testElement.setColumnToValue(1, '3');
<ide> await testElement.setColumnToValue(2, '2006');
<ide>
<del> await expect(indicator).toHaveText('11/3/2006 4:10 AM');
<add> await expect(indicator).toHaveText('11/3/2006');
<ide> });
<ide> });
<ide><path>RNTester/js/DatePickerIOSExample.js
<ide>
<ide> const React = require('react');
<ide> const ReactNative = require('react-native');
<del>const {DatePickerIOS, StyleSheet, Text, TextInput, View} = ReactNative;
<add>const {DatePickerIOS, StyleSheet, Text, View} = ReactNative;
<ide>
<ide> type State = {|
<ide> date: Date,
<del> timeZoneOffsetInHours: number,
<ide> |};
<ide>
<ide> type Props = {|
<ide> type Props = {|
<ide> class WithDatePickerData extends React.Component<Props, State> {
<ide> state = {
<ide> date: new Date(),
<del> timeZoneOffsetInHours: (-1 * new Date().getTimezoneOffset()) / 60,
<ide> };
<ide>
<ide> onDateChange = date => {
<ide> this.setState({date: date});
<ide> };
<ide>
<del> onTimezoneChange = event => {
<del> const offset = parseInt(event.nativeEvent.text, 10);
<del> if (isNaN(offset)) {
<del> return;
<del> }
<del> this.setState({timeZoneOffsetInHours: offset});
<del> };
<del>
<ide> render() {
<del> // Ideally, the timezone input would be a picker rather than a
<del> // text input, but we don't have any pickers yet :(
<ide> return (
<ide> <View>
<ide> <WithLabel label="Value:">
<del> <Text testID="date-and-time-indicator">
<del> {this.state.date.toLocaleDateString() +
<del> ' ' +
<del> this.state.date.toLocaleTimeString([], {
<del> hour: '2-digit',
<del> minute: '2-digit',
<del> })}
<add> <Text testID="date-indicator">
<add> {this.state.date.toLocaleDateString()}
<add> </Text>
<add> <Text> </Text>
<add> <Text testID="time-indicator">
<add> {this.state.date.toLocaleTimeString([], {
<add> hour: '2-digit',
<add> minute: '2-digit',
<add> })}
<ide> </Text>
<del> </WithLabel>
<del> <WithLabel label="Timezone:">
<del> <TextInput
<del> onChange={this.onTimezoneChange}
<del> style={styles.textinput}
<del> value={this.state.timeZoneOffsetInHours.toString()}
<del> />
<del> <Text> hours from UTC</Text>
<ide> </WithLabel>
<ide> {this.props.children(this.state, this.onDateChange)}
<ide> </View>
<ide> exports.examples = [
<ide> testID="date-and-time"
<ide> date={state.date}
<ide> mode="datetime"
<del> timeZoneOffsetInMinutes={state.timeZoneOffsetInHours * 60}
<ide> onDateChange={onDateChange}
<ide> />
<ide> )}
<ide> exports.examples = [
<ide> testID="date-only"
<ide> date={state.date}
<ide> mode="date"
<del> timeZoneOffsetInMinutes={state.timeZoneOffsetInHours * 60}
<ide> onDateChange={onDateChange}
<ide> />
<ide> )}
<ide> exports.examples = [
<ide> },
<ide> },
<ide> {
<del> title: 'Time only picker, 10-minute interval',
<add> title: 'Picker with 20-minute interval',
<ide> render: function(): React.Element<any> {
<ide> return (
<ide> <WithDatePickerData>
<ide> {(state, onDateChange) => (
<ide> <DatePickerIOS
<del> testID="time-only"
<add> testID="date-and-time-with-interval"
<ide> date={state.date}
<del> mode="time"
<del> timeZoneOffsetInMinutes={state.timeZoneOffsetInHours * 60}
<add> minuteInterval={20}
<add> mode="datetime"
<ide> onDateChange={onDateChange}
<ide> />
<ide> )} | 2 |
Javascript | Javascript | implement identifierprefix option for useid | 4729ff6d1f191902897927ff4ecd3d1f390177fa | <ide><path>packages/react-art/src/ReactART.js
<ide> class Surface extends React.Component {
<ide>
<ide> this._surface = Mode.Surface(+width, +height, this._tagRef);
<ide>
<del> this._mountNode = createContainer(this._surface, LegacyRoot, false, null);
<add> this._mountNode = createContainer(
<add> this._surface,
<add> LegacyRoot,
<add> false,
<add> null,
<add> false,
<add> false,
<add> '',
<add> );
<ide> updateContainer(this.props.children, this._mountNode, this);
<ide> }
<ide>
<ide><path>packages/react-dom/src/__tests__/ReactDOMUseId-test.js
<ide> describe('useId', () => {
<ide>
<ide> function normalizeTreeIdForTesting(id) {
<ide> const [serverClientPrefix, base32, hookIndex] = id.split(':');
<del> if (serverClientPrefix === 'r') {
<add> if (serverClientPrefix.endsWith('r')) {
<ide> // Client ids aren't stable. For testing purposes, strip out the counter.
<ide> return (
<ide> 'CLIENT_GENERATED_ID' +
<ide> describe('useId', () => {
<ide> // Should have hydrated successfully
<ide> expect(span.current).toBe(dehydratedSpan);
<ide> });
<add>
<add> test('identifierPrefix option', async () => {
<add> function Child() {
<add> const id = useId();
<add> return <div>{id}</div>;
<add> }
<add>
<add> function App({showMore}) {
<add> return (
<add> <>
<add> <Child />
<add> <Child />
<add> {showMore && <Child />}
<add> </>
<add> );
<add> }
<add>
<add> await serverAct(async () => {
<add> const {pipe} = ReactDOMFizzServer.renderToPipeableStream(<App />, {
<add> identifierPrefix: 'custom-prefix-',
<add> });
<add> pipe(writable);
<add> });
<add> let root;
<add> await clientAct(async () => {
<add> root = ReactDOM.hydrateRoot(container, <App />, {
<add> identifierPrefix: 'custom-prefix-',
<add> });
<add> });
<add> expect(container).toMatchInlineSnapshot(`
<add> <div
<add> id="container"
<add> >
<add> <div>
<add> custom-prefix-R:1
<add> </div>
<add> <div>
<add> custom-prefix-R:2
<add> </div>
<add> </div>
<add> `);
<add>
<add> // Mount a new, client-only id
<add> await clientAct(async () => {
<add> root.render(<App showMore={true} />);
<add> });
<add> expect(container).toMatchInlineSnapshot(`
<add> <div
<add> id="container"
<add> >
<add> <div>
<add> custom-prefix-R:1
<add> </div>
<add> <div>
<add> custom-prefix-R:2
<add> </div>
<add> <div>
<add> custom-prefix-r:0
<add> </div>
<add> </div>
<add> `);
<add> });
<ide> });
<ide><path>packages/react-dom/src/client/ReactDOMLegacy.js
<ide> function legacyCreateRootFromDOMContainer(
<ide> null, // hydrationCallbacks
<ide> false, // isStrictMode
<ide> false, // concurrentUpdatesByDefaultOverride,
<add> '', // identiferPrefix
<ide> );
<ide> markContainerAsRoot(root.current, container);
<ide>
<ide><path>packages/react-dom/src/client/ReactDOMRoot.js
<ide> export type CreateRootOptions = {
<ide> // END OF TODO
<ide> unstable_strictMode?: boolean,
<ide> unstable_concurrentUpdatesByDefault?: boolean,
<add> identifierPrefix?: string,
<ide> ...
<ide> };
<ide>
<ide> export type HydrateRootOptions = {
<ide> // Options for all roots
<ide> unstable_strictMode?: boolean,
<ide> unstable_concurrentUpdatesByDefault?: boolean,
<add> identifierPrefix?: string,
<ide> ...
<ide> };
<ide>
<ide> export function createRoot(
<ide> null;
<ide> // END TODO
<ide>
<del> const isStrictMode = options != null && options.unstable_strictMode === true;
<del> let concurrentUpdatesByDefaultOverride = null;
<del> if (allowConcurrentByDefault) {
<del> concurrentUpdatesByDefaultOverride =
<del> options != null && options.unstable_concurrentUpdatesByDefault != null
<del> ? options.unstable_concurrentUpdatesByDefault
<del> : null;
<add> let isStrictMode = false;
<add> let concurrentUpdatesByDefaultOverride = false;
<add> let identifierPrefix = '';
<add> if (options !== null && options !== undefined) {
<add> if (options.unstable_strictMode === true) {
<add> isStrictMode = true;
<add> }
<add> if (
<add> allowConcurrentByDefault &&
<add> options.unstable_concurrentUpdatesByDefault === true
<add> ) {
<add> concurrentUpdatesByDefaultOverride = true;
<add> }
<add> if (options.identifierPrefix !== undefined) {
<add> identifierPrefix = options.identifierPrefix;
<add> }
<ide> }
<ide>
<ide> const root = createContainer(
<ide> export function createRoot(
<ide> hydrationCallbacks,
<ide> isStrictMode,
<ide> concurrentUpdatesByDefaultOverride,
<add> identifierPrefix,
<ide> );
<ide> markContainerAsRoot(root.current, container);
<ide>
<ide> export function hydrateRoot(
<ide> // For now we reuse the whole bag of options since they contain
<ide> // the hydration callbacks.
<ide> const hydrationCallbacks = options != null ? options : null;
<add> // TODO: Delete this option
<ide> const mutableSources = (options != null && options.hydratedSources) || null;
<del> const isStrictMode = options != null && options.unstable_strictMode === true;
<del>
<del> let concurrentUpdatesByDefaultOverride = null;
<del> if (allowConcurrentByDefault) {
<del> concurrentUpdatesByDefaultOverride =
<del> options != null && options.unstable_concurrentUpdatesByDefault != null
<del> ? options.unstable_concurrentUpdatesByDefault
<del> : null;
<add>
<add> let isStrictMode = false;
<add> let concurrentUpdatesByDefaultOverride = false;
<add> let identifierPrefix = '';
<add> if (options !== null && options !== undefined) {
<add> if (options.unstable_strictMode === true) {
<add> isStrictMode = true;
<add> }
<add> if (
<add> allowConcurrentByDefault &&
<add> options.unstable_concurrentUpdatesByDefault === true
<add> ) {
<add> concurrentUpdatesByDefaultOverride = true;
<add> }
<add> if (options.identifierPrefix !== undefined) {
<add> identifierPrefix = options.identifierPrefix;
<add> }
<ide> }
<ide>
<ide> const root = createContainer(
<ide> export function hydrateRoot(
<ide> hydrationCallbacks,
<ide> isStrictMode,
<ide> concurrentUpdatesByDefaultOverride,
<add> identifierPrefix,
<ide> );
<ide> markContainerAsRoot(root.current, container);
<ide> // This can't be a comment node since hydration doesn't work on comment nodes anyway.
<ide><path>packages/react-dom/src/server/ReactDOMServerFormatConfig.js
<ide> export type ResponseState = {
<ide> placeholderPrefix: PrecomputedChunk,
<ide> segmentPrefix: PrecomputedChunk,
<ide> boundaryPrefix: string,
<add> idPrefix: string,
<ide> nextSuspenseID: number,
<ide> sentCompleteSegmentFunction: boolean,
<ide> sentCompleteBoundaryFunction: boolean,
<ide> export function createResponseState(
<ide> placeholderPrefix: stringToPrecomputedChunk(idPrefix + 'P:'),
<ide> segmentPrefix: stringToPrecomputedChunk(idPrefix + 'S:'),
<ide> boundaryPrefix: idPrefix + 'B:',
<add> idPrefix: idPrefix + 'R:',
<ide> nextSuspenseID: 0,
<ide> sentCompleteSegmentFunction: false,
<ide> sentCompleteBoundaryFunction: false,
<ide> export function assignSuspenseBoundaryID(
<ide> );
<ide> }
<ide>
<add>export function makeId(
<add> responseState: ResponseState,
<add> treeId: string,
<add> localId: number,
<add>): string {
<add> const idPrefix = responseState.idPrefix;
<add>
<add> let id = idPrefix + treeId;
<add>
<add> // Unless this is the first id at this level, append a number at the end
<add> // that represents the position of this useId hook among all the useId
<add> // hooks for this fiber.
<add> if (localId > 0) {
<add> id += ':' + localId.toString(32);
<add> }
<add>
<add> return id;
<add>}
<add>
<ide> function encodeHTMLTextNode(text: string): string {
<ide> return escapeTextForBrowser(text);
<ide> }
<ide><path>packages/react-dom/src/server/ReactDOMServerLegacyFormatConfig.js
<ide> export type ResponseState = {
<ide> placeholderPrefix: PrecomputedChunk,
<ide> segmentPrefix: PrecomputedChunk,
<ide> boundaryPrefix: string,
<add> idPrefix: string,
<ide> nextSuspenseID: number,
<ide> sentCompleteSegmentFunction: boolean,
<ide> sentCompleteBoundaryFunction: boolean,
<ide> export function createResponseState(
<ide> placeholderPrefix: responseState.placeholderPrefix,
<ide> segmentPrefix: responseState.segmentPrefix,
<ide> boundaryPrefix: responseState.boundaryPrefix,
<add> idPrefix: responseState.idPrefix,
<ide> nextSuspenseID: responseState.nextSuspenseID,
<ide> sentCompleteSegmentFunction: responseState.sentCompleteSegmentFunction,
<ide> sentCompleteBoundaryFunction: responseState.sentCompleteBoundaryFunction,
<ide> export {
<ide> getChildFormatContext,
<ide> UNINITIALIZED_SUSPENSE_BOUNDARY_ID,
<ide> assignSuspenseBoundaryID,
<add> makeId,
<ide> pushStartInstance,
<ide> pushEndInstance,
<ide> pushStartCompletedSuspenseBoundary,
<ide><path>packages/react-native-renderer/src/ReactFabric.js
<ide> function render(
<ide> null,
<ide> false,
<ide> null,
<add> '',
<ide> );
<ide> roots.set(containerTag, root);
<ide> }
<ide><path>packages/react-native-renderer/src/ReactNativeRenderer.js
<ide> function render(
<ide> if (!root) {
<ide> // TODO (bvaughn): If we decide to keep the wrapper component,
<ide> // We could create a wrapper for containerTag as well to reduce special casing.
<del> root = createContainer(containerTag, LegacyRoot, false, null, false, null);
<add> root = createContainer(
<add> containerTag,
<add> LegacyRoot,
<add> false,
<add> null,
<add> false,
<add> null,
<add> '',
<add> );
<ide> roots.set(containerTag, root);
<ide> }
<ide> updateContainer(element, root, null, callback);
<ide><path>packages/react-native-renderer/src/server/ReactNativeServerFormatConfig.js
<ide> export function assignSuspenseBoundaryID(
<ide> return responseState.nextSuspenseID++;
<ide> }
<ide>
<add>export function makeId(
<add> responseState: ResponseState,
<add> treeId: string,
<add> localId: number,
<add>): string {
<add> throw new Error('Not implemented');
<add>}
<add>
<ide> const RAW_TEXT = stringToPrecomputedChunk('RCTRawText');
<ide>
<ide> export function pushTextInstance(
<ide><path>packages/react-noop-renderer/src/createReactNoop.js
<ide> function createReactNoop(reconciler: Function, useMutation: boolean) {
<ide> false,
<ide> null,
<ide> null,
<add> false,
<add> '',
<ide> );
<ide> return {
<ide> _Scheduler: Scheduler,
<ide> function createReactNoop(reconciler: Function, useMutation: boolean) {
<ide> false,
<ide> null,
<ide> null,
<add> false,
<add> '',
<ide> );
<ide> return {
<ide> _Scheduler: Scheduler,
<ide><path>packages/react-reconciler/src/ReactFiberHooks.new.js
<ide> export function getIsUpdatingOpaqueValueInRenderPhaseInDEV(): boolean | void {
<ide> function mountId(): string {
<ide> const hook = mountWorkInProgressHook();
<ide>
<add> const root = ((getWorkInProgressRoot(): any): FiberRoot);
<add> // TODO: In Fizz, id generation is specific to each server config. Maybe we
<add> // should do this in Fiber, too? Deferring this decision for now because
<add> // there's no other place to store the prefix except for an internal field on
<add> // the public createRoot object, which the fiber tree does not currently have
<add> // a reference to.
<add> const identifierPrefix = root.identifierPrefix;
<add>
<ide> let id;
<ide> if (getIsHydrating()) {
<ide> const treeId = getTreeId();
<ide>
<ide> // Use a captial R prefix for server-generated ids.
<del> id = 'R:' + treeId;
<add> id = identifierPrefix + 'R:' + treeId;
<ide>
<ide> // Unless this is the first id at this level, append a number at the end
<ide> // that represents the position of this useId hook among all the useId
<ide> function mountId(): string {
<ide> } else {
<ide> // Use a lowercase r prefix for client-generated ids.
<ide> const globalClientId = globalClientIdCounter++;
<del> id = 'r:' + globalClientId.toString(32);
<add> id = identifierPrefix + 'r:' + globalClientId.toString(32);
<ide> }
<ide>
<ide> hook.memoizedState = id;
<ide><path>packages/react-reconciler/src/ReactFiberHooks.old.js
<ide> export function getIsUpdatingOpaqueValueInRenderPhaseInDEV(): boolean | void {
<ide> function mountId(): string {
<ide> const hook = mountWorkInProgressHook();
<ide>
<add> const root = ((getWorkInProgressRoot(): any): FiberRoot);
<add> // TODO: In Fizz, id generation is specific to each server config. Maybe we
<add> // should do this in Fiber, too? Deferring this decision for now because
<add> // there's no other place to store the prefix except for an internal field on
<add> // the public createRoot object, which the fiber tree does not currently have
<add> // a reference to.
<add> const identifierPrefix = root.identifierPrefix;
<add>
<ide> let id;
<ide> if (getIsHydrating()) {
<ide> const treeId = getTreeId();
<ide>
<ide> // Use a captial R prefix for server-generated ids.
<del> id = 'R:' + treeId;
<add> id = identifierPrefix + 'R:' + treeId;
<ide>
<ide> // Unless this is the first id at this level, append a number at the end
<ide> // that represents the position of this useId hook among all the useId
<ide> function mountId(): string {
<ide> } else {
<ide> // Use a lowercase r prefix for client-generated ids.
<ide> const globalClientId = globalClientIdCounter++;
<del> id = 'r:' + globalClientId.toString(32);
<add> id = identifierPrefix + 'r:' + globalClientId.toString(32);
<ide> }
<ide>
<ide> hook.memoizedState = id;
<ide><path>packages/react-reconciler/src/ReactFiberReconciler.new.js
<ide> export function createContainer(
<ide> hydrationCallbacks: null | SuspenseHydrationCallbacks,
<ide> isStrictMode: boolean,
<ide> concurrentUpdatesByDefaultOverride: null | boolean,
<add> identifierPrefix: string,
<ide> ): OpaqueRoot {
<ide> return createFiberRoot(
<ide> containerInfo,
<ide> export function createContainer(
<ide> hydrationCallbacks,
<ide> isStrictMode,
<ide> concurrentUpdatesByDefaultOverride,
<add> identifierPrefix,
<ide> );
<ide> }
<ide>
<ide><path>packages/react-reconciler/src/ReactFiberReconciler.old.js
<ide> export function createContainer(
<ide> hydrationCallbacks: null | SuspenseHydrationCallbacks,
<ide> isStrictMode: boolean,
<ide> concurrentUpdatesByDefaultOverride: null | boolean,
<add> identifierPrefix: string,
<ide> ): OpaqueRoot {
<ide> return createFiberRoot(
<ide> containerInfo,
<ide> export function createContainer(
<ide> hydrationCallbacks,
<ide> isStrictMode,
<ide> concurrentUpdatesByDefaultOverride,
<add> identifierPrefix,
<ide> );
<ide> }
<ide>
<ide><path>packages/react-reconciler/src/ReactFiberRoot.new.js
<ide> import {initializeUpdateQueue} from './ReactUpdateQueue.new';
<ide> import {LegacyRoot, ConcurrentRoot} from './ReactRootTags';
<ide> import {createCache, retainCache} from './ReactFiberCacheComponent.new';
<ide>
<del>function FiberRootNode(containerInfo, tag, hydrate) {
<add>function FiberRootNode(containerInfo, tag, hydrate, identifierPrefix) {
<ide> this.tag = tag;
<ide> this.containerInfo = containerInfo;
<ide> this.pendingChildren = null;
<ide> function FiberRootNode(containerInfo, tag, hydrate) {
<ide> this.entangledLanes = NoLanes;
<ide> this.entanglements = createLaneMap(NoLanes);
<ide>
<add> this.identifierPrefix = identifierPrefix;
<add>
<ide> if (enableCache) {
<ide> this.pooledCache = null;
<ide> this.pooledCacheLanes = NoLanes;
<ide> export function createFiberRoot(
<ide> hydrationCallbacks: null | SuspenseHydrationCallbacks,
<ide> isStrictMode: boolean,
<ide> concurrentUpdatesByDefaultOverride: null | boolean,
<add> identifierPrefix: string,
<ide> ): FiberRoot {
<del> const root: FiberRoot = (new FiberRootNode(containerInfo, tag, hydrate): any);
<add> const root: FiberRoot = (new FiberRootNode(
<add> containerInfo,
<add> tag,
<add> hydrate,
<add> identifierPrefix,
<add> ): any);
<ide> if (enableSuspenseCallback) {
<ide> root.hydrationCallbacks = hydrationCallbacks;
<ide> }
<ide><path>packages/react-reconciler/src/ReactFiberRoot.old.js
<ide> import {initializeUpdateQueue} from './ReactUpdateQueue.old';
<ide> import {LegacyRoot, ConcurrentRoot} from './ReactRootTags';
<ide> import {createCache, retainCache} from './ReactFiberCacheComponent.old';
<ide>
<del>function FiberRootNode(containerInfo, tag, hydrate) {
<add>function FiberRootNode(containerInfo, tag, hydrate, identifierPrefix) {
<ide> this.tag = tag;
<ide> this.containerInfo = containerInfo;
<ide> this.pendingChildren = null;
<ide> function FiberRootNode(containerInfo, tag, hydrate) {
<ide> this.entangledLanes = NoLanes;
<ide> this.entanglements = createLaneMap(NoLanes);
<ide>
<add> this.identifierPrefix = identifierPrefix;
<add>
<ide> if (enableCache) {
<ide> this.pooledCache = null;
<ide> this.pooledCacheLanes = NoLanes;
<ide> export function createFiberRoot(
<ide> hydrationCallbacks: null | SuspenseHydrationCallbacks,
<ide> isStrictMode: boolean,
<ide> concurrentUpdatesByDefaultOverride: null | boolean,
<add> identifierPrefix: string,
<ide> ): FiberRoot {
<del> const root: FiberRoot = (new FiberRootNode(containerInfo, tag, hydrate): any);
<add> const root: FiberRoot = (new FiberRootNode(
<add> containerInfo,
<add> tag,
<add> hydrate,
<add> identifierPrefix,
<add> ): any);
<ide> if (enableSuspenseCallback) {
<ide> root.hydrationCallbacks = hydrationCallbacks;
<ide> }
<ide><path>packages/react-reconciler/src/ReactInternalTypes.js
<ide> type BaseFiberRootProperties = {|
<ide>
<ide> pooledCache: Cache | null,
<ide> pooledCacheLanes: Lanes,
<add>
<add> // TODO: In Fizz, id generation is specific to each server config. Maybe we
<add> // should do this in Fiber, too? Deferring this decision for now because
<add> // there's no other place to store the prefix except for an internal field on
<add> // the public createRoot object, which the fiber tree does not currently have
<add> // a reference to.
<add> identifierPrefix: string,
<ide> |};
<ide>
<ide> // The following attributes are only used by DevTools and are only present in DEV builds.
<ide><path>packages/react-reconciler/src/__tests__/ReactFiberHostContext-test.internal.js
<ide> describe('ReactFiberHostContext', () => {
<ide> ConcurrentRoot,
<ide> false,
<ide> null,
<add> false,
<add> '',
<ide> );
<ide> act(() => {
<ide> Renderer.updateContainer(
<ide> describe('ReactFiberHostContext', () => {
<ide> ConcurrentRoot,
<ide> false,
<ide> null,
<add> false,
<add> '',
<ide> );
<ide> act(() => {
<ide> Renderer.updateContainer(
<ide><path>packages/react-server/src/ReactFizzHooks.js
<ide> import type {Task} from './ReactFizzServer';
<ide> import {readContext as readContextImpl} from './ReactFizzNewContext';
<ide> import {getTreeId} from './ReactFizzTreeContext';
<ide>
<add>import {makeId} from './ReactServerFormatConfig';
<add>
<ide> import {enableCache} from 'shared/ReactFeatureFlags';
<ide> import is from 'shared/objectIs';
<ide>
<ide> function useId(): string {
<ide> const task: Task = (currentlyRenderingTask: any);
<ide> const treeId = getTreeId(task.treeContext);
<ide>
<del> // Use a captial R prefix for server-generated ids.
<del> let id = 'R:' + treeId;
<del>
<del> // Unless this is the first id at this level, append a number at the end
<del> // that represents the position of this useId hook among all the useId
<del> // hooks for this fiber.
<del> const localId = localIdCounter++;
<del> if (localId > 0) {
<del> id += ':' + localId.toString(32);
<add> const responseState = currentResponseState;
<add> if (responseState === null) {
<add> throw new Error(
<add> 'Invalid hook call. Hooks can only be called inside of the body of a function component.',
<add> );
<ide> }
<ide>
<del> return id;
<add> const localId = localIdCounter++;
<add> return makeId(responseState, treeId, localId);
<ide> }
<ide>
<ide> function unsupportedRefresh() {
<ide><path>packages/react-server/src/forks/ReactServerFormatConfig.custom.js
<ide> export const getChildFormatContext = $$$hostConfig.getChildFormatContext;
<ide> export const UNINITIALIZED_SUSPENSE_BOUNDARY_ID =
<ide> $$$hostConfig.UNINITIALIZED_SUSPENSE_BOUNDARY_ID;
<ide> export const assignSuspenseBoundaryID = $$$hostConfig.assignSuspenseBoundaryID;
<add>export const makeId = $$$hostConfig.makeId;
<ide> export const pushTextInstance = $$$hostConfig.pushTextInstance;
<ide> export const pushStartInstance = $$$hostConfig.pushStartInstance;
<ide> export const pushEndInstance = $$$hostConfig.pushEndInstance;
<ide><path>packages/react-test-renderer/src/ReactTestRenderer.js
<ide> function create(element: React$Element<any>, options: TestRendererOptions) {
<ide> null,
<ide> isStrictMode,
<ide> concurrentUpdatesByDefault,
<add> '',
<ide> );
<ide>
<ide> if (root == null) { | 21 |
Javascript | Javascript | use worker scope in wpt | 2742f3869a540b01ed51ca91075af9215c13bd54 | <ide><path>test/common/wpt.js
<ide> class WPTRunner {
<ide> sandbox.self = sandbox;
<ide> // TODO(joyeecheung): we are not a window - work with the upstream to
<ide> // add a new scope for us.
<del> sandbox.document = {}; // Pretend we are Window
<add>
<add> const { Worker } = require('worker_threads');
<add> sandbox.DedicatedWorker = Worker; // Pretend we are a Worker
<ide> return context;
<ide> }
<ide>
<ide><path>test/wpt/test-whatwg-console.js
<ide> 'use strict';
<ide>
<del>// Flags: --expose-internals
<add>// Flags: --expose-internals --experimental-worker
<ide>
<ide> require('../common');
<ide> const { WPTRunner } = require('../common/wpt');
<ide><path>test/wpt/test-whatwg-url.js
<ide> 'use strict';
<ide>
<del>// Flags: --expose-internals
<add>// Flags: --expose-internals --experimental-worker
<ide>
<ide> require('../common');
<ide> const { WPTRunner } = require('../common/wpt'); | 3 |
PHP | PHP | fix typeerror on invalid base64 data | e9c9bbcc55e052ebbdf7408f096a5983c6b4f552 | <ide><path>src/Http/Middleware/CsrfProtectionMiddleware.php
<ide> protected function _verifyToken(string $token): bool
<ide> } else {
<ide> $decoded = base64_decode($token, true);
<ide> }
<del> if (strlen($decoded) <= static::TOKEN_VALUE_LENGTH) {
<add> if (!$decoded || strlen($decoded) <= static::TOKEN_VALUE_LENGTH) {
<ide> return false;
<ide> }
<ide>
<ide><path>tests/TestCase/Http/Middleware/CsrfProtectionMiddlewareTest.php
<ide> public function testInvalidTokenStringCookies()
<ide> $middleware->process($request, $this->_getRequestHandler());
<ide> }
<ide>
<add> /**
<add> * Test that empty value cookies are rejected
<add> *
<add> * @return void
<add> */
<add> public function testInvalidTokenEmptyStringCookies()
<add> {
<add> $this->expectException(InvalidCsrfTokenException::class);
<add> $request = new ServerRequest([
<add> 'environment' => [
<add> 'REQUEST_METHOD' => 'POST',
<add> ],
<add> 'post' => ['_csrfToken' => '*(&'],
<add> // Invalid data that can't be base64 decoded.
<add> 'cookies' => ['csrfToken' => '*(&'],
<add> ]);
<add> $middleware = new CsrfProtectionMiddleware();
<add> $middleware->process($request, $this->_getRequestHandler());
<add> }
<add>
<ide> /**
<ide> * Test that request non string cookies are ignored.
<ide> */ | 2 |
Javascript | Javascript | fix path for schedulerfeatureflags | 13a62feab8c39bc0292eb36d636af0bb4f3a78df | <ide><path>packages/scheduler/src/forks/SchedulerFeatureFlags.www.js
<ide> export const {
<ide> enableIsInputPending,
<ide> enableSchedulerDebugging,
<ide> enableProfiling: enableProfilingFeatureFlag,
<del>} = require('packages/scheduler/src/SchedulerFeatureFlags');
<add>} = require('SchedulerFeatureFlags');
<ide>
<ide> export const enableProfiling = __PROFILE__ && enableProfilingFeatureFlag; | 1 |
Ruby | Ruby | fix an ar test of schema dump when using oracle | 569cd97c80de95154312a0b33596211e55722a9e | <ide><path>activerecord/test/cases/comment_test.rb
<ide> def test_schema_dump_with_comments
<ide> assert_match %r[t\.string\s+"name",\s+comment: "Comment should help clarify the column purpose"], output
<ide> assert_match %r[t\.string\s+"obvious"\n], output
<ide> assert_match %r[t\.string\s+"content",\s+comment: "Whoa, content describes itself!"], output
<del> assert_match %r[t\.integer\s+"rating",\s+comment: "I am running out of imagination"], output
<add> if current_adapter?(:OracleAdapter)
<add> assert_match %r[t\.integer\s+"rating",\s+precision: 38,\s+comment: "I am running out of imagination"], output
<add> else
<add> assert_match %r[t\.integer\s+"rating",\s+comment: "I am running out of imagination"], output
<add> end
<ide> unless current_adapter?(:OracleAdapter)
<ide> assert_match %r[t\.index\s+.+\s+comment: "\\\"Very important\\\" index that powers all the performance.\\nAnd it's fun!"], output
<ide> assert_match %r[t\.index\s+.+\s+name: "idx_obvious",\s+comment: "We need to see obvious comments"], output | 1 |
Javascript | Javascript | remove global store (#908) | ee717af0889312cc6cc6337da0d405356a5a2a58 | <ide><path>examples/with-redux/store.js
<ide> export const startClock = () => dispatch => {
<ide> return setInterval(() => dispatch({ type: 'TICK', light: true, ts: Date.now() }), 800)
<ide> }
<ide>
<add>let store = null
<add>
<ide> export const initStore = (reducer, initialState, isServer) => {
<ide> if (isServer && typeof window === 'undefined') {
<ide> return createStore(reducer, initialState, applyMiddleware(thunkMiddleware))
<ide> } else {
<del> if (!window.store) {
<del> window.store = createStore(reducer, initialState, applyMiddleware(thunkMiddleware))
<add> if (!store) {
<add> store = createStore(reducer, initialState, applyMiddleware(thunkMiddleware))
<ide> }
<del> return window.store
<add> return store
<ide> }
<ide> } | 1 |
PHP | PHP | add doc block return | 11f410a3439f47efb60f19cf100e719e461fd75b | <ide><path>src/Illuminate/Http/Resources/Json/ResourceCollection.php
<ide> public function __construct($resource)
<ide>
<ide> /**
<ide> * Indicate that all current query parameters should be appended to pagination links.
<add> *
<add> * @return $this
<ide> */
<ide> public function preserveQuery()
<ide> { | 1 |
Javascript | Javascript | clarify value and constant injectability | 67a98112e424f5c6a490bc8719f01057ff110ba4 | <ide><path>src/auto/injector.js
<ide> function annotate(fn, strictDi, name) {
<ide> * @description
<ide> *
<ide> * Register a **value service** with the {@link auto.$injector $injector}, such as a string, a
<del> * number, an array, an object or a function. This is short for registering a service where its
<add> * number, an array, an object or a function. This is short for registering a service where its
<ide> * provider's `$get` property is a factory function that takes no arguments and returns the **value
<del> * service**.
<add> * service**. That also means it is not possible to inject other services into a value service.
<ide> *
<ide> * Value services are similar to constant services, except that they cannot be injected into a
<ide> * module configuration function (see {@link angular.Module#config}) but they can be overridden by
<del> * an Angular
<del> * {@link auto.$provide#decorator decorator}.
<add> * an Angular {@link auto.$provide#decorator decorator}.
<ide> *
<ide> * @param {string} name The name of the instance.
<ide> * @param {*} value The value.
<ide> function annotate(fn, strictDi, name) {
<ide> * @name $provide#constant
<ide> * @description
<ide> *
<del> * Register a **constant service**, such as a string, a number, an array, an object or a function,
<del> * with the {@link auto.$injector $injector}. Unlike {@link auto.$provide#value value} it can be
<add> * Register a **constant service** with the {@link auto.$injector $injector}, such as a string,
<add> * a number, an array, an object or a function. Like the {@link auto.$provide#value value}, it is not
<add> * possible to inject other services into a constant.
<add> *
<add> * But unlike {@link auto.$provide#value value}, a constant can be
<ide> * injected into a module configuration function (see {@link angular.Module#config}) and it cannot
<ide> * be overridden by an Angular {@link auto.$provide#decorator decorator}.
<ide> * | 1 |
PHP | PHP | fix missing default values for datetime/timestamp | f5a4c3b84958b9057ac6d6acf5b2a0a385e0be51 | <ide><path>src/Database/Schema/MysqlSchema.php
<ide> public function columnSql(Table $table, $name)
<ide> $out .= ' NULL';
<ide> unset($data['default']);
<ide> }
<del> if (isset($data['default']) && !in_array($data['type'], ['timestamp', 'datetime'])) {
<del> $out .= ' DEFAULT ' . $this->_driver->schemaValue($data['default']);
<del> unset($data['default']);
<del> }
<ide> if (isset($data['default']) &&
<ide> in_array($data['type'], ['timestamp', 'datetime']) &&
<ide> strtolower($data['default']) === 'current_timestamp'
<ide> ) {
<ide> $out .= ' DEFAULT CURRENT_TIMESTAMP';
<ide> unset($data['default']);
<ide> }
<add> if (isset($data['default'])) {
<add> $out .= ' DEFAULT ' . $this->_driver->schemaValue($data['default']);
<add> unset($data['default']);
<add> }
<ide> if (isset($data['comment']) && $data['comment'] !== '') {
<ide> $out .= ' COMMENT ' . $this->_driver->schemaValue($data['comment']);
<ide> }
<ide><path>tests/TestCase/Database/Schema/MysqlSchemaTest.php
<ide> public static function convertColumnProvider()
<ide> 'TIME',
<ide> ['type' => 'time', 'length' => null]
<ide> ],
<add> [
<add> 'TIMESTAMP',
<add> ['type' => 'timestamp', 'length' => null]
<add> ],
<ide> [
<ide> 'TINYINT(1)',
<ide> ['type' => 'boolean', 'length' => null]
<ide> public static function columnSqlProvider()
<ide> ['type' => 'datetime', 'comment' => 'Created timestamp'],
<ide> '`created` DATETIME COMMENT \'Created timestamp\''
<ide> ],
<add> [
<add> 'created',
<add> ['type' => 'datetime', 'null' => false, 'default' => 'current_timestamp'],
<add> '`created` DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP'
<add> ],
<add> [
<add> 'open_date',
<add> ['type' => 'datetime', 'null' => false, 'default' => '2016-12-07 23:04:00'],
<add> '`open_date` DATETIME NOT NULL DEFAULT \'2016-12-07 23:04:00\''
<add> ],
<ide> // Date & Time
<ide> [
<ide> 'start_date',
<ide> public static function columnSqlProvider()
<ide> '`created` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP'
<ide> ],
<ide> [
<del> 'created',
<del> ['type' => 'datetime', 'null' => false, 'default' => 'current_timestamp'],
<del> '`created` DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP'
<add> 'open_date',
<add> ['type' => 'timestamp', 'null' => false, 'default' => '2016-12-07 23:04:00'],
<add> '`open_date` TIMESTAMP NOT NULL DEFAULT \'2016-12-07 23:04:00\''
<ide> ],
<ide> ];
<ide> } | 2 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.