content_type
stringclasses 8
values | main_lang
stringclasses 7
values | message
stringlengths 1
50
| sha
stringlengths 40
40
| patch
stringlengths 52
962k
| file_count
int64 1
300
|
---|---|---|---|---|---|
Javascript | Javascript | remove 'warning' module from the js scheduler | 0154a79fedef38a824a837c535bc853013dd4588 | <ide><path>packages/react-scheduler/src/ReactScheduler.js
<ide> type CallbackConfigType = {|
<ide> export type CallbackIdType = CallbackConfigType;
<ide>
<ide> import {canUseDOM} from 'shared/ExecutionEnvironment';
<del>import warningWithoutStack from 'shared/warningWithoutStack';
<ide>
<ide> // We capture a local reference to any global, in case it gets polyfilled after
<ide> // this module is initially evaluated.
<ide> if (!canUseDOM) {
<ide> localClearTimeout(timeoutId);
<ide> };
<ide> } else {
<del> if (typeof localRequestAnimationFrame !== 'function') {
<del> warningWithoutStack(
<del> false,
<del> "This browser doesn't support requestAnimationFrame. " +
<del> 'Make sure that you load a ' +
<del> 'polyfill in older browsers. https://fb.me/react-polyfills',
<del> );
<del> }
<del> if (typeof localCancelAnimationFrame !== 'function') {
<del> warningWithoutStack(
<del> false,
<del> "This browser doesn't support cancelAnimationFrame. " +
<del> 'Make sure that you load a ' +
<del> 'polyfill in older browsers. https://fb.me/react-polyfills',
<del> );
<add> if (__DEV__) {
<add> if (typeof console !== 'undefined') {
<add> if (typeof localRequestAnimationFrame !== 'function') {
<add> console.error(
<add> "This browser doesn't support requestAnimationFrame. " +
<add> 'Make sure that you load a ' +
<add> 'polyfill in older browsers. https://fb.me/react-polyfills',
<add> );
<add> }
<add> if (typeof localCancelAnimationFrame !== 'function') {
<add> console.error(
<add> "This browser doesn't support cancelAnimationFrame. " +
<add> 'Make sure that you load a ' +
<add> 'polyfill in older browsers. https://fb.me/react-polyfills',
<add> );
<add> }
<add> }
<ide> }
<ide>
<ide> let headOfPendingCallbacksLinkedList: CallbackConfigType | null = null; | 1 |
PHP | PHP | set headers if available | a5ceed11eb4ccaa53e3771140583ace7c2a5df08 | <ide><path>lib/Cake/Network/Http/Client.php
<ide> protected function _createRequest($method, $url, $data, $options) {
<ide> $request->method($method)
<ide> ->url($url)
<ide> ->content($data);
<add> if (isset($options['headers'])) {
<add> $request->header($options['headers']);
<add> }
<ide> return $request;
<ide> }
<ide> | 1 |
Python | Python | fix tf doc bug | 9dd054fba29271739c164157811d6cbf2f0d4866 | <ide><path>src/transformers/models/bert/modeling_tf_bert.py
<ide> def call(
<ide> **kwargs,
<ide> ) -> Union[TFBertForPreTrainingOutput, Tuple[tf.Tensor]]:
<ide> r"""
<add> labels (:obj:`torch.LongTensor` of shape ``(batch_size, sequence_length)``, `optional`):
<add> Labels for computing the masked language modeling loss. Indices should be in ``[-100, 0, ...,
<add> config.vocab_size]`` (see ``input_ids`` docstring) Tokens with indices set to ``-100`` are ignored
<add> (masked), the loss is only computed for the tokens with labels in ``[0, ..., config.vocab_size]``
<add> next_sentence_label (``torch.LongTensor`` of shape ``(batch_size,)``, `optional`):
<add> Labels for computing the next sequence prediction (classification) loss. Input should be a sequence pair
<add> (see :obj:`input_ids` docstring) Indices should be in ``[0, 1]``:
<add>
<add> - 0 indicates sequence B is a continuation of sequence A,
<add> - 1 indicates sequence B is a random sequence.
<add> kwargs (:obj:`Dict[str, any]`, optional, defaults to `{}`):
<add> Used to hide legacy arguments that have been deprecated.
<add>
<ide> Return:
<ide>
<ide> Examples:: | 1 |
Python | Python | fix dtype in radnomly initialized head | 344e2664d450eaa9167ce31f7d1fc0f0fe3b10af | <ide><path>src/transformers/modeling_utils.py
<ide> def _fix_key(key):
<ide> param = model_state_dict[key]
<ide> if param.device == torch.device("meta"):
<ide> if not load_in_8bit:
<del> set_module_tensor_to_device(model, key, "cpu", torch.empty(*param.size()))
<add> set_module_tensor_to_device(model, key, "cpu", torch.empty(*param.size(), dtype=dtype))
<ide> else:
<del> set_module_8bit_tensor_to_device(model, key, "cpu", torch.empty(*param.size()))
<add> set_module_8bit_tensor_to_device(model, key, "cpu", torch.empty(*param.size(), dtype=dtype))
<ide>
<ide> # retrieve unintialized modules and initialize before maybe overriding that with the pretrained weights.
<ide> if _fast_init: | 1 |
Javascript | Javascript | fix devtool spelling in test | 48e496ce43cf81a6cdbdbc4dcef18baad5399d5d | <ide><path>test/TestCasesMinimizedSourceMap.test.js
<ide> describe("TestCases", () => {
<ide> describeCases({
<ide> name: "minimized-source-map",
<ide> mode: "production",
<del> devtool: "cheap-module-eval-source-map",
<add> devtool: "eval-cheap-module-source-map",
<ide> minimize: true
<ide> });
<ide> }); | 1 |
Text | Text | add 0.70.6 changelog | c8c901513f0e3068c75e1450d918a3fc58f0174b | <ide><path>CHANGELOG.md
<ide> # Changelog
<ide>
<add>## v0.70.6
<add>
<add>### Fixed
<add>
<add>- Fixed regression: @jest/create-cache-key-function dependency was inadvertedly bumped to 29.x. We are bringing it back to 27.0.1. ([fb0e88beb9](https://github.com/facebook/react-native/commit/fb0e88beb9dc443abed3886e459e7a7715738adc) by [@kelset](https://github.com/kelset))
<add>
<add>### Changed
<add>
<add>- Bump version of Promise from 8.0.3 to 8.3.0, enabling `Promise.allSettled` and `Promise.any` ([475310dbba](https://github.com/facebook/react-native/commit/475310dbbaec8048411edefc6cdddab330df7966) by [@retyui](https://github.com/retyui))
<add>- Bump CLI to 9.3.2 ([9bcc5e0373](https://github.com/facebook/react-native/commit/9bcc5e037391b45315dac3cb5f566e290cbf48cb) by [@kelset](https://github.com/kelset))
<add>
<add>#### iOS specific
<add>
<add>- Cleanup codegen build folder before installing the pods ([0e316ec671](https://github.com/facebook/react-native/commit/0e316ec671617f5e7c1985b4b05cd0d45bcea403) by [@cipolleschi](https://github.com/cipolleschi))
<add>
<ide> ## v0.70.5
<ide>
<ide> ### Fixed | 1 |
Text | Text | add note regarding windows volume limitations | 020ec88542e88bb561c30a1ec984d0a17c74b7f9 | <ide><path>docs/reference/builder.md
<ide> This Dockerfile results in an image that causes `docker run`, to
<ide> create a new mount point at `/myvol` and copy the `greeting` file
<ide> into the newly created volume.
<ide>
<add>> **Note**:
<add>> When using Windows-based containers, the destination of a volume inside the
<add>> container must be one of: a non-existing or empty directory; or a drive other
<add>> than C:.
<add>
<ide> > **Note**:
<ide> > If any build steps change the data within the volume after it has been
<ide> > declared, those changes will be discarded. | 1 |
Text | Text | document the remote driver protocol | 65519600f003da75ff49ecd46b23e097fed0c78b | <ide><path>libnetwork/docs/remote.md
<ide> Remote Drivers
<ide> ==============
<ide>
<del>The remote driver package provides the integration point for dynamically-registered drivers.
<add>The `drivers.remote` package provides the integration point for dynamically-registered drivers. Unlike the other driver packages, it does not provide a single implementation of a driver; rather, it provides a proxy for remote driver processes, which are registered and communicate with LibNetwork via the Docker plugin package.
<ide>
<del>## LibNetwork Integration
<add>For the semantics of driver methods, which correspond to the protocol below, please see the [overall design](design.md).
<ide>
<del>When LibNetwork initialises the `Remote` package with the `Init()` function, it passes a `DriverCallback` as a parameter, which implements the `RegisterDriver()`. The Remote Driver package can use this interface to register any of the `Dynamic` Drivers/Plugins with LibNetwork's `NetworkController`.
<add>## LibNetwork integration with the Docker `plugins` package
<ide>
<del>This design ensures that the implementation details (TBD) of Dynamic Driver Registration mechanism is completely owned by the inbuilt-Remote driver, and it doesn't expose any of the driver layer to the North of LibNetwork (none of the LibNetwork client APIs are impacted).
<add>When LibNetwork initialises the `drivers.remote` package with the `Init()` function, it passes a `DriverCallback` as a parameter, which implements `RegisterDriver()`. The remote driver package uses this interface to register remote drivers with LibNetwork's `NetworkController`, by supplying it in a `plugins.Handle` callback.
<add>
<add>The callback is invoked when a driver is loaded with the `plugins.Get` API call. How that comes about is out of scope here (but it might be, for instance, when that driver is mentioned by the user).
<add>
<add>This design ensures that the details of driver registration mechanism are owned by the remote driver package, and it doesn't expose any of the driver layer to the North of LibNetwork.
<ide>
<ide> ## Implementation
<ide>
<del>The actual implementation of how the Inbuilt Remote Driver registers with the Dynamic Driver is Work-In-Progress. But, the Design Goal is to Honor the bigger goals of LibNetwork by keeping it Highly modular and make sure that LibNetwork is fully composable in nature.
<add>The remote driver implementation uses a `plugins.Client` to communicate with the remote driver process. The `driverapi.Driver` methods are implemented as RPCs over the plugin client.
<add>
<add>The payloads of these RPCs are mostly direct translations into JSON of the arguments given to the method. There are some exceptions to account for the use of the interfaces `EndpointInfo` and `JoinInfo`, and data types that do not serialise to JSON well (e.g., `net.IPNet`). The protocol is detailed below under "Protocol".
<ide>
<ide> ## Usage
<ide>
<del>The In-Built Remote Driver follows all the rules of any other In-Built Driver and has exactly the same Driver APIs exposed. LibNetwork will also support driver-specific `options` and User-supplied `Labels` which the Dynamic Drivers can make use for its operations.
<add>A remote driver proxy follows all the rules of any other in-built driver and has exactly the same `Driver` interface exposed. LibNetwork will also support driver-specific `options` and user-supplied `labels` which may influence the behaviour of a remote driver process.
<add>
<add>## Protocol
<add>
<add>The remote driver protocol is a set of RPCs, issued as HTTP POSTs with JSON payloads. The proxy issues requests, and the remote driver process is expected to respond usually with a JSON payload of its own, although in some cases these are empty maps.
<add>
<add>### Errors
<add>
<add>If the remote process cannot decode, or otherwise detects a syntactic problem with the HTTP request or payload, it must respond with an HTTP error status (4xx or 5xx).
<add>
<add>If the remote process can decode the request, but cannot complete the operation, it must send a response in the form
<add>
<add> {
<add> "Err": string
<add> }
<add>
<add>The string value supplied may appear in logs, so should not include confidential information.
<add>
<add>### Handshake
<add>
<add>When loaded, a remote driver process receives an HTTP POST on the URL `/Plugin.Activate` with no payload. It must respond with a manifest of the form
<add>
<add> {
<add> "Implements": ["NetworkDriver"]
<add> }
<add>
<add>Other entries in the list value are allowed; `"NetworkDriver"` indicates that the plugin should be registered with LibNetwork as a driver.
<add>
<add>### Create network
<add>
<add>When the proxy is asked to create a network, the remote process shall receive a POST to the URL `/NetworkDriver.CreateNetwork` of the form
<add>
<add> {
<add> "NetworkID": string,
<add> "Options": {
<add> ...
<add> }
<add> }
<add>
<add>The `NetworkID` value is generated by LibNetwork. The `Options` value is the arbitrary map given to the proxy by LibNetwork.
<add>
<add>The response indicating success is empty:
<add>
<add> `{}`
<add>
<add>### Delete network
<add>
<add>When a network owned by the remote driver is deleted, the remote process shall receive a POST to the URL `/NetworkDriver.DeleteNetwork` of the form
<add>
<add> {
<add> "NetworkID": string
<add> }
<add>
<add>The success response is empty:
<add>
<add> {}
<add>
<add>### Create endpoint
<add>
<add>When the proxy is asked to create an endpoint, the remote process shall receive a POST to the URL `/NetworkDriver.CreateEndpoint` of the form
<add>
<add> {
<add> "NetworkID": string,
<add> "EndpointID": string,
<add> "Options": {
<add> ...
<add> },
<add> "Interfaces": [{
<add> "ID": int,
<add> "Address": string,
<add> "AddressIPv6": string,
<add> "MacAddress": string
<add> }, ...]
<add> }
<add>
<add>The `NetworkID` is the generated identifier for the network to which the endpoint belongs; the `EndpointID` is a generated identifier for the endpoint.
<add>
<add>`Options` is an arbitrary map as supplied to the proxy.
<add>
<add>The `Interfaces` value is a list with values of the form given. The fields in the `Interfaces` entries may be empty; and the `Interfaces` list itself may be empty. If supplied, `Address` is an IPv4 address and subnet in CIDR notation; e.g., `"192.168.34.12/16"`. If supplied, `AddressIPv6` is an IPv6 address and subnet in CIDR notation. `MacAddress` is a MAC address as a string; e.g., `"6e:75:32:60:44:c9"`.
<add>
<add>A success response is of the form
<add>
<add> {
<add> "Interfaces": [{
<add> "ID": int,
<add> "Address": string,
<add> "AddressIPv6": string,
<add> "MacAddress": string
<add> }, ...]
<add> }
<add>
<add>with values in the `Interfaces` entries as above. For each entry, an `ID` and `MacAddress` and either or both of `Address` and `AddressIPv6` must be given. The `ID` is arbitrary but must differ among entries. It is used to identify, within the scope of the endpoint, an individual interface during a `Join` call.
<add>
<add>If the remote process was supplied entries in `Interfaces`, it must respond with an empty `Interfaces` list. LibNetwork will treat it as an error if it supplies a non-empty list and receives a non-empty list back, and roll back the operation.
<add>
<add>### Endpoint operational info
<add>
<add>The proxy may be asked for "operational info" on an endpoint. When this happens, the remote process shall receive a POST to `/NetworkDriver.EndpointOperInfo` of the form
<add>
<add> {
<add> "NetworkID": string,
<add> "EndpointID": string
<add> }
<add>
<add>where `NetworkID` and `EndpointID` have meanings as above. It must send a response of the form
<add>
<add> {
<add> "Value": { ... }
<add> }
<add>
<add>where the value of the `Value` field is an arbitrary (possibly empty) map.
<add>
<add>### Delete endpoint
<add>
<add>When an endpoint is deleted, the remote process shall receive a POST to the URL `/NetworkDriver.DeleteEndpoint` with a body of the form
<add>
<add> {
<add> "NetworkID": string,
<add> "EndpointID": string
<add> }
<add>
<add>where `NetworkID` and `EndpointID` have meanings as above. A success response is empty:
<add>
<add> {}
<add>
<add>### Join
<add>
<add>When a sandbox is given an endpoint, the remote process shall receive a POST to the URL `NetworkDriver.Join` of the form
<add>
<add> {
<add> "NetworkID": string,
<add> "EndpointID": string,
<add> "SandboxKey": string,
<add> "Options": { ... }
<add> }
<add>
<add>The `NetworkID` and `EndpointID` have meanings as above. The `SandboxKey` identifies the sandbox. `Options` is an arbitrary map as supplied to the proxy.
<add>
<add>The response must have the form
<add>
<add> {
<add> "InterfaceNames": [{
<add> SrcName: string,
<add> DstPrefix: string
<add> }, ...],
<add> "Gateway": string,
<add> "GatewayIPv6": string,
<add> "StaticRoutes": [{
<add> "Destination": string,
<add> "RouteType": int,
<add> "NextHop": string,
<add> "InterfaceID": int
<add> }, ...]
<add> "HostsPath": string,
<add> "ResolvConfPath": string
<add> }
<add>
<add>`Gateway` is optional and if supplied is an IP address as a string; e.g., `"192.168.0.1"`. `GatewayIPv6` is optional and if supplied is an IPv6 address as a string; e.g., `"fe80::7809:baff:fec6:7744"`. `HostsPath` is optional, as is `ResolvConfPath`.
<add>
<add>The entries in `InterfaceNames` represent veths that should be moved by LibNetwork into the sandbox; the `SrcName` is the name of the veth that the remote process created, and the `DstPrefix` is a prefix for the name the veth should have after it has been moved into the sandbox (LibNetwork will append an index to make sure the actual name does not collide with others).
<add>
<add>The position of the entries in the list must correspond to the interface IDs given in the response to `/NetworkDriver.CreateEndpoint` as described above. For example, if there were two `Interfaces` in the create endpoint response, with IDs `0` and `1`, then the `InterfaceNames` list would have the interface names respectively in positions `0` and `1`of the list. (For this reason it is recommended that interfaces are given sequential IDs starting with `0`.)
<add>
<add>The entries in `"StaticRoutes"` represent routes that should be added to an interface once it has been moved into the sandbox. Since there may be zero or more routes for an interface, unlike the interface names they can be supplied in any order, and are marked with the `InterfaceID` of the corresponding interface.
<add>
<add>Routes are either given a `RouteType` of `0` and a value for `NextHop`; or, a `RouteType` of `1` and no value for `NextHop`, meaning a connected route.
<add>
<add>### Leave
<add>
<add>If the proxy is asked to remove an endpoint from a sandbox, the remote process shall receive a POST to the URL `/NetworkDriver.Leave` of the form
<add>
<add> {
<add> "NetworkID": string,
<add> "EndpointID": string
<add> }
<add>
<add>where `NetworkID` and `EndpointID` have meanings as above. The success response is empty:
<add>
<add> {} | 1 |
PHP | PHP | add flashhelper tests | 2abaf3dd8ed0e5ba40358a021578f5832ea93b3b | <ide><path>tests/TestCase/View/Helper/FlashHelperTest.php
<add><?php
<add>/**
<add> * CakePHP(tm) Tests <http://book.cakephp.org/2.0/en/development/testing.html>
<add> * Copyright (c) Cake Software Foundation, Inc. (http://cakefoundation.org)
<add> *
<add> * Licensed under The MIT License
<add> * For full copyright and license information, please see the LICENSE.txt
<add> * Redistributions of files must retain the above copyright notice
<add> *
<add> * @copyright Copyright (c) Cake Software Foundation, Inc. (http://cakefoundation.org)
<add> * @link http://book.cakephp.org/2.0/en/development/testing.html CakePHP(tm) Tests
<add> * @since 1.2.0
<add> * @license http://www.opensource.org/licenses/mit-license.php MIT License
<add> */
<add>namespace Cake\Test\TestCase\View\Helper;
<add>
<add>use Cake\Controller\Controller;
<add>use Cake\Core\App;
<add>use Cake\Core\Plugin;
<add>use Cake\Network\Request;
<add>use Cake\Network\Session;
<add>use Cake\TestSuite\TestCase;
<add>use Cake\View\Helper\FlashHelper;
<add>use Cake\View\View;
<add>
<add>/**
<add> * FlashHelperTest class
<add> *
<add> */
<add>class FlashHelperTest extends TestCase {
<add>
<add>/**
<add> * setUp method
<add> *
<add> * @return void
<add> */
<add> public function setUp() {
<add> parent::setUp();
<add> $this->View = new View();
<add> $session = new Session();
<add> $this->View->request = new Request(['session' => $session]);
<add> $this->Flash = new FlashHelper($this->View);
<add>
<add> $session->write(array(
<add> 'Message' => array(
<add> 'flash' => array(
<add> 'type' => 'info',
<add> 'params' => array(),
<add> 'message' => 'This is a calling'
<add> ),
<add> 'notification' => array(
<add> 'type' => 'info',
<add> 'params' => array(
<add> 'title' => 'Notice!',
<add> 'name' => 'Alert!',
<add> 'element' => 'session_helper'
<add> ),
<add> 'message' => 'This is a test of the emergency broadcasting system',
<add> ),
<add> 'classy' => array(
<add> 'type' => 'success',
<add> 'params' => array('class' => 'positive'),
<add> 'message' => 'Recorded'
<add> )
<add> )
<add> ));
<add> }
<add>
<add>/**
<add> * tearDown method
<add> *
<add> * @return void
<add> */
<add> public function tearDown() {
<add> $_SESSION = [];
<add> unset($this->View, $this->Session);
<add> Plugin::unload();
<add> parent::tearDown();
<add> }
<add>
<add>/**
<add> * testFlash method
<add> *
<add> * @return void
<add> */
<add> public function testFlash() {
<add> $result = $this->Flash->out();
<add> $expected = '<div id="flash-message" class="message-info">This is a calling</div>';
<add> $this->assertEquals($expected, $result);
<add>
<add> $expected = '<div id="classy-message" class="message-success">Recorded</div>';
<add> $result = $this->Flash->out('classy');
<add> $this->assertEquals($expected, $result);
<add>
<add> $result = $this->Flash->out('notification');
<add> $result = str_replace("\r\n", "\n", $result);
<add> $expected = "<div id=\"notificationLayout\">\n\t<h1>Alert!</h1>\n\t<h3>Notice!</h3>\n\t<p>This is a test of the emergency broadcasting system</p>\n</div>";
<add> $this->assertEquals($expected, $result);
<add> }
<add>
<add>/**
<add> * test flash() with the attributes.
<add> *
<add> * @return void
<add> */
<add> public function testFlashAttributes() {
<add> $result = $this->Flash->out('flash', array('class' => 'crazy'));
<add> $expected = '<div id="flash-message" class="message-crazy">This is a calling</div>';
<add> $this->assertEquals($expected, $result);
<add> }
<add>
<add>/**
<add> * test setting the element from the attrs.
<add> *
<add> * @return void
<add> */
<add> public function testFlashElementInAttrs() {
<add> $result = $this->Flash->out('flash', array(
<add> 'element' => 'session_helper',
<add> 'params' => array('title' => 'Notice!', 'name' => 'Alert!')
<add> ));
<add> $expected = "<div id=\"notificationLayout\">\n\t<h1>Alert!</h1>\n\t<h3>Notice!</h3>\n\t<p>This is a calling</p>\n</div>";
<add> $this->assertTextEquals($expected, $result);
<add> }
<add>
<add>/**
<add> * test using elements in plugins.
<add> *
<add> * @return void
<add> */
<add> public function testFlashWithPluginElement() {
<add> Plugin::load('TestPlugin');
<add>
<add> $result = $this->Flash->out('flash', array('element' => 'TestPlugin.plugin_element'));
<add> $expected = 'this is the plugin element using params[plugin]';
<add> $this->assertEquals($expected, $result);
<add> }
<add>} | 1 |
Python | Python | set version to v3.0.0.dev9 | 58c8f731bd7614027d537f984e799d47cdacad1f | <ide><path>spacy/about.py
<ide> # fmt: off
<ide> __title__ = "spacy"
<del>__version__ = "3.0.0"
<add>__version__ = "3.0.0.dev9"
<ide> __release__ = True
<ide> __download_url__ = "https://github.com/explosion/spacy-models/releases/download"
<ide> __compatibility__ = "https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json" | 1 |
Text | Text | update pr template clarifying autoclosing issues | edc89b98a9412ce96e968d526f6d22c671050ccf | <ide><path>.github/PULL_REQUEST_TEMPLATE.md
<ide>
<ide> #### Type of Change
<ide> <!-- What type of change does your code introduce? Put an `x` in the box that applies. -->
<del>- [ ] Bug fix (non-breaking change which fixes an issue)
<del>- [ ] New feature (non-breaking change which adds functionality)
<del>- [ ] Breaking change (fix or feature that would cause existing functionality to change)
<add>- [ ] Small bug fix (non-breaking change which fixes an issue)
<add>- [ ] New feature (non-breaking change which adds new functionality)
<add>- [ ] Breaking change (fix or feature that would change existing functionality)
<ide> - [ ] Add new translation (feature adding new translations)
<ide>
<ide> #### Checklist:
<ide> <!-- Go over all points below, and put an `x` in all the boxes that apply. -->
<del><!-- If you're unsure about any of these, don't hesitate to ask. We're here to help! -->
<add><!-- If you're unsure about any of these, don't hesitate to ask in the Help Contributors room linked above. We're here to help! -->
<ide> - [ ] Tested changes locally.
<del>- [ ] Closes currently open issue (`Closes #XXXX`): Closes
<add>- [ ] Closes currently open issue (e.g. `Closes #XXXX`): Closes
<ide>
<ide> #### Description
<ide> <!-- Describe your changes in detail --> | 1 |
PHP | PHP | remove unused static property | 34f0c0102449e40f0d0c243190f0cccc1c233446 | <ide><path>Cake/ORM/Table.php
<ide> */
<ide> class Table {
<ide>
<del>/**
<del> * A list of all table instances that has been built using the factory
<del> * method. Instances are indexed by alias
<del> *
<del> * @var array
<del> */
<del> protected static $_instances = [];
<del>
<ide> /**
<ide> * Name of the table as it can be found in the database
<ide> * | 1 |
Javascript | Javascript | add more tests for inherited listeners | 6a877c80255294821d07c47231c65dc8b336f8cd | <ide><path>packages/@ember/-internals/meta/tests/listeners_test.js
<ide> moduleFor(
<ide> }
<ide>
<ide> ['@test inheritance'](assert) {
<add> let matching;
<ide> let target = {};
<ide> let parent = {};
<ide> let parentMeta = meta(parent);
<ide> parentMeta.addToListeners('hello', target, 'm', 0);
<ide>
<del> let child = Object.create(parent);
<del> let m = meta(child);
<add> let child1 = Object.create(parent);
<add> let m1 = meta(child1);
<add>
<add> let child2 = Object.create(parent);
<add> let m2 = meta(child2);
<add>
<add> let child3 = Object.create(parent);
<add> let m3 = meta(child3);
<add>
<add> m3.removeFromListeners('hello', target, 'm');
<add>
<add> matching = m3.matchingListeners('hello');
<add> assert.deepEqual(matching, undefined, 'no listeners for child3');
<add>
<add> m3.addToListeners('hello', target, 'm', 0);
<add>
<add> matching = m3.matchingListeners('hello');
<add> assert.deepEqual(matching, [target, 'm', false], 'listener still exists for child1');
<add>
<add> m3.removeFromListeners('hello', target, 'm');
<add>
<add> matching = m3.matchingListeners('hello');
<add> assert.deepEqual(matching, undefined, 'no listeners for child3');
<add>
<add> matching = m1.matchingListeners('hello');
<add> assert.deepEqual(matching, [target, 'm', false], 'listener still exists for child1');
<add>
<add> matching = m2.matchingListeners('hello');
<add> assert.deepEqual(matching, [target, 'm', false], 'listener still exists for child2');
<add>
<add> m1.removeFromListeners('hello', target, 'm');
<add>
<add> matching = m1.matchingListeners('hello');
<add> assert.equal(matching, undefined, 'listener removed from child1');
<add>
<add> matching = m2.matchingListeners('hello');
<add> assert.deepEqual(matching, [target, 'm', false], 'listener still exists for child2');
<ide>
<del> let matching = m.matchingListeners('hello');
<del> assert.equal(matching.length, 3);
<del> assert.equal(matching[0], target);
<del> assert.equal(matching[1], 'm');
<del> assert.equal(matching[2], 0);
<del> m.removeFromListeners('hello', target, 'm');
<del> matching = m.matchingListeners('hello');
<del> assert.equal(matching, undefined);
<ide> matching = parentMeta.matchingListeners('hello');
<del> assert.equal(matching.length, 3);
<add> assert.deepEqual(matching, [target, 'm', false], 'listener still present for parent');
<ide> }
<ide>
<ide> ['@test deduplication'](assert) { | 1 |
PHP | PHP | show model type in missingmodelexception message | 90586cd6325912cf4865b2598e11f357a78e481b | <ide><path>src/Model/Error/MissingModelException.php
<ide> */
<ide> class MissingModelException extends Exception {
<ide>
<del> protected $_messageTemplate = 'Model class %s could not be found.';
<add> protected $_messageTemplate = 'Model class "%s" of type "%s" could not be found.';
<ide>
<ide> }
<ide><path>src/Model/ModelAwareTrait.php
<ide> public function loadModel($modelClass = null, $type = 'Table') {
<ide> $factory = $this->_modelFactories[$type];
<ide> $this->{$modelClass} = $factory($plugin . $modelClass);
<ide> if (!$this->{$modelClass}) {
<del> throw new Error\MissingModelException($modelClass);
<add> throw new Error\MissingModelException([$modelClass, $type]);
<ide> }
<ide> return true;
<ide> }
<ide><path>tests/TestCase/Model/ModelAwareTraitTest.php
<ide> public function testModelFactory() {
<ide> $this->assertEquals('Magic', $stub->Magic->name);
<ide> }
<ide>
<add>/**
<add> * test MissingModelException being thrown
<add> *
<add> * @return void
<add> * @expectedException Cake\Model\Error\MissingModelException
<add> * @expectedExceptionMessage Model class "Magic" of type "Test" could not be found.
<add> */
<add> public function testMissingModelException() {
<add> $stub = new Stub();
<add>
<add> $stub->modelFactory('Test', function($name) {
<add> return false;
<add> });
<add>
<add> $stub->loadModel('Magic', 'Test');
<add> }
<add>
<ide> } | 3 |
Java | Java | reimplement the amb operator | b8a3cad8b684177be06e65a0bad7850d8e7e9382 | <ide><path>rxjava-core/src/main/java/rx/operators/OperationAmb.java
<ide> import java.util.concurrent.atomic.AtomicInteger;
<ide>
<ide> import rx.Observable;
<del>import rx.Observable.OnSubscribeFunc;
<add>import rx.Observable.OnSubscribe;
<ide> import rx.Observer;
<del>import rx.Subscription;
<del>import rx.subscriptions.CompositeSubscription;
<add>import rx.Subscriber;
<ide>
<ide> /**
<ide> * Propagates the observable sequence that reacts first.
<ide> */
<del>public class OperationAmb {
<add>public final class OperationAmb<T> implements OnSubscribe<T>{
<ide>
<del> public static <T> OnSubscribeFunc<T> amb(Observable<? extends T> o1, Observable<? extends T> o2) {
<add> public static <T> OnSubscribe<T> amb(Observable<? extends T> o1, Observable<? extends T> o2) {
<ide> List<Observable<? extends T>> sources = new ArrayList<Observable<? extends T>>();
<ide> sources.add(o1);
<ide> sources.add(o2);
<ide> return amb(sources);
<ide> }
<ide>
<del> public static <T> OnSubscribeFunc<T> amb(Observable<? extends T> o1, Observable<? extends T> o2, Observable<? extends T> o3) {
<add> public static <T> OnSubscribe<T> amb(Observable<? extends T> o1, Observable<? extends T> o2, Observable<? extends T> o3) {
<ide> List<Observable<? extends T>> sources = new ArrayList<Observable<? extends T>>();
<ide> sources.add(o1);
<ide> sources.add(o2);
<ide> sources.add(o3);
<ide> return amb(sources);
<ide> }
<ide>
<del> public static <T> OnSubscribeFunc<T> amb(Observable<? extends T> o1, Observable<? extends T> o2, Observable<? extends T> o3, Observable<? extends T> o4) {
<add> public static <T> OnSubscribe<T> amb(Observable<? extends T> o1, Observable<? extends T> o2, Observable<? extends T> o3, Observable<? extends T> o4) {
<ide> List<Observable<? extends T>> sources = new ArrayList<Observable<? extends T>>();
<ide> sources.add(o1);
<ide> sources.add(o2);
<ide> public static <T> OnSubscribeFunc<T> amb(Observable<? extends T> o1, Observable<
<ide> return amb(sources);
<ide> }
<ide>
<del> public static <T> OnSubscribeFunc<T> amb(Observable<? extends T> o1, Observable<? extends T> o2, Observable<? extends T> o3, Observable<? extends T> o4, Observable<? extends T> o5) {
<add> public static <T> OnSubscribe<T> amb(Observable<? extends T> o1, Observable<? extends T> o2, Observable<? extends T> o3, Observable<? extends T> o4, Observable<? extends T> o5) {
<ide> List<Observable<? extends T>> sources = new ArrayList<Observable<? extends T>>();
<ide> sources.add(o1);
<ide> sources.add(o2);
<ide> public static <T> OnSubscribeFunc<T> amb(Observable<? extends T> o1, Observable<
<ide> return amb(sources);
<ide> }
<ide>
<del> public static <T> OnSubscribeFunc<T> amb(Observable<? extends T> o1, Observable<? extends T> o2, Observable<? extends T> o3, Observable<? extends T> o4, Observable<? extends T> o5, Observable<? extends T> o6) {
<add> public static <T> OnSubscribe<T> amb(Observable<? extends T> o1, Observable<? extends T> o2, Observable<? extends T> o3, Observable<? extends T> o4, Observable<? extends T> o5, Observable<? extends T> o6) {
<ide> List<Observable<? extends T>> sources = new ArrayList<Observable<? extends T>>();
<ide> sources.add(o1);
<ide> sources.add(o2);
<ide> public static <T> OnSubscribeFunc<T> amb(Observable<? extends T> o1, Observable<
<ide> return amb(sources);
<ide> }
<ide>
<del> public static <T> OnSubscribeFunc<T> amb(Observable<? extends T> o1, Observable<? extends T> o2, Observable<? extends T> o3, Observable<? extends T> o4, Observable<? extends T> o5, Observable<? extends T> o6, Observable<? extends T> o7) {
<add> public static <T> OnSubscribe<T> amb(Observable<? extends T> o1, Observable<? extends T> o2, Observable<? extends T> o3, Observable<? extends T> o4, Observable<? extends T> o5, Observable<? extends T> o6, Observable<? extends T> o7) {
<ide> List<Observable<? extends T>> sources = new ArrayList<Observable<? extends T>>();
<ide> sources.add(o1);
<ide> sources.add(o2);
<ide> public static <T> OnSubscribeFunc<T> amb(Observable<? extends T> o1, Observable<
<ide> return amb(sources);
<ide> }
<ide>
<del> public static <T> OnSubscribeFunc<T> amb(Observable<? extends T> o1, Observable<? extends T> o2, Observable<? extends T> o3, Observable<? extends T> o4, Observable<? extends T> o5, Observable<? extends T> o6, Observable<? extends T> o7, Observable<? extends T> o8) {
<add> public static <T> OnSubscribe<T> amb(Observable<? extends T> o1, Observable<? extends T> o2, Observable<? extends T> o3, Observable<? extends T> o4, Observable<? extends T> o5, Observable<? extends T> o6, Observable<? extends T> o7, Observable<? extends T> o8) {
<ide> List<Observable<? extends T>> sources = new ArrayList<Observable<? extends T>>();
<ide> sources.add(o1);
<ide> sources.add(o2);
<ide> public static <T> OnSubscribeFunc<T> amb(Observable<? extends T> o1, Observable<
<ide> return amb(sources);
<ide> }
<ide>
<del> public static <T> OnSubscribeFunc<T> amb(Observable<? extends T> o1, Observable<? extends T> o2, Observable<? extends T> o3, Observable<? extends T> o4, Observable<? extends T> o5, Observable<? extends T> o6, Observable<? extends T> o7, Observable<? extends T> o8, Observable<? extends T> o9) {
<add> public static <T> OnSubscribe<T> amb(Observable<? extends T> o1, Observable<? extends T> o2, Observable<? extends T> o3, Observable<? extends T> o4, Observable<? extends T> o5, Observable<? extends T> o6, Observable<? extends T> o7, Observable<? extends T> o8, Observable<? extends T> o9) {
<ide> List<Observable<? extends T>> sources = new ArrayList<Observable<? extends T>>();
<ide> sources.add(o1);
<ide> sources.add(o2);
<ide> public static <T> OnSubscribeFunc<T> amb(Observable<? extends T> o1, Observable<
<ide> return amb(sources);
<ide> }
<ide>
<del> public static <T> OnSubscribeFunc<T> amb(
<del> final Iterable<? extends Observable<? extends T>> sources) {
<del> return new OnSubscribeFunc<T>() {
<del>
<del> @Override
<del> public Subscription onSubscribe(final Observer<? super T> observer) {
<del> AtomicInteger choice = new AtomicInteger(AmbObserver.NONE);
<del> int index = 0;
<del> CompositeSubscription parentSubscription = new CompositeSubscription();
<del> for (Observable<? extends T> source : sources) {
<del> SafeObservableSubscription subscription = new SafeObservableSubscription();
<del> AmbObserver<T> ambObserver = new AmbObserver<T>(
<del> subscription, observer, index, choice);
<del> parentSubscription.add(subscription.wrap(source
<del> .subscribe(ambObserver)));
<del> index++;
<del> }
<del> return parentSubscription;
<del> }
<del> };
<add> public static <T> OnSubscribe<T> amb(final Iterable<? extends Observable<? extends T>> sources) {
<add> return new OperationAmb<T>(sources);
<ide> }
<ide>
<del> private static class AmbObserver<T> implements Observer<T> {
<add> private static final class AmbSubscriber<T> extends Subscriber<T> {
<ide>
<ide> private static final int NONE = -1;
<ide>
<del> private Subscription subscription;
<ide> private Observer<? super T> observer;
<ide> private int index;
<ide> private AtomicInteger choice;
<ide>
<del> private AmbObserver(Subscription subscription,
<del> Observer<? super T> observer, int index, AtomicInteger choice) {
<del> this.subscription = subscription;
<add> private AmbSubscriber(Subscriber<? super T> observer, int index, AtomicInteger choice) {
<ide> this.observer = observer;
<ide> this.choice = choice;
<ide> this.index = index;
<ide> private AmbObserver(Subscription subscription,
<ide> @Override
<ide> public void onNext(T args) {
<ide> if (!isSelected()) {
<del> subscription.unsubscribe();
<add> unsubscribe();
<ide> return;
<ide> }
<ide> observer.onNext(args);
<ide> public void onNext(T args) {
<ide> @Override
<ide> public void onCompleted() {
<ide> if (!isSelected()) {
<del> subscription.unsubscribe();
<add> unsubscribe();
<ide> return;
<ide> }
<ide> observer.onCompleted();
<ide> public void onCompleted() {
<ide> @Override
<ide> public void onError(Throwable e) {
<ide> if (!isSelected()) {
<del> subscription.unsubscribe();
<add> unsubscribe();
<ide> return;
<ide> }
<ide> observer.onError(e);
<ide> private boolean isSelected() {
<ide> }
<ide> }
<ide>
<add> private final Iterable<? extends Observable<? extends T>> sources;
<add>
<add> private OperationAmb(Iterable<? extends Observable<? extends T>> sources) {
<add> this.sources = sources;
<add> }
<add>
<add> @Override
<add> public void call(Subscriber<? super T> subscriber) {
<add> AtomicInteger choice = new AtomicInteger(AmbSubscriber.NONE);
<add> int index = 0;
<add> for (Observable<? extends T> source : sources) {
<add> if (subscriber.isUnsubscribed()) {
<add> break;
<add> }
<add> AmbSubscriber<T> ambSubscriber = new AmbSubscriber<T>(subscriber, index, choice);
<add> subscriber.add(ambSubscriber);
<add> source.subscribe(ambSubscriber);
<add> index++;
<add> }
<add> }
<add>
<ide> }
<ide><path>rxjava-core/src/test/java/rx/operators/OperationAmbTest.java
<ide> */
<ide> package rx.operators;
<ide>
<del>import static org.mockito.Mockito.*;
<del>import static rx.operators.OperationAmb.*;
<add>import static org.mockito.Mockito.inOrder;
<add>import static org.mockito.Mockito.mock;
<add>import static org.mockito.Mockito.times;
<add>import static rx.operators.OperationAmb.amb;
<ide>
<ide> import java.io.IOException;
<ide> import java.util.concurrent.TimeUnit;
<ide> import org.mockito.InOrder;
<ide>
<ide> import rx.Observable;
<del>import rx.Observable.OnSubscribeFunc;
<add>import rx.Observable.OnSubscribe;
<ide> import rx.Observer;
<ide> import rx.Scheduler.Inner;
<del>import rx.Subscription;
<add>import rx.Subscriber;
<ide> import rx.functions.Action1;
<ide> import rx.schedulers.TestScheduler;
<ide> import rx.subscriptions.CompositeSubscription;
<ide> public void setUp() {
<ide>
<ide> private Observable<String> createObservable(final String[] values,
<ide> final long interval, final Throwable e) {
<del> return Observable.create(new OnSubscribeFunc<String>() {
<add> return Observable.create(new OnSubscribe<String>() {
<ide>
<ide> @Override
<del> public Subscription onSubscribe(final Observer<? super String> observer) {
<add> public void call(final Subscriber<? super String> subscriber) {
<ide> CompositeSubscription parentSubscription = new CompositeSubscription();
<add> subscriber.add(parentSubscription);
<ide> long delay = interval;
<ide> for (final String value : values) {
<ide> parentSubscription.add(scheduler.schedule(new Action1<Inner>() {
<ide> @Override
<ide> public void call(Inner inner) {
<del> observer.onNext(value);
<add> subscriber.onNext(value);
<ide> }
<ide> }, delay, TimeUnit.MILLISECONDS));
<ide> delay += interval;
<ide> public void call(Inner inner) {
<ide> @Override
<ide> public void call(Inner inner) {
<ide> if (e == null) {
<del> observer.onCompleted();
<add> subscriber.onCompleted();
<ide> } else {
<del> observer.onError(e);
<add> subscriber.onError(e);
<ide> }
<ide> }
<ide> }, delay, TimeUnit.MILLISECONDS));
<del> return parentSubscription;
<ide> }
<ide> });
<ide> }
<ide> public void testAmb() {
<ide>
<ide> @Test
<ide> public void testAmb2() {
<del> IOException needHappenedException = new IOException(
<add> IOException expectedException = new IOException(
<ide> "fake exception");
<ide> Observable<String> observable1 = createObservable(new String[] {},
<ide> 2000, new IOException("fake exception"));
<ide> Observable<String> observable2 = createObservable(new String[] {
<del> "2", "22", "222", "2222" }, 1000, needHappenedException);
<add> "2", "22", "222", "2222" }, 1000, expectedException);
<ide> Observable<String> observable3 = createObservable(new String[] {},
<ide> 3000, new IOException("fake exception"));
<ide>
<ide> public void testAmb2() {
<ide> inOrder.verify(observer, times(1)).onNext("22");
<ide> inOrder.verify(observer, times(1)).onNext("222");
<ide> inOrder.verify(observer, times(1)).onNext("2222");
<del> inOrder.verify(observer, times(1)).onError(needHappenedException);
<add> inOrder.verify(observer, times(1)).onError(expectedException);
<ide> inOrder.verifyNoMoreInteractions();
<ide> }
<ide> | 2 |
Java | Java | provide more control over factory failure handling | 12244b2e51bfde11209211c3ee56eef905cd96a7 | <ide><path>spring-core/src/main/java/org/springframework/core/io/support/SpringFactoriesLoader.java
<ide> import java.util.List;
<ide> import java.util.Map;
<ide> import java.util.Properties;
<add>import java.util.function.BiConsumer;
<add>import java.util.function.BiFunction;
<ide> import java.util.function.Function;
<ide> import java.util.function.Supplier;
<ide>
<ide> public final class SpringFactoriesLoader {
<ide>
<ide> private static final Log logger = LogFactory.getLog(SpringFactoriesLoader.class);
<ide>
<add> private static final FailureHandler THROWING_HANDLER = FailureHandler.throwing();
<add>
<ide> static final Map<ClassLoader, Map<String, List<String>>> cache = new ConcurrentReferenceHashMap<>();
<ide>
<ide>
<ide> private SpringFactoriesLoader() {
<ide> * be loaded or if an error occurs while instantiating any factory
<ide> */
<ide> public static <T> List<T> loadFactories(Class<T> factoryType, @Nullable ClassLoader classLoader) {
<del> return loadFactories(factoryType, classLoader, null);
<add> return loadFactories(factoryType, classLoader, null, null);
<ide> }
<ide>
<ide> /**
<ide> public static <T> List<T> loadFactories(Class<T> factoryType, @Nullable ClassLoa
<ide> public static <T> List<T> loadFactories(Class<T> factoryType, @Nullable ClassLoader classLoader,
<ide> @Nullable ArgumentResolver argumentResolver) {
<ide>
<add> return loadFactories(factoryType, classLoader, argumentResolver, null);
<add> }
<add>
<add> /**
<add> * Load and instantiate the factory implementations of the given type from
<add> * {@value #FACTORIES_RESOURCE_LOCATION}, using the given class loader with custom failure
<add> * handling provided by the given failure handler.
<add> * <p>The returned factories are sorted through {@link AnnotationAwareOrderComparator}.
<add> * <p>As of Spring Framework 5.3, if duplicate implementation class names are
<add> * discovered for a given factory type, only one instance of the duplicated
<add> * implementation type will be instantiated.
<add> * <p>For any factory implementation class that cannot be loaded or error that occurs while
<add> * instantiating it, the given failure handler is called.
<add> * @param factoryType the interface or abstract class representing the factory
<add> * @param classLoader the ClassLoader to use for loading (can be {@code null} to use the default)
<add> * @param failureHandler the FactoryInstantiationFailureHandler to use for handling of factory instantiation failures
<add> * @since 6.0
<add> */
<add> public static <T> List<T> loadFactories(Class<T> factoryType, @Nullable ClassLoader classLoader,
<add> @Nullable FailureHandler failureHandler) {
<add>
<add> return loadFactories(factoryType, classLoader, null, failureHandler);
<add> }
<add>
<add> /**
<add> * Load and instantiate the factory implementations of the given type from
<add> * {@value #FACTORIES_RESOURCE_LOCATION}, using the given arguments and class loader with custom
<add> * failure handling provided by the given failure handler.
<add> * <p>The returned factories are sorted through {@link AnnotationAwareOrderComparator}.
<add> * <p>As of Spring Framework 5.3, if duplicate implementation class names are
<add> * discovered for a given factory type, only one instance of the duplicated
<add> * implementation type will be instantiated.
<add> * <p>For any factory implementation class that cannot be loaded or error that occurs while
<add> * instantiating it, the given failure handler is called.
<add> * @param factoryType the interface or abstract class representing the factory
<add> * @param classLoader the ClassLoader to use for loading (can be {@code null} to use the default)
<add> * @param argumentResolver strategy used to resolve constructor arguments by their type
<add> * @param failureHandler the FactoryInstantiationFailureHandler to use for handling of factory
<add> * instantiation failures
<add> * @since 6.0
<add> */
<add> public static <T> List<T> loadFactories(Class<T> factoryType, @Nullable ClassLoader classLoader,
<add> @Nullable ArgumentResolver argumentResolver, @Nullable FailureHandler failureHandler) {
<add>
<ide> Assert.notNull(factoryType, "'factoryType' must not be null");
<ide> ClassLoader classLoaderToUse = (classLoader != null) ? classLoader : SpringFactoriesLoader.class.getClassLoader();
<ide> List<String> factoryImplementationNames = loadFactoryNames(factoryType, classLoaderToUse);
<ide> logger.trace(LogMessage.format("Loaded [%s] names: %s", factoryType.getName(), factoryImplementationNames));
<ide> List<T> result = new ArrayList<>(factoryImplementationNames.size());
<add> FailureHandler failureHandlerToUse = (failureHandler != null) ? failureHandler : THROWING_HANDLER;
<ide> for (String factoryImplementationName : factoryImplementationNames) {
<del> T factory = instantiateFactory(factoryImplementationName, factoryType, argumentResolver, classLoaderToUse);
<add> T factory = instantiateFactory(factoryImplementationName, factoryType,
<add> argumentResolver, classLoaderToUse, failureHandlerToUse);
<ide> if (factory != null) {
<ide> result.add(factory);
<ide> }
<ide> private static List<String> toDistinctUnmodifiableList(String factoryType, List<
<ide> @Nullable
<ide> private static <T> T instantiateFactory(String factoryImplementationName,
<ide> Class<T> factoryType, @Nullable ArgumentResolver argumentResolver,
<del> ClassLoader classLoader) {
<add> ClassLoader classLoader, FailureHandler failureHandler) {
<ide> try {
<ide> Class<?> factoryImplementationClass = ClassUtils.forName(factoryImplementationName, classLoader);
<ide> Assert.isTrue(factoryType.isAssignableFrom(factoryImplementationClass),
<ide> private static <T> T instantiateFactory(String factoryImplementationName,
<ide> return factoryInstantiator.instantiate(argumentResolver);
<ide> }
<ide> catch (Throwable ex) {
<del> throw new IllegalArgumentException("Unable to instantiate factory class [" + factoryImplementationName +
<del> "] for factory type [" + factoryType.getName() + "]", ex);
<add> failureHandler.handleFailure(factoryType, factoryImplementationName, ex);
<add> return null;
<ide> }
<ide> }
<ide>
<ide> private static <T> T instantiate(Constructor<T> constructor, KFunction<T> kotlin
<ide> }
<ide>
<ide>
<add> /**
<add> * Strategy for handling a failure that occurs when instantiating a factory.
<add> *
<add> * @since 6.0
<add> * @see FailureHandler#throwing()
<add> * @see FailureHandler#logging(Log)
<add> */
<add> @FunctionalInterface
<add> public interface FailureHandler {
<add>
<add> /**
<add> * Handle the {@code failure} that occurred when instantiating the {@code factoryImplementationName}
<add> * that was expected to be of the given {@code factoryType}.
<add> * @param factoryType the type of the factory
<add> * @param factoryImplementationName the name of the factory implementation
<add> * @param failure the failure that occurred
<add> * @see #throwing()
<add> * @see #logging
<add> */
<add> void handleFailure(Class<?> factoryType, String factoryImplementationName, Throwable failure);
<add>
<add> /**
<add> * Return a new {@link FailureHandler} that handles
<add> * errors by throwing an {@link IllegalArgumentException}.
<add> * @return a new {@link FailureHandler} instance
<add> */
<add> static FailureHandler throwing() {
<add> return throwing(IllegalArgumentException::new);
<add> }
<add>
<add> /**
<add> * Return a new {@link FailureHandler} that handles
<add> * errors by throwing an exception.
<add> * @param exceptionFactory factory used to create the exception
<add> * @return a new {@link FailureHandler} instance
<add> */
<add> static FailureHandler throwing(BiFunction<String, Throwable, ? extends RuntimeException> exceptionFactory) {
<add> return handleMessage((message, failure) -> {
<add> throw exceptionFactory.apply(message.get(), failure);
<add> });
<add> }
<add>
<add> /**
<add> * Return a new {@link FailureHandler} that handles
<add> * errors by logging trace messages.
<add> * @param logger the logger used to log message
<add> * @return a new {@link FailureHandler} instance
<add> */
<add> static FailureHandler logging(Log logger) {
<add> return handleMessage((message, failure) -> logger.trace(LogMessage.of(message), failure));
<add> }
<add>
<add> /**
<add> * Return a new {@link FailureHandler} that handles
<add> * errors with using a standard formatted message.
<add> * @param messageHandler the message handler used to handle the problem
<add> * @return a new {@link FailureHandler} instance
<add> */
<add> static FailureHandler handleMessage(BiConsumer<Supplier<String>, Throwable> messageHandler) {
<add> return (factoryType, factoryImplementationName, failure) -> {
<add> Supplier<String> message = () -> "Unable to instantiate factory class [" + factoryImplementationName +
<add> "] for factory type [" + factoryType.getName() + "]";
<add> messageHandler.accept(message, failure);
<add> };
<add> }
<add>
<add> }
<add>
<add>
<ide> /**
<ide> * Strategy for resolving constructor arguments based on their type.
<ide> *
<ide><path>spring-core/src/test/java/org/springframework/core/io/support/SpringFactoriesLoaderTests.java
<ide> import java.net.MalformedURLException;
<ide> import java.net.URL;
<ide> import java.net.URLClassLoader;
<add>import java.util.ArrayList;
<ide> import java.util.List;
<ide>
<add>import org.apache.commons.logging.Log;
<ide> import org.junit.jupiter.api.AfterAll;
<ide> import org.junit.jupiter.api.BeforeAll;
<ide> import org.junit.jupiter.api.Nested;
<ide> import org.junit.jupiter.api.Test;
<ide>
<ide> import org.springframework.core.io.support.SpringFactoriesLoader.ArgumentResolver;
<ide> import org.springframework.core.io.support.SpringFactoriesLoader.FactoryInstantiator;
<add>import org.springframework.core.io.support.SpringFactoriesLoader.FailureHandler;
<add>import org.springframework.core.log.LogMessage;
<ide>
<ide> import static org.assertj.core.api.Assertions.assertThat;
<ide> import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException;
<ide> import static org.assertj.core.api.Assertions.assertThatIllegalStateException;
<add>import static org.mockito.ArgumentMatchers.eq;
<add>import static org.mockito.ArgumentMatchers.isA;
<add>import static org.mockito.Mockito.mock;
<add>import static org.mockito.Mockito.verify;
<ide>
<ide> /**
<ide> * Tests for {@link SpringFactoriesLoader}.
<ide> void attemptToLoadFactoryOfIncompatibleType() {
<ide> + "[org.springframework.core.io.support.MyDummyFactory1] for factory type [java.lang.String]");
<ide> }
<ide>
<add> @Test
<add> void attemptToLoadFactoryOfIncompatibleTypeWithLoggingFailureHandler() {
<add> Log logger = mock(Log.class);
<add> FailureHandler failureHandler = FailureHandler.logging(logger);
<add> List<String> factories = SpringFactoriesLoader.loadFactories(String.class, null, failureHandler);
<add> assertThat(factories.isEmpty());
<add> }
<add>
<ide> @Test
<ide> void loadFactoryWithNonDefaultConstructor() {
<ide> ArgumentResolver resolver = ArgumentResolver.of(String.class, "injected");
<ide> void loadFactoryWithMultipleConstructors() {
<ide> .havingRootCause().withMessageContaining("Class [org.springframework.core.io.support.MultipleConstructorArgsDummyFactory] has no suitable constructor");
<ide> }
<ide>
<add> @Test
<add> void loadFactoryWithMissingArgumentUsingLoggingFailureHandler() {
<add> Log logger = mock(Log.class);
<add> FailureHandler failureHandler = FailureHandler.logging(logger);
<add> List<DummyFactory> factories = SpringFactoriesLoader.loadFactories(DummyFactory.class, LimitedClassLoader.multipleArgumentFactories, failureHandler);
<add> assertThat(factories).hasSize(2);
<add> assertThat(factories.get(0)).isInstanceOf(MyDummyFactory1.class);
<add> assertThat(factories.get(1)).isInstanceOf(MyDummyFactory2.class);
<add> }
<add>
<add>
<add> @Nested
<add> class FailureHandlerTests {
<add>
<add> @Test
<add> void throwingReturnsHandlerThatThrowsIllegalArgumentException() {
<add> FailureHandler handler = FailureHandler.throwing();
<add> RuntimeException cause = new RuntimeException();
<add> assertThatIllegalArgumentException().isThrownBy(() -> handler.handleFailure(
<add> DummyFactory.class, MyDummyFactory1.class.getName(),
<add> cause)).withMessageStartingWith("Unable to instantiate factory class").withCause(cause);
<add> }
<add>
<add> @Test
<add> void throwingWithFactoryReturnsHandlerThatThrows() {
<add> FailureHandler handler = FailureHandler.throwing(IllegalStateException::new);
<add> RuntimeException cause = new RuntimeException();
<add> assertThatIllegalStateException().isThrownBy(() -> handler.handleFailure(
<add> DummyFactory.class, MyDummyFactory1.class.getName(),
<add> cause)).withMessageStartingWith("Unable to instantiate factory class").withCause(cause);
<add> }
<add>
<add> @Test
<add> void loggingReturnsHandlerThatLogs() {
<add> Log logger = mock(Log.class);
<add> FailureHandler handler = FailureHandler.logging(logger);
<add> RuntimeException cause = new RuntimeException();
<add> handler.handleFailure(DummyFactory.class, MyDummyFactory1.class.getName(), cause);
<add> verify(logger).trace(isA(LogMessage.class), eq(cause));
<add> }
<add>
<add> @Test
<add> void handleMessageReturnsHandlerThatAcceptsMessage() {
<add> List<Throwable> failures = new ArrayList<>();
<add> List<String> messages = new ArrayList<>();
<add> FailureHandler handler = FailureHandler.handleMessage((message, failure) -> {
<add> failures.add(failure);
<add> messages.add(message.get());
<add> });
<add> RuntimeException cause = new RuntimeException();
<add> handler.handleFailure(DummyFactory.class, MyDummyFactory1.class.getName(), cause);
<add> assertThat(failures).containsExactly(cause);
<add> assertThat(messages).hasSize(1);
<add> assertThat(messages.get(0)).startsWith("Unable to instantiate factory class");
<add> }
<add>
<add> }
<add>
<ide>
<ide> @Nested
<ide> class ArgumentResolverTests { | 2 |
Javascript | Javascript | add row style for multi-column lists | 9e2d3c5d0dcd4b681b8a154678f1cf42d10f30dc | <ide><path>Libraries/Experimental/FlatList.js
<ide> const VirtualizedList = require('VirtualizedList');
<ide>
<ide> const invariant = require('invariant');
<ide>
<add>import type {StyleObj} from 'StyleSheetTypes';
<ide> import type {Viewable} from 'ViewabilityHelper';
<ide>
<ide> type Item = any;
<ide> type OptionalProps = {
<ide> * Set this true while waiting for new data from a refresh.
<ide> */
<ide> refreshing?: ?boolean,
<add> /**
<add> * Optional custom style for multi-item rows generated when numColumns > 1
<add> */
<add> columnWrapperStyle?: StyleObj,
<ide> /**
<ide> * Optional optimization to minimize re-rendering items.
<ide> */
<ide> class FlatList extends React.PureComponent {
<ide> _captureRef = (ref) => { this._listRef = ref; };
<ide>
<ide> _checkProps(props: Props) {
<del> const {getItem, getItemCount, horizontal, legacyImplementation, numColumns} = props;
<add> const {
<add> getItem,
<add> getItemCount,
<add> horizontal,
<add> legacyImplementation,
<add> numColumns,
<add> columnWrapperStyle,
<add> } = props;
<ide> invariant(!getItem && !getItemCount, 'FlatList does not support custom data formats.');
<ide> if (numColumns > 1) {
<ide> invariant(!horizontal, 'numColumns does not support horizontal.');
<add> } else {
<add> invariant(!columnWrapperStyle, 'columnWrapperStyle not supported for single column lists');
<ide> }
<ide> if (legacyImplementation) {
<ide> invariant(numColumns === 1, 'Legacy list does not support multiple columns.');
<ide> class FlatList extends React.PureComponent {
<ide> };
<ide>
<ide> _renderItem = ({item, index}) => {
<del> const {ItemComponent, numColumns} = this.props;
<add> const {ItemComponent, numColumns, columnWrapperStyle} = this.props;
<ide> if (numColumns > 1) {
<ide> return (
<del> <View style={{flexDirection: 'row'}}>
<add> <View style={[{flexDirection: 'row'}, columnWrapperStyle]}>
<ide> {item.map((it, kk) =>
<ide> <ItemComponent key={kk} item={it} index={index * numColumns + kk} />)
<ide> } | 1 |
Ruby | Ruby | return templatepaths from all_template_paths | ef2cb320903c3e93e9d2625fb7da597eb135ce87 | <ide><path>actionview/lib/action_view/dependency_tracker.rb
<ide> def resolve_directories(wildcard_dependencies)
<ide> return [] unless @view_paths
<ide> return [] if wildcard_dependencies.empty?
<ide>
<del> # Remove trailing "*"
<del> prefixes = wildcard_dependencies.map { |query| query[0..-2] }
<add> # Remove trailing "/*"
<add> prefixes = wildcard_dependencies.map { |query| query[0..-3] }
<ide>
<ide> @view_paths.flat_map(&:all_template_paths).uniq.select { |path|
<del> prefixes.any? do |prefix|
<del> path.start_with?(prefix) && !path.index("/", prefix.size)
<del> end
<del> }.sort
<add> prefixes.include?(path.prefix)
<add> }.map(&:to_s).sort
<ide> end
<ide>
<ide> def explicit_dependencies
<ide> dependencies = source.scan(EXPLICIT_DEPENDENCY).flatten.uniq
<ide>
<del> wildcards, explicits = dependencies.partition { |dependency| dependency.end_with?("*") }
<add> wildcards, explicits = dependencies.partition { |dependency| dependency.end_with?("/*") }
<ide>
<ide> (explicits + resolve_directories(wildcards)).uniq
<ide> end
<ide><path>actionview/lib/action_view/template/error.rb
<ide> def corrections
<ide>
<ide> candidates = @error.paths.flat_map(&:all_template_paths).uniq
<ide> if @error.partial
<del> candidates = candidates.grep(%r{_[^/]+\z})
<add> candidates = candidates.select!(&:partial?)
<ide> else
<del> candidates = candidates.grep_v(%r{_[^/]+\z})
<add> candidates = candidates.reject!(&:partial?)
<ide> end
<ide>
<ide> # Group by possible prefixes
<del> files_by_dir = candidates.group_by do |x|
<del> File.dirname(x)
<del> end.transform_values do |files|
<add> files_by_dir = candidates.group_by(&:prefix)
<add> files_by_dir.transform_values! do |files|
<ide> files.map do |file|
<del> # Remove directory
<del> File.basename(file)
<add> # Remove prefix
<add> File.basename(file.to_s)
<ide> end
<ide> end
<ide>
<ide><path>actionview/lib/action_view/template/resolver.rb
<ide> def all_template_paths # :nodoc:
<ide> paths = template_glob("**/*")
<ide> paths.map do |filename|
<ide> filename.from(@path.size + 1).remove(/\.[^\/]*\z/)
<del> end.uniq
<add> end.uniq.map do |filename|
<add> TemplatePath.parse(filename)
<add> end
<ide> end
<ide>
<ide> private
<ide><path>actionview/lib/action_view/template_path.rb
<ide> def to_str
<ide> @virtual
<ide> end
<ide> alias :to_s :to_str
<add>
<add> def hash
<add> @virtual.hash
<add> end
<add>
<add> def eql?(other)
<add> @virtual == other.virtual
<add> end
<add> alias :== :eql?
<ide> end
<ide> end | 4 |
Ruby | Ruby | use proper casting | 501e979e186a3861581aa54f9930421223688d01 | <ide><path>activerecord/lib/active_record/connection_adapters/postgresql_adapter.rb
<ide> def can_perform_case_insensitive_comparison_for?(column)
<ide> SELECT exists(
<ide> SELECT * FROM pg_proc
<ide> INNER JOIN pg_cast
<del> ON casttarget::text::oidvector = proargtypes
<add> ON ARRAY[casttarget]::oidvector = proargtypes
<ide> WHERE proname = 'lower'
<del> AND castsource = '#{column.sql_type}'::regtype::oid
<add> AND castsource = #{quote column.sql_type}::regtype
<ide> )
<ide> end_sql
<ide> execute_and_clear(sql, "SCHEMA", []) do |result| | 1 |
Mixed | Ruby | add option to verify origin header in csrf checks | 85783534fcf1baefa5b502a2bfee235ae6d612d7 | <ide><path>actionpack/lib/action_controller/metal/request_forgery_protection.rb
<ide> module RequestForgeryProtection
<ide> config_accessor :log_warning_on_csrf_failure
<ide> self.log_warning_on_csrf_failure = true
<ide>
<add> # Controls whether the Origin header is checked in addition to the CSRF token.
<add> config_accessor :forgery_protection_origin_check
<add> self.forgery_protection_origin_check = false
<add>
<ide> helper_method :form_authenticity_token
<ide> helper_method :protect_against_forgery?
<ide> end
<ide> def non_xhr_javascript_response?
<ide> # * Does the X-CSRF-Token header match the form_authenticity_token
<ide> def verified_request?
<ide> !protect_against_forgery? || request.get? || request.head? ||
<del> valid_authenticity_token?(session, form_authenticity_param) ||
<del> valid_authenticity_token?(session, request.headers['X-CSRF-Token'])
<add> (valid_request_origin? && any_authenticity_token_valid?)
<add> end
<add>
<add> # Checks if any of the authenticity tokens from the request are valid.
<add> def any_authenticity_token_valid?
<add> request_authenticity_tokens.any? do |token|
<add> valid_authenticity_token?(session, token)
<add> end
<add> end
<add>
<add> # Possible authenticity tokens sent in the request.
<add> def request_authenticity_tokens
<add> [form_authenticity_param, request.x_csrf_token]
<ide> end
<ide>
<ide> # Sets the token value for the current session.
<ide> def form_authenticity_param
<ide> def protect_against_forgery?
<ide> allow_forgery_protection
<ide> end
<add>
<add> # Checks if the request originated from the same origin by looking at the
<add> # Origin header.
<add> def valid_request_origin?
<add> if forgery_protection_origin_check
<add> # We accept blank origin headers because some user agents don't send it.
<add> request.origin.nil? || request.origin == request.base_url
<add> else
<add> true
<add> end
<add> end
<ide> end
<ide> end
<ide><path>actionpack/lib/action_dispatch/http/request.rb
<ide> class Request
<ide> HTTP_ACCEPT HTTP_ACCEPT_CHARSET HTTP_ACCEPT_ENCODING
<ide> HTTP_ACCEPT_LANGUAGE HTTP_CACHE_CONTROL HTTP_FROM
<ide> HTTP_NEGOTIATE HTTP_PRAGMA HTTP_CLIENT_IP
<del> HTTP_X_FORWARDED_FOR HTTP_VERSION
<del> HTTP_X_REQUEST_ID HTTP_X_FORWARDED_HOST
<add> HTTP_X_FORWARDED_FOR HTTP_ORIGIN HTTP_VERSION
<add> HTTP_X_CSRF_TOKEN HTTP_X_REQUEST_ID HTTP_X_FORWARDED_HOST
<ide> SERVER_ADDR
<ide> ].freeze
<ide>
<ide><path>actionpack/test/controller/request_forgery_protection_test.rb
<ide> def test_should_allow_put_with_token_in_header
<ide> assert_not_blocked { put :index }
<ide> end
<ide>
<add> def test_should_allow_post_with_origin_checking_and_correct_origin
<add> forgery_protection_origin_check do
<add> session[:_csrf_token] = @token
<add> @controller.stub :form_authenticity_token, @token do
<add> assert_not_blocked do
<add> @request.set_header 'HTTP_ORIGIN', 'http://test.host'
<add> post :index, params: { custom_authenticity_token: @token }
<add> end
<add> end
<add> end
<add> end
<add>
<add> def test_should_allow_post_with_origin_checking_and_no_origin
<add> forgery_protection_origin_check do
<add> session[:_csrf_token] = @token
<add> @controller.stub :form_authenticity_token, @token do
<add> assert_not_blocked do
<add> post :index, params: { custom_authenticity_token: @token }
<add> end
<add> end
<add> end
<add> end
<add>
<add> def test_should_block_post_with_origin_checking_and_wrong_origin
<add> forgery_protection_origin_check do
<add> session[:_csrf_token] = @token
<add> @controller.stub :form_authenticity_token, @token do
<add> assert_blocked do
<add> @request.set_header 'HTTP_ORIGIN', 'http://bad.host'
<add> post :index, params: { custom_authenticity_token: @token }
<add> end
<add> end
<add> end
<add> end
<add>
<ide> def test_should_warn_on_missing_csrf_token
<ide> old_logger = ActionController::Base.logger
<ide> logger = ActiveSupport::LogSubscriber::TestHelper::MockLogger.new
<ide> def assert_cross_origin_blocked
<ide> def assert_cross_origin_not_blocked
<ide> assert_not_blocked { yield }
<ide> end
<add>
<add> def forgery_protection_origin_check
<add> old_setting = ActionController::Base.forgery_protection_origin_check
<add> ActionController::Base.forgery_protection_origin_check = true
<add> begin
<add> yield
<add> ensure
<add> ActionController::Base.forgery_protection_origin_check = old_setting
<add> end
<add> end
<ide> end
<ide>
<ide> # OK let's get our test on
<ide><path>guides/source/configuring.md
<ide> The schema dumper adds one additional configuration option:
<ide>
<ide> * `config.action_controller.allow_forgery_protection` enables or disables CSRF protection. By default this is `false` in test mode and `true` in all other modes.
<ide>
<add>* `config.action_controller.forgery_protection_origin_check` configures whether the HTTP `Origin` header should be checked against the site's origin as an additional CSRF defense.
<add>
<ide> * `config.action_controller.relative_url_root` can be used to tell Rails that you are [deploying to a subdirectory](configuring.html#deploy-to-a-subdirectory-relative-url-root). The default is `ENV['RAILS_RELATIVE_URL_ROOT']`.
<ide>
<ide> * `config.action_controller.permit_all_parameters` sets all the parameters for mass assignment to be permitted by default. The default value is `false`.
<ide><path>railties/lib/rails/generators/rails/app/templates/config/initializers/request_forgery_protection.rb
<add># Be sure to restart your server when you modify this file.
<add>
<add># Enable origin-checking CSRF mitigation.
<add>Rails.application.config.action_controller.forgery_protection_origin_check = true | 5 |
Python | Python | implement new language methods and pipeline api | 212c8f071180c9ce134a74b85603e48c14199595 | <ide><path>spacy/language.py
<ide> def create_tokenizer(cls, nlp=None):
<ide> prefix_search=prefix_search, suffix_search=suffix_search,
<ide> infix_finditer=infix_finditer, token_match=token_match)
<ide>
<del> @classmethod
<del> def create_tagger(cls, nlp=None, **cfg):
<del> if nlp is None:
<del> return NeuralTagger(cls.create_vocab(nlp), **cfg)
<del> else:
<del> return NeuralTagger(nlp.vocab, **cfg)
<del>
<del> @classmethod
<del> def create_parser(cls, nlp=None, **cfg):
<del> if nlp is None:
<del> return NeuralDependencyParser(cls.create_vocab(nlp), **cfg)
<del> else:
<del> return NeuralDependencyParser(nlp.vocab, **cfg)
<del>
<del> @classmethod
<del> def create_entity(cls, nlp=None, **cfg):
<del> if nlp is None:
<del> return NeuralEntityRecognizer(cls.create_vocab(nlp), **cfg)
<del> else:
<del> return NeuralEntityRecognizer(nlp.vocab, **cfg)
<del>
<del> @classmethod
<del> def create_pipeline(cls, nlp=None, disable=tuple()):
<del> meta = nlp.meta if nlp is not None else {}
<del> # Resolve strings, like "cnn", "lstm", etc
<del> pipeline = []
<del> for entry in meta.get('pipeline', []):
<del> if entry in disable or getattr(entry, 'name', entry) in disable:
<del> continue
<del> factory = cls.Defaults.factories[entry]
<del> pipeline.append(factory(nlp, **meta.get(entry, {})))
<del> return pipeline
<del>
<del> factories = {
<del> 'make_doc': create_tokenizer,
<del> 'tensorizer': lambda nlp, **cfg: [TokenVectorEncoder(nlp.vocab, **cfg)],
<del> 'tagger': lambda nlp, **cfg: [NeuralTagger(nlp.vocab, **cfg)],
<del> 'parser': lambda nlp, **cfg: [
<del> NeuralDependencyParser(nlp.vocab, **cfg),
<del> nonproj.deprojectivize],
<del> 'ner': lambda nlp, **cfg: [NeuralEntityRecognizer(nlp.vocab, **cfg)],
<del> 'similarity': lambda nlp, **cfg: [SimilarityHook(nlp.vocab, **cfg)],
<del> 'textcat': lambda nlp, **cfg: [TextCategorizer(nlp.vocab, **cfg)],
<del> # Temporary compatibility -- delete after pivot
<del> 'token_vectors': lambda nlp, **cfg: [TokenVectorEncoder(nlp.vocab, **cfg)],
<del> 'tags': lambda nlp, **cfg: [NeuralTagger(nlp.vocab, **cfg)],
<del> 'dependencies': lambda nlp, **cfg: [
<del> NeuralDependencyParser(nlp.vocab, **cfg),
<del> nonproj.deprojectivize,
<del> ],
<del> 'entities': lambda nlp, **cfg: [NeuralEntityRecognizer(nlp.vocab, **cfg)],
<del> }
<del>
<add> pipe_names = ['tensorizer', 'tagger', 'parser', 'ner']
<ide> token_match = TOKEN_MATCH
<ide> prefixes = tuple(TOKENIZER_PREFIXES)
<ide> suffixes = tuple(TOKENIZER_SUFFIXES)
<ide> class Language(object):
<ide> Defaults = BaseDefaults
<ide> lang = None
<ide>
<del> def __init__(self, vocab=True, make_doc=True, pipeline=None,
<del> meta={}, disable=tuple(), **kwargs):
<add> factories = {
<add> 'tokenizer': lambda nlp: nlp.Defaults.create_tokenizer(nlp),
<add> 'tensorizer': lambda nlp, **cfg: TokenVectorEncoder(nlp.vocab, **cfg),
<add> 'tagger': lambda nlp, **cfg: NeuralTagger(nlp.vocab, **cfg),
<add> 'parser': lambda nlp, **cfg: NeuralDependencyParser(nlp.vocab, **cfg), # nonproj.deprojectivize,
<add> 'ner': lambda nlp, **cfg: NeuralEntityRecognizer(nlp.vocab, **cfg),
<add> 'similarity': lambda nlp, **cfg: SimilarityHook(nlp.vocab, **cfg),
<add> 'textcat': lambda nlp, **cfg: TextCategorizer(nlp.vocab, **cfg)
<add> }
<add>
<add> def __init__(self, vocab=True, make_doc=True, meta={}, **kwargs):
<ide> """Initialise a Language object.
<ide>
<ide> vocab (Vocab): A `Vocab` object. If `True`, a vocab is created via
<ide> def __init__(self, vocab=True, make_doc=True, pipeline=None,
<ide> factory = self.Defaults.create_tokenizer
<ide> make_doc = factory(self, **meta.get('tokenizer', {}))
<ide> self.tokenizer = make_doc
<del> if pipeline is True:
<del> self.pipeline = self.Defaults.create_pipeline(self, disable)
<del> elif pipeline:
<del> # Careful not to do getattr(p, 'name', None) here
<del> # If we had disable=[None], we'd disable everything!
<del> self.pipeline = [p for p in pipeline
<del> if p not in disable
<del> and getattr(p, 'name', p) not in disable]
<del> # Resolve strings, like "cnn", "lstm", etc
<del> for i, entry in enumerate(self.pipeline):
<del> if entry in self.Defaults.factories:
<del> factory = self.Defaults.factories[entry]
<del> self.pipeline[i] = factory(self, **meta.get(entry, {}))
<del> else:
<del> self.pipeline = []
<del> flat_list = []
<del> for pipe in self.pipeline:
<del> if isinstance(pipe, list):
<del> flat_list.extend(pipe)
<del> else:
<del> flat_list.append(pipe)
<del> self.pipeline = flat_list
<add> self.pipeline = []
<ide> self._optimizer = None
<ide>
<ide> @property
<ide> def meta(self):
<ide> self._meta.setdefault('email', '')
<ide> self._meta.setdefault('url', '')
<ide> self._meta.setdefault('license', '')
<del> pipeline = []
<del> for component in self.pipeline:
<del> if hasattr(component, 'name'):
<del> pipeline.append(component.name)
<del> self._meta['pipeline'] = pipeline
<add> self._meta['pipeline'] = self.pipe_names
<ide> return self._meta
<ide>
<ide> @meta.setter
<ide> def meta(self, value):
<ide> # Conveniences to access pipeline components
<ide> @property
<ide> def tensorizer(self):
<del> return self.get_component('tensorizer')
<add> return self.get_pipe('tensorizer')
<ide>
<ide> @property
<ide> def tagger(self):
<del> return self.get_component('tagger')
<add> return self.get_pipe('tagger')
<ide>
<ide> @property
<ide> def parser(self):
<del> return self.get_component('parser')
<add> return self.get_pipe('parser')
<ide>
<ide> @property
<ide> def entity(self):
<del> return self.get_component('ner')
<add> return self.get_pipe('ner')
<ide>
<ide> @property
<ide> def matcher(self):
<del> return self.get_component('matcher')
<add> return self.get_pipe('matcher')
<add>
<add> @property
<add> def pipe_names(self):
<add> """Get names of available pipeline components.
<add>
<add> RETURNS (list): List of component name strings, in order.
<add> """
<add> return [pipe_name for pipe_name, _ in self.pipeline]
<add>
<add> def get_pipe(self, name):
<add> """Get a pipeline component for a given component name.
<add>
<add> name (unicode): Name of pipeline component to get.
<add> RETURNS (callable): The pipeline component.
<add> """
<add> for pipe_name, component in self.pipeline:
<add> if pipe_name == name:
<add> return component
<add> msg = "No component '{}' found in pipeline. Available names: {}"
<add> raise KeyError(msg.format(name, self.pipe_names))
<add>
<add> def create_pipe(self, name, config=dict()):
<add> """Create a pipeline component from a factory.
<add>
<add> name (unicode): Factory name to look up in `Language.factories`.
<add> RETURNS (callable): Pipeline component.
<add> """
<add> if name not in self.factories:
<add> raise KeyError("Can't find factory for '{}'.".format(name))
<add> factory = self.factories[name]
<add> return factory(self, **config)
<add>
<add> def add_pipe(self, component, name=None, before=None, after=None,
<add> first=None, last=None):
<add> """Add a component to the processing pipeline. Valid components are
<add> callables that take a `Doc` object, modify it and return it. Only one of
<add> before, after, first or last can be set. Default behaviour is "last".
<add>
<add> component (callable): The pipeline component.
<add> name (unicode): Name of pipeline component. Overwrites existing
<add> component.name attribute if available. If no name is set and
<add> the component exposes no name attribute, component.__name__ is
<add> used. An error is raised if the name already exists in the pipeline.
<add> before (unicode): Component name to insert component directly before.
<add> after (unicode): Component name to insert component directly after.
<add> first (bool): Insert component first / not first in the pipeline.
<add> last (bool): Insert component last / not last in the pipeline.
<add>
<add> EXAMPLE:
<add> >>> nlp.add_pipe(component, before='ner')
<add> >>> nlp.add_pipe(component, name='custom_name', last=True)
<add> """
<add> if name is None:
<add> name = getattr(component, 'name', component.__name__)
<add> if name in self.pipe_names:
<add> raise ValueError("'{}' already exists in pipeline.".format(name))
<add> if sum([bool(before), bool(after), bool(first), bool(last)]) >= 2:
<add> msg = ("Invalid constraints. You can only set one of the "
<add> "following: before, after, first, last.")
<add> raise ValueError(msg)
<add> pipe = (name, component)
<add> if last or not any([first, before, after]):
<add> self.pipeline.append(pipe)
<add> elif first:
<add> self.pipeline.insert(0, pipe)
<add> elif before and before in self.pipe_names:
<add> self.pipeline.insert(self.pipe_names.index(before), pipe)
<add> elif after and after in self.pipe_names:
<add> self.pipeline.insert(self.pipe_names.index(after), pipe)
<add> else:
<add> msg = "Can't find '{}' in pipeline. Available names: {}"
<add> unfound = before or after
<add> raise ValueError(msg.format(unfound, self.pipe_names))
<ide>
<del> def get_component(self, name):
<del> if self.pipeline in (True, None):
<del> return None
<del> for proc in self.pipeline:
<del> if hasattr(proc, 'name') and proc.name.endswith(name):
<del> return proc
<del> return None
<add> def replace_pipe(self, name, component):
<add> """Replace a component in the pipeline.
<add>
<add> name (unicode): Name of the component to replace.
<add> component (callable): Pipeline component.
<add> """
<add> if name not in self.pipe_names:
<add> msg = "Can't find '{}' in pipeline. Available names: {}"
<add> raise ValueError(msg.format(name, self.pipe_names))
<add> self.pipeline[self.pipe_names.index(name)] = (name, component)
<add>
<add> def rename_pipe(self, old_name, new_name):
<add> """Rename a pipeline component.
<add>
<add> old_name (unicode): Name of the component to rename.
<add> new_name (unicode): New name of the component.
<add> """
<add> if old_name not in self.pipe_names:
<add> msg = "Can't find '{}' in pipeline. Available names: {}"
<add> raise ValueError(msg.format(old_name, self.pipe_names))
<add> if new_name in self.pipe_names:
<add> msg = "'{}' already exists in pipeline. Existing names: {}"
<add> raise ValueError(msg.format(new_name, self.pipe_names))
<add> i = self.pipe_names.index(old_name)
<add> self.pipeline[i] = (new_name, self.pipeline[i][1])
<add>
<add> def remove_pipe(self, name):
<add> """Remove a component from the pipeline.
<add>
<add> name (unicode): Name of the component to remove.
<add> RETURNS (tuple): A (name, component) tuple of the removed component.
<add> """
<add> if name not in self.pipe_names:
<add> msg = "Can't find '{}' in pipeline. Available names: {}"
<add> raise ValueError(msg.format(name, self.pipe_names))
<add> return self.pipeline.pop(self.pipe_names.index(name))
<ide>
<ide> def __call__(self, text, disable=[]):
<ide> """'Apply the pipeline to some text. The text can span multiple sentences,
<ide> def __call__(self, text, disable=[]):
<ide> ('An', 'NN')
<ide> """
<ide> doc = self.make_doc(text)
<del> for proc in self.pipeline:
<del> name = getattr(proc, 'name', None)
<add> for name, proc in self.pipeline:
<ide> if name in disable:
<ide> continue
<ide> doc = proc(doc)
<ide> def get_grads(W, dW, key=None):
<ide> grads[key] = (W, dW)
<ide> pipes = list(self.pipeline)
<ide> random.shuffle(pipes)
<del> for proc in pipes:
<add> for name, proc in pipes:
<ide> if not hasattr(proc, 'update'):
<ide> continue
<ide> proc.update(docs, golds, drop=drop, sgd=get_grads, losses=losses)
<ide> def preprocess_gold(self, docs_golds):
<ide> docs_golds (iterable): Tuples of `Doc` and `GoldParse` objects.
<ide> YIELDS (tuple): Tuples of preprocessed `Doc` and `GoldParse` objects.
<ide> """
<del> for proc in self.pipeline:
<add> for name, proc in self.pipeline:
<ide> if hasattr(proc, 'preprocess_gold'):
<ide> docs_golds = proc.preprocess_gold(docs_golds)
<ide> for doc, gold in docs_golds:
<ide> def begin_training(self, get_gold_tuples=None, **cfg):
<ide> else:
<ide> device = None
<ide> link_vectors_to_models(self.vocab)
<del> for proc in self.pipeline:
<add> for name, proc in self.pipeline:
<ide> if hasattr(proc, 'begin_training'):
<ide> context = proc.begin_training(get_gold_tuples(),
<ide> pipeline=self.pipeline)
<ide> def evaluate(self, docs_golds, verbose=False):
<ide> docs, golds = zip(*docs_golds)
<ide> docs = list(docs)
<ide> golds = list(golds)
<del> for pipe in self.pipeline:
<add> for name, pipe in self.pipeline:
<ide> if not hasattr(pipe, 'pipe'):
<ide> for doc in docs:
<ide> pipe(doc)
<ide> def use_params(self, params, **cfg):
<ide> >>> with nlp.use_params(optimizer.averages):
<ide> >>> nlp.to_disk('/tmp/checkpoint')
<ide> """
<del> contexts = [pipe.use_params(params) for pipe
<add> contexts = [pipe.use_params(params) for name, pipe
<ide> in self.pipeline if hasattr(pipe, 'use_params')]
<ide> # TODO: Having trouble with contextlib
<ide> # Workaround: these aren't actually context managers atm.
<ide> def pipe(self, texts, as_tuples=False, n_threads=2, batch_size=1000,
<ide> yield (doc, context)
<ide> return
<ide> docs = (self.make_doc(text) for text in texts)
<del> for proc in self.pipeline:
<del> name = getattr(proc, 'name', None)
<add> for name, proc in self.pipeline:
<ide> if name in disable:
<ide> continue
<ide> if hasattr(proc, 'pipe'):
<ide> def to_disk(self, path, disable=tuple()):
<ide> ('tokenizer', lambda p: self.tokenizer.to_disk(p, vocab=False)),
<ide> ('meta.json', lambda p: p.open('w').write(json_dumps(self.meta)))
<ide> ))
<del> for proc in self.pipeline:
<add> for name, proc in self.pipeline:
<ide> if not hasattr(proc, 'name'):
<ide> continue
<del> if proc.name in disable:
<add> if name in disable:
<ide> continue
<ide> if not hasattr(proc, 'to_disk'):
<ide> continue
<del> serializers[proc.name] = lambda p, proc=proc: proc.to_disk(p, vocab=False)
<add> serializers[name] = lambda p, proc=proc: proc.to_disk(p, vocab=False)
<ide> serializers['vocab'] = lambda p: self.vocab.to_disk(p)
<ide> util.to_disk(path, serializers, {p: False for p in disable})
<ide>
<ide> def from_disk(self, path, disable=tuple()):
<ide> ('tokenizer', lambda p: self.tokenizer.from_disk(p, vocab=False)),
<ide> ('meta.json', lambda p: p.open('w').write(json_dumps(self.meta)))
<ide> ))
<del> for proc in self.pipeline:
<del> if not hasattr(proc, 'name'):
<del> continue
<del> if proc.name in disable:
<add> for name, proc in self.pipeline:
<add> if name in disable:
<ide> continue
<ide> if not hasattr(proc, 'to_disk'):
<ide> continue
<del> deserializers[proc.name] = lambda p, proc=proc: proc.from_disk(p, vocab=False)
<add> deserializers[name] = lambda p, proc=proc: proc.from_disk(p, vocab=False)
<ide> exclude = {p: False for p in disable}
<ide> if not (path / 'vocab').exists():
<ide> exclude['vocab'] = True
<ide> def to_bytes(self, disable=[]):
<ide> ('tokenizer', lambda: self.tokenizer.to_bytes(vocab=False)),
<ide> ('meta', lambda: ujson.dumps(self.meta))
<ide> ))
<del> for i, proc in enumerate(self.pipeline):
<del> if getattr(proc, 'name', None) in disable:
<add> for i, (name, proc) in enumerate(self.pipeline):
<add> if name in disable:
<ide> continue
<ide> if not hasattr(proc, 'to_bytes'):
<ide> continue
<ide> def from_bytes(self, bytes_data, disable=[]):
<ide> ('tokenizer', lambda b: self.tokenizer.from_bytes(b, vocab=False)),
<ide> ('meta', lambda b: self.meta.update(ujson.loads(b)))
<ide> ))
<del> for i, proc in enumerate(self.pipeline):
<del> if getattr(proc, 'name', None) in disable:
<add> for i, (name, proc) in enumerate(self.pipeline):
<add> if name in disable:
<ide> continue
<ide> if not hasattr(proc, 'from_bytes'):
<ide> continue
<ide><path>spacy/util.py
<ide> def load_model_from_path(model_path, meta=False, **overrides):
<ide> if not meta:
<ide> meta = get_model_meta(model_path)
<ide> cls = get_lang_class(meta['lang'])
<del> nlp = cls(pipeline=meta.get('pipeline', True), meta=meta, **overrides)
<add> nlp = cls(meta=meta, **overrides)
<add> for name in meta.get('pipeline', []):
<add> config = meta.get('pipeline_args', {}).get(name, {})
<add> component = nlp.create_pipe(name, config=config)
<add> nlp.add_pipe(component, name=name)
<ide> return nlp.from_disk(model_path)
<ide>
<ide> | 2 |
PHP | PHP | apply fixes from styleci | d1c3faa3e70a5862287a10846dee5aedc13130f8 | <ide><path>src/Illuminate/Foundation/Console/DocsCommand.php
<ide> class DocsCommand extends Command
<ide> protected $help = 'If you would like to perform a content search against the documention, you may call: <fg=green>php artisan docs -- </><fg=green;options=bold;>search query here</>';
<ide>
<ide> /**
<del> * The HTTP client instance
<add> * The HTTP client instance.
<ide> *
<ide> * @var \Illuminate\Http\Client\Factory
<ide> */ | 1 |
Java | Java | fix checkstyle violations | 282a4ad2f6c5fa5ebd3ca0c4a9fda8be3611e02e | <ide><path>spring-core/src/main/java/org/springframework/util/unit/DataSize.java
<ide> * @author Stephane Nicoll
<ide> * @since 5.1
<ide> */
<del>public class DataSize implements Comparable<DataSize> {
<add>public final class DataSize implements Comparable<DataSize> {
<ide>
<ide> /**
<ide> * The pattern for parsing.
<ide><path>spring-core/src/main/java/org/springframework/util/unit/package-info.java
<del>/*
<del> * Copyright 2002-2018 the original author or authors.
<del> *
<del> * Licensed under the Apache License, Version 2.0 (the "License");
<del> * you may not use this file except in compliance with the License.
<del> * You may obtain a copy of the License at
<del> *
<del> * http://www.apache.org/licenses/LICENSE-2.0
<del> *
<del> * Unless required by applicable law or agreed to in writing, software
<del> * distributed under the License is distributed on an "AS IS" BASIS,
<del> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<del> * See the License for the specific language governing permissions and
<del> * limitations under the License.
<del> */
<del>
<ide> /**
<ide> * Useful unit data types.
<ide> */
<ide> package org.springframework.util.unit;
<ide>
<ide> import org.springframework.lang.NonNullApi;
<del>import org.springframework.lang.NonNullFields;
<ide>\ No newline at end of file
<add>import org.springframework.lang.NonNullFields; | 2 |
PHP | PHP | remove time echo from index.php. whoops! | b796ef1bd7ac0448edb831cd020a85d193fc150f | <ide><path>public/index.php
<ide> * @link http://laravel.com
<ide> */
<ide>
<del>$time = microtime(true);
<del>
<ide> // --------------------------------------------------------------
<ide> // Define the framework paths.
<ide> // --------------------------------------------------------------
<ide> // --------------------------------------------------------------
<ide> // Send the response to the browser.
<ide> // --------------------------------------------------------------
<del>$response->send();
<del>
<del>echo (microtime(true) - $time) * 1000;
<ide>\ No newline at end of file
<add>$response->send();
<ide>\ No newline at end of file | 1 |
PHP | PHP | add version shell and --version | 5ce3e769c15e7257f7e77cd5be30d151710424db | <ide><path>src/Console/CommandRunner.php
<ide> use Cake\Console\Exception\StopException;
<ide> use Cake\Console\Shell;
<ide> use Cake\Http\BaseApplication;
<add>use Cake\Shell\VersionShell;
<ide> use RuntimeException;
<ide>
<ide> /**
<ide> class CommandRunner
<ide> */
<ide> protected $root;
<ide>
<add> /**
<add> * Alias mappings.
<add> *
<add> * @var array
<add> */
<add> protected $aliases = [];
<add>
<ide> /**
<ide> * Constructor
<ide> *
<ide> public function __construct(BaseApplication $app, $root = 'cake')
<ide> {
<ide> $this->app = $app;
<ide> $this->root = $root;
<add> $this->aliases = [
<add> '--version' => 'version'
<add> ];
<add> }
<add>
<add> /**
<add> * Replace the alias map for a runner.
<add> *
<add> * Aliases allow you to define alternate names for commands
<add> * in the collection. This can be useful to add top level switches
<add> * like `--version` or `-h`
<add> *
<add> * @param array $aliases The map of aliases to replace.
<add> * @return $this
<add> */
<add> public function setAliases(array $aliases)
<add> {
<add> $this->aliases = $aliases;
<add>
<add> return $this;
<ide> }
<ide>
<ide> /**
<ide> public function run(array $argv, ConsoleIo $io = null)
<ide> {
<ide> $this->app->bootstrap();
<ide>
<del> $commands = $this->app->console(new CommandCollection());
<add> $commands = new CommandCollection([
<add> 'version' => VersionShell::class,
<add> ]);
<add> $commands = $this->app->console($commands);
<ide> if (!($commands instanceof CommandCollection)) {
<ide> $type = is_object($commands) ? get_class($commands) : gettype($commands);
<ide> throw new RuntimeException(
<ide> public function run(array $argv, ConsoleIo $io = null)
<ide> */
<ide> protected function getShell(ConsoleIo $io, CommandCollection $commands, $name)
<ide> {
<add> if (isset($this->aliases[$name])) {
<add> $name = $this->aliases[$name];
<add> }
<ide> if (!$commands->has($name)) {
<ide> throw new RuntimeException(
<ide> "Unknown command `{$this->root} {$name}`." .
<ide><path>src/Shell/VersionShell.php
<add><?php
<add>/**
<add> * CakePHP(tm) : Rapid Development Framework (https://cakephp.org)
<add> * Copyright (c) Cake Software Foundation, Inc. (https://cakefoundation.org)
<add> *
<add> * Licensed under The MIT License
<add> * For full copyright and license information, please see the LICENSE.txt
<add> * Redistributions of files must retain the above copyright notice.
<add> *
<add> * @copyright Copyright (c) Cake Software Foundation, Inc. (https://cakefoundation.org)
<add> * @link https://cakephp.org CakePHP(tm) Project
<add> * @since 3.5.0
<add> * @license https://opensource.org/licenses/mit-license.php MIT License
<add> */
<add>
<add>namespace Cake\Shell;
<add>
<add>use Cake\Console\Shell;
<add>use Cake\Core\Configure;
<add>
<add>/**
<add> * Print out the version of CakePHP in use.
<add> */
<add>class VersionShell extends Shell
<add>{
<add> /**
<add> * Print out the version of CakePHP in use.
<add> *
<add> * @return void
<add> */
<add> public function main()
<add> {
<add> $this->out(Configure::version());
<add> }
<add>}
<ide><path>tests/TestCase/Console/CommandRunnerTest.php
<ide> public function testRunHelpShortOption()
<ide> *
<ide> * @return void
<ide> */
<del> public function testRunVersionLongOption()
<add> public function testRunVersionAlias()
<ide> {
<del> $this->markTestIncomplete();
<del> }
<add> $app = $this->getMockBuilder(BaseApplication::class)
<add> ->setMethods(['middleware', 'bootstrap'])
<add> ->setConstructorArgs([$this->config])
<add> ->getMock();
<ide>
<del> /**
<del> * Test using `cake -v` invokes the version command
<del> *
<del> * @return void
<del> */
<del> public function testRunVersionShortOption()
<del> {
<del> $this->markTestIncomplete();
<add> $output = new ConsoleOutput();
<add> $runner = new CommandRunner($app, 'cake');
<add> $result = $runner->run(['cake', '--version'], $this->getMockIo($output));
<add> $this->assertContains(Configure::version(), $output->messages()[0]);
<ide> }
<ide>
<ide> /** | 3 |
Ruby | Ruby | dup the env hash on header#env | c0c726849bf697f6c76b86e704ca51b04e252341 | <ide><path>actionpack/lib/action_dispatch/http/headers.rb
<ide> def merge!(headers_or_env)
<ide> end
<ide> end
<ide>
<del> def env; @req.env; end
<add> def env; @req.env.dup; end
<ide>
<ide> private
<ide> | 1 |
Javascript | Javascript | add spec for devloadingview | 35cf427365b8f58f7fec65a18c7cf9727ff54876 | <ide><path>Libraries/Utilities/HMRLoadingView.ios.js
<ide> 'use strict';
<ide>
<ide> const processColor = require('../StyleSheet/processColor');
<del>const {DevLoadingView} = require('../BatchedBridge/NativeModules');
<add>import NativeDevLoadingView from './NativeDevLoadingView';
<ide>
<ide> class HMRLoadingView {
<ide> static showMessage(message: string) {
<del> DevLoadingView.showMessage(
<del> message,
<del> processColor('#000000'),
<del> processColor('#aaaaaa'),
<del> );
<add> if (NativeDevLoadingView != null) {
<add> NativeDevLoadingView.showMessage(
<add> message,
<add> processColor('#000000'),
<add> processColor('#aaaaaa'),
<add> );
<add> }
<ide> }
<ide>
<ide> static hide() {
<del> DevLoadingView.hide();
<add> if (NativeDevLoadingView != null) {
<add> NativeDevLoadingView.hide();
<add> }
<ide> }
<ide> }
<ide>
<ide><path>Libraries/Utilities/NativeDevLoadingView.js
<add>/**
<add> * Copyright (c) Facebook, Inc. and its affiliates.
<add> *
<add> * This source code is licensed under the MIT license found in the
<add> * LICENSE file in the root directory of this source tree.
<add> *
<add> * @flow
<add> * @format
<add> */
<add>
<add>'use strict';
<add>
<add>import type {TurboModule} from 'RCTExport';
<add>import * as TurboModuleRegistry from 'TurboModuleRegistry';
<add>
<add>export interface Spec extends TurboModule {
<add> +showMessage: (
<add> message: string,
<add> color: Object,
<add> backgroundColor: Object,
<add> ) => void;
<add> +hide: () => void;
<add>}
<add>
<add>export default TurboModuleRegistry.get<Spec>('DevLoadingView'); | 2 |
PHP | PHP | fix typo in class alias | 5e6983734a1232767110ad1b46a1c92532a3d4da | <ide><path>config/bootstrap.php
<ide> class_alias('Cake\Network\Exception\SocketException', 'Cake\Error\SocketExceptio
<ide> class_alias('Cake\Network\Exception\UnauthorizedException', 'Cake\Error\UnauthorizedException');
<ide> class_alias('Cake\Filesystem\File', 'Cake\Utility\File');
<ide> class_alias('Cake\Filesystem\Folder', 'Cake\Utility\Folder');
<del>class_alias('Cake\I18n\Time', 'Cake\Utiliy\Time');
<add>class_alias('Cake\I18n\Time', 'Cake\Utility\Time');
<ide>
<ide> require CAKE . 'basics.php'; | 1 |
Python | Python | remove duplicate key from python dictionary | 736a62f824d9062b52983633528e58c445d8cc56 | <ide><path>airflow/providers/apache/hive/hooks/hive.py
<ide> def max_partition(
<ide> :type filter_map: map
<ide>
<ide> >>> hh = HiveMetastoreHook()
<del> >>> filter_map = {'ds': '2015-01-01', 'ds': '2014-01-01'}
<add> >>> filter_map = {'ds': '2015-01-01'}
<ide> >>> t = 'static_babynames_partitioned'
<ide> >>> hh.max_partition(schema='airflow',\
<ide> ... table_name=t, field='ds', filter_map=filter_map) | 1 |
Ruby | Ruby | use the new resolver api in dbconsole | f6e363309a6cffacd6087f3a901c562265c3b2ed | <ide><path>railties/lib/rails/commands/dbconsole.rb
<ide> def config
<ide> @config ||= begin
<ide> require APP_PATH
<ide> ActiveRecord::ConnectionAdapters::ConnectionSpecification::Resolver.new(
<del> ENV['DATABASE_URL'],
<del> (Rails.application.config.database_configuration || {})
<del> ).spec.config.stringify_keys
<add> Rails.application.config.database_configuration || {}
<add> ).resolve(ENV["DATABASE_URL"]).config.stringify_keys
<ide> end
<ide> end
<ide> | 1 |
Mixed | Text | inspect show right logpath in json-file driver | acf025ad1b806fd9b5eb3358a8e1d75c6aae890d | <ide><path>daemon/container.go
<ide> func (container *Container) startLogging() error {
<ide> if err != nil {
<ide> return err
<ide> }
<add> container.LogPath = pth
<ide>
<ide> dl, err := jsonfilelog.New(pth)
<ide> if err != nil {
<ide><path>docs/sources/reference/api/docker_remote_api_v1.18.md
<ide> Return low-level information on the container `id`
<ide> "MaximumRetryCount": 2,
<ide> "Name": "on-failure"
<ide> },
<del> "LogConfig": { "Type": "json-file", Config: {} },
<add> "LogConfig": {
<add> "Config": null,
<add> "Type": "json-file"
<add> },
<ide> "SecurityOpt": null,
<ide> "VolumesFrom": null,
<ide> "Ulimits": [{}]
<ide><path>docs/sources/reference/api/docker_remote_api_v1.19.md
<ide> Return low-level information on the container `id`
<ide> "MaximumRetryCount": 2,
<ide> "Name": "on-failure"
<ide> },
<del> "LogConfig": { "Type": "json-file", "Config": {} },
<add> "LogConfig": {
<add> "Config": null,
<add> "Type": "json-file"
<add> },
<ide> "SecurityOpt": null,
<ide> "VolumesFrom": null,
<ide> "Ulimits": [{}] | 3 |
Ruby | Ruby | add audit for scons | 758a60155538820e11bea5c473c2171cd4df183f | <ide><path>Library/Homebrew/cmd/audit.rb
<ide> def audit_patches
<ide> end
<ide>
<ide> def audit_text
<add> if text =~ /system\s+['"]scons/
<add> problem "use \"scons, *args\" instead of \"system 'scons', *args\""
<add> end
<add>
<ide> if text =~ /system\s+['"]xcodebuild/ && text !~ /SYMROOT=/
<ide> problem "xcodebuild should be passed an explicit \"SYMROOT\""
<ide> end | 1 |
Ruby | Ruby | remove the setter from `scope` | 5e81e6ca31aa0d2a38f8db5c8fe9bcbce98d9689 | <ide><path>actionpack/lib/action_dispatch/routing/mapper.rb
<ide> def fetch(key, &block)
<ide> def [](key)
<ide> @hash.fetch(key) { @parent[key] }
<ide> end
<del>
<del> def []=(k,v)
<del> @hash[k] = v
<del> end
<ide> end
<ide>
<ide> def initialize(set) #:nodoc: | 1 |
Javascript | Javascript | fix a typo in current time display component. | 4658c7bad67dc0556b636aed7962d2d85f308fe8 | <ide><path>src/js/control-bar/time-controls/current-time-display.js
<ide> class CurrentTimeDisplay extends TimeDisplay {
<ide> if (!this.player_.duration()) {
<ide> return;
<ide> }
<del> this.updateFormattedTime_(this.player.duration());
<add> this.updateFormattedTime_(this.player_.duration());
<ide> }
<ide>
<ide> } | 1 |
Ruby | Ruby | fix typo in cleaner.rb | 4f8489faa109d03fbf4b4529e295bd9e7fd48920 | <ide><path>Library/Homebrew/cleaner.rb
<ide> def observe_file_removal(path)
<ide> end
<ide>
<ide> # Removes any empty directories in the formula's prefix subtree
<del> # Keeps any empty directions projected by skip_clean
<add> # Keeps any empty directions protected by skip_clean
<ide> # Removes any unresolved symlinks
<ide> def prune
<ide> dirs = [] | 1 |
Text | Text | fix dead link cli.md | b1dc0db56ac9a27d51ab2a3684ed398d7b80a80c | <ide><path>docs/sources/reference/commandline/cli.md
<ide> To run the daemon with debug output, use `docker -d -D`.
<ide>
<ide> ### Daemon socket option
<ide>
<del>The Docker daemon can listen for [Docker Remote API](reference/api/docker_remote_api/)
<add>The Docker daemon can listen for [Docker Remote API](/reference/api/docker_remote_api/)
<ide> requests via three different types of Socket: `unix`, `tcp`, and `fd`.
<ide>
<ide> By default, a `unix` domain socket (or IPC socket) is created at `/var/run/docker.sock`, | 1 |
Ruby | Ruby | freeze the middleware stack after it's built | 42f6e9fb38101c0fe199d5425bd71965f9a6dfc9 | <ide><path>actionpack/lib/action_dispatch/middleware/stack.rb
<ide> def use(*args, &block)
<ide> def build(app = nil, &block)
<ide> app ||= block
<ide> raise "MiddlewareStack#build requires an app" unless app
<del> middlewares.reverse.inject(app) { |a, e| e.build(a) }
<add> middlewares.freeze.reverse.inject(app) { |a, e| e.build(a) }
<ide> end
<ide>
<ide> protected
<ide><path>railties/test/application/middleware/exceptions_test.rb
<ide> def teardown
<ide> end
<ide>
<ide> test "show exceptions middleware filter backtrace before logging" do
<del> my_middleware = Struct.new(:app) do
<del> def call(env)
<del> raise "Failure"
<add> controller :foo, <<-RUBY
<add> class FooController < ActionController::Base
<add> def index
<add> raise 'oops'
<add> end
<ide> end
<del> end
<del>
<del> app.config.middleware.use my_middleware
<add> RUBY
<ide>
<del> stringio = StringIO.new
<del> Rails.logger = Logger.new(stringio)
<add> get "/foo"
<add> assert_equal 500, last_response.status
<ide>
<del> get "/"
<del> assert_no_match(/action_dispatch/, stringio.string)
<add> log = File.read(Rails.application.config.paths["log"].first)
<add> assert_no_match(/action_dispatch/, log, log)
<add> assert_match(/oops/, log, log)
<ide> end
<ide>
<ide> test "renders active record exceptions as 404" do
<del> my_middleware = Struct.new(:app) do
<del> def call(env)
<del> raise ActiveRecord::RecordNotFound
<add> controller :foo, <<-RUBY
<add> class FooController < ActionController::Base
<add> def index
<add> raise ActiveRecord::RecordNotFound
<add> end
<ide> end
<del> end
<del>
<del> app.config.middleware.use my_middleware
<add> RUBY
<ide>
<del> get "/"
<add> get "/foo"
<ide> assert_equal 404, last_response.status
<ide> end
<ide>
<ide><path>railties/test/application/middleware_test.rb
<ide> def app
<ide> assert_equal "Rack::Config", middleware.first
<ide> end
<ide>
<add> test "can't change middleware after it's built" do
<add> boot!
<add> assert_raise RuntimeError do
<add> app.config.middleware.use Rack::Config
<add> end
<add> end
<add>
<ide> # ConditionalGet + Etag
<ide> test "conditional get + etag middlewares handle http caching based on body" do
<ide> make_basic_app | 3 |
Go | Go | fix some tests | a08ffa0e934e5f8b95855cc5bd0cc2a6b3cf4091 | <ide><path>integration-cli/docker_cli_daemon_plugins_test.go
<ide> import (
<ide> "github.com/go-check/check"
<ide> )
<ide>
<del>var pluginName = "tiborvass/no-remove"
<del>
<ide> // TestDaemonRestartWithPluginEnabled tests state restore for an enabled plugin
<ide> func (s *DockerDaemonSuite) TestDaemonRestartWithPluginEnabled(c *check.C) {
<ide> testRequires(c, Network)
<ide> func (s *DockerDaemonSuite) TestDaemonRestartWithPluginEnabled(c *check.C) {
<ide> c.Fatalf("Could not start daemon: %v", err)
<ide> }
<ide>
<del> if out, err := s.d.Cmd("plugin", "install", "--grant-all-permissions", pluginName); err != nil {
<add> if out, err := s.d.Cmd("plugin", "install", "--grant-all-permissions", pName); err != nil {
<ide> c.Fatalf("Could not install plugin: %v %s", err, out)
<ide> }
<ide>
<ide> defer func() {
<del> if out, err := s.d.Cmd("plugin", "disable", pluginName); err != nil {
<add> if out, err := s.d.Cmd("plugin", "disable", pName); err != nil {
<ide> c.Fatalf("Could not disable plugin: %v %s", err, out)
<ide> }
<del> if out, err := s.d.Cmd("plugin", "remove", pluginName); err != nil {
<add> if out, err := s.d.Cmd("plugin", "remove", pName); err != nil {
<ide> c.Fatalf("Could not remove plugin: %v %s", err, out)
<ide> }
<ide> }()
<ide> func (s *DockerDaemonSuite) TestDaemonRestartWithPluginEnabled(c *check.C) {
<ide> if err != nil {
<ide> c.Fatalf("Could not list plugins: %v %s", err, out)
<ide> }
<del> c.Assert(out, checker.Contains, pluginName)
<add> c.Assert(out, checker.Contains, pName)
<ide> c.Assert(out, checker.Contains, "true")
<ide> }
<ide>
<ide> func (s *DockerDaemonSuite) TestDaemonRestartWithPluginDisabled(c *check.C) {
<ide> c.Fatalf("Could not start daemon: %v", err)
<ide> }
<ide>
<del> if out, err := s.d.Cmd("plugin", "install", "--grant-all-permissions", pluginName, "--disable"); err != nil {
<add> if out, err := s.d.Cmd("plugin", "install", "--grant-all-permissions", pName, "--disable"); err != nil {
<ide> c.Fatalf("Could not install plugin: %v %s", err, out)
<ide> }
<ide>
<ide> defer func() {
<del> if out, err := s.d.Cmd("plugin", "remove", pluginName); err != nil {
<add> if out, err := s.d.Cmd("plugin", "remove", pName); err != nil {
<ide> c.Fatalf("Could not remove plugin: %v %s", err, out)
<ide> }
<ide> }()
<ide> func (s *DockerDaemonSuite) TestDaemonRestartWithPluginDisabled(c *check.C) {
<ide> if err != nil {
<ide> c.Fatalf("Could not list plugins: %v %s", err, out)
<ide> }
<del> c.Assert(out, checker.Contains, pluginName)
<add> c.Assert(out, checker.Contains, pName)
<ide> c.Assert(out, checker.Contains, "false")
<ide> }
<ide>
<ide> func (s *DockerDaemonSuite) TestDaemonKillLiveRestoreWithPlugins(c *check.C) {
<ide> if err := s.d.Start("--live-restore"); err != nil {
<ide> c.Fatalf("Could not start daemon: %v", err)
<ide> }
<del> if out, err := s.d.Cmd("plugin", "install", "--grant-all-permissions", pluginName); err != nil {
<add> if out, err := s.d.Cmd("plugin", "install", "--grant-all-permissions", pName); err != nil {
<ide> c.Fatalf("Could not install plugin: %v %s", err, out)
<ide> }
<ide> defer func() {
<ide> if err := s.d.Restart("--live-restore"); err != nil {
<ide> c.Fatalf("Could not restart daemon: %v", err)
<ide> }
<del> if out, err := s.d.Cmd("plugin", "disable", pluginName); err != nil {
<add> if out, err := s.d.Cmd("plugin", "disable", pName); err != nil {
<ide> c.Fatalf("Could not disable plugin: %v %s", err, out)
<ide> }
<del> if out, err := s.d.Cmd("plugin", "remove", pluginName); err != nil {
<add> if out, err := s.d.Cmd("plugin", "remove", pName); err != nil {
<ide> c.Fatalf("Could not remove plugin: %v %s", err, out)
<ide> }
<ide> }()
<ide> func (s *DockerDaemonSuite) TestDaemonKillLiveRestoreWithPlugins(c *check.C) {
<ide> c.Fatalf("Could not kill daemon: %v", err)
<ide> }
<ide>
<del> cmd := exec.Command("pgrep", "-f", "plugin-no-remove")
<add> cmd := exec.Command("pgrep", "-f", pluginProcessName)
<ide> if out, ec, err := runCommandWithOutput(cmd); ec != 0 {
<ide> c.Fatalf("Expected exit code '0', got %d err: %v output: %s ", ec, err, out)
<ide> }
<ide> func (s *DockerDaemonSuite) TestDaemonShutdownLiveRestoreWithPlugins(c *check.C)
<ide> if err := s.d.Start("--live-restore"); err != nil {
<ide> c.Fatalf("Could not start daemon: %v", err)
<ide> }
<del> if out, err := s.d.Cmd("plugin", "install", "--grant-all-permissions", pluginName); err != nil {
<add> if out, err := s.d.Cmd("plugin", "install", "--grant-all-permissions", pName); err != nil {
<ide> c.Fatalf("Could not install plugin: %v %s", err, out)
<ide> }
<ide> defer func() {
<ide> if err := s.d.Restart("--live-restore"); err != nil {
<ide> c.Fatalf("Could not restart daemon: %v", err)
<ide> }
<del> if out, err := s.d.Cmd("plugin", "disable", pluginName); err != nil {
<add> if out, err := s.d.Cmd("plugin", "disable", pName); err != nil {
<ide> c.Fatalf("Could not disable plugin: %v %s", err, out)
<ide> }
<del> if out, err := s.d.Cmd("plugin", "remove", pluginName); err != nil {
<add> if out, err := s.d.Cmd("plugin", "remove", pName); err != nil {
<ide> c.Fatalf("Could not remove plugin: %v %s", err, out)
<ide> }
<ide> }()
<ide> func (s *DockerDaemonSuite) TestDaemonShutdownLiveRestoreWithPlugins(c *check.C)
<ide> c.Fatalf("Could not kill daemon: %v", err)
<ide> }
<ide>
<del> cmd := exec.Command("pgrep", "-f", "plugin-no-remove")
<add> cmd := exec.Command("pgrep", "-f", pluginProcessName)
<ide> if out, ec, err := runCommandWithOutput(cmd); ec != 0 {
<ide> c.Fatalf("Expected exit code '0', got %d err: %v output: %s ", ec, err, out)
<ide> }
<ide> func (s *DockerDaemonSuite) TestDaemonShutdownWithPlugins(c *check.C) {
<ide> if err := s.d.Start(); err != nil {
<ide> c.Fatalf("Could not start daemon: %v", err)
<ide> }
<del> if out, err := s.d.Cmd("plugin", "install", "--grant-all-permissions", pluginName); err != nil {
<add> if out, err := s.d.Cmd("plugin", "install", "--grant-all-permissions", pName); err != nil {
<ide> c.Fatalf("Could not install plugin: %v %s", err, out)
<ide> }
<ide>
<ide> defer func() {
<ide> if err := s.d.Restart(); err != nil {
<ide> c.Fatalf("Could not restart daemon: %v", err)
<ide> }
<del> if out, err := s.d.Cmd("plugin", "disable", pluginName); err != nil {
<add> if out, err := s.d.Cmd("plugin", "disable", pName); err != nil {
<ide> c.Fatalf("Could not disable plugin: %v %s", err, out)
<ide> }
<del> if out, err := s.d.Cmd("plugin", "remove", pluginName); err != nil {
<add> if out, err := s.d.Cmd("plugin", "remove", pName); err != nil {
<ide> c.Fatalf("Could not remove plugin: %v %s", err, out)
<ide> }
<ide> }()
<ide> func (s *DockerDaemonSuite) TestDaemonShutdownWithPlugins(c *check.C) {
<ide> }
<ide> }
<ide>
<del> cmd := exec.Command("pgrep", "-f", "plugin-no-remove")
<add> cmd := exec.Command("pgrep", "-f", pluginProcessName)
<ide> if out, ec, err := runCommandWithOutput(cmd); ec != 1 {
<ide> c.Fatalf("Expected exit code '1', got %d err: %v output: %s ", ec, err, out)
<ide> }
<ide> func (s *DockerDaemonSuite) TestVolumePlugin(c *check.C) {
<ide> if err := s.d.Start(); err != nil {
<ide> c.Fatalf("Could not start daemon: %v", err)
<ide> }
<del> out, err := s.d.Cmd("plugin", "install", pluginName, "--grant-all-permissions")
<add> out, err := s.d.Cmd("plugin", "install", pName, "--grant-all-permissions")
<ide> if err != nil {
<ide> c.Fatalf("Could not install plugin: %v %s", err, out)
<ide> }
<ide> defer func() {
<del> if out, err := s.d.Cmd("plugin", "disable", pluginName); err != nil {
<add> if out, err := s.d.Cmd("plugin", "disable", pName); err != nil {
<ide> c.Fatalf("Could not disable plugin: %v %s", err, out)
<ide> }
<del> if out, err := s.d.Cmd("plugin", "remove", pluginName); err != nil {
<add> if out, err := s.d.Cmd("plugin", "remove", pName); err != nil {
<ide> c.Fatalf("Could not remove plugin: %v %s", err, out)
<ide> }
<ide> }()
<ide>
<del> out, err = s.d.Cmd("volume", "create", "-d", pluginName, volName)
<add> out, err = s.d.Cmd("volume", "create", "-d", pName, volName)
<ide> if err != nil {
<ide> c.Fatalf("Could not create volume: %v %s", err, out)
<ide> }
<ide> func (s *DockerDaemonSuite) TestVolumePlugin(c *check.C) {
<ide> c.Fatalf("Could not list volume: %v %s", err, out)
<ide> }
<ide> c.Assert(out, checker.Contains, volName)
<del> c.Assert(out, checker.Contains, pluginName)
<add> c.Assert(out, checker.Contains, pName)
<ide>
<ide> mountPoint, err := s.d.Cmd("volume", "inspect", volName, "--format", "{{.Mountpoint}}")
<ide> if err != nil {
<ide><path>integration-cli/docker_cli_events_test.go
<ide> func (s *DockerSuite) TestEventsImageLoad(c *check.C) {
<ide> func (s *DockerSuite) TestEventsPluginOps(c *check.C) {
<ide> testRequires(c, DaemonIsLinux)
<ide>
<del> pluginName := "tiborvass/no-remove:latest"
<ide> since := daemonUnixTime(c)
<ide>
<del> dockerCmd(c, "plugin", "install", pluginName, "--grant-all-permissions")
<del> dockerCmd(c, "plugin", "disable", pluginName)
<del> dockerCmd(c, "plugin", "remove", pluginName)
<add> dockerCmd(c, "plugin", "install", pNameWithTag, "--grant-all-permissions")
<add> dockerCmd(c, "plugin", "disable", pNameWithTag)
<add> dockerCmd(c, "plugin", "remove", pNameWithTag)
<ide>
<ide> out, _ := dockerCmd(c, "events", "--since", since, "--until", daemonUnixTime(c))
<ide> events := strings.Split(out, "\n")
<ide> func (s *DockerSuite) TestEventsPluginOps(c *check.C) {
<ide> nEvents := len(events)
<ide> c.Assert(nEvents, checker.GreaterOrEqualThan, 4)
<ide>
<del> pluginEvents := eventActionsByIDAndType(c, events, pluginName, "plugin")
<add> pluginEvents := eventActionsByIDAndType(c, events, pNameWithTag, "plugin")
<ide> c.Assert(pluginEvents, checker.HasLen, 4, check.Commentf("events: %v", events))
<ide>
<ide> c.Assert(pluginEvents[0], checker.Equals, "pull", check.Commentf(out))
<ide><path>integration-cli/docker_cli_network_unix_test.go
<ide> func (s *DockerNetworkSuite) TestDockerPluginV2NetworkDriver(c *check.C) {
<ide> testRequires(c, DaemonIsLinux, Network, IsAmd64)
<ide>
<ide> var (
<del> npName = "mavenugo/test-docker-netplugin"
<add> npName = "tiborvass/test-docker-netplugin"
<ide> npTag = "latest"
<ide> npNameWithTag = npName + ":" + npTag
<ide> )
<ide><path>integration-cli/docker_cli_plugins_test.go
<ide> import (
<ide> )
<ide>
<ide> var (
<del> pName = "tiborvass/no-remove"
<del> pTag = "latest"
<del> pNameWithTag = pName + ":" + pTag
<add> pluginProcessName = "no-remove"
<add> pName = "tiborvass/no-remove"
<add> pTag = "latest"
<add> pNameWithTag = pName + ":" + pTag
<ide> )
<ide>
<ide> func (s *DockerSuite) TestPluginBasicOps(c *check.C) { | 4 |
Python | Python | support all unix wildcards in s3keysensor | 12133861ecefd28f1d569cf2d190c2f26f6fd2fb | <ide><path>airflow/providers/amazon/aws/hooks/s3.py
<ide> def get_wildcard_key(
<ide> :return: the key object from the bucket or None if none has been found.
<ide> :rtype: boto3.s3.Object
<ide> """
<del> prefix = re.split(r'[*]', wildcard_key, 1)[0]
<add> prefix = re.split(r'[\[\*\?]', wildcard_key, 1)[0]
<ide> key_list = self.list_keys(bucket_name, prefix=prefix, delimiter=delimiter)
<ide> key_matches = [k for k in key_list if fnmatch.fnmatch(k, wildcard_key)]
<ide> if key_matches:
<ide><path>airflow/providers/amazon/aws/sensors/s3_key.py
<ide> def get_files(self, s3_hook: S3Hook, delimiter: Optional[str] = '/') -> List:
<ide> 'MaxItems': None,
<ide> }
<ide> if self.wildcard_match:
<del> prefix = re.split(r'[*]', self.bucket_key, 1)[0]
<add> prefix = re.split(r'[\[\*\?]', self.bucket_key, 1)[0]
<ide>
<ide> paginator = s3_hook.get_conn().get_paginator('list_objects_v2')
<ide> response = paginator.paginate(
<ide><path>tests/providers/amazon/aws/hooks/test_s3.py
<ide> def test_check_for_wildcard_key(self, s3_bucket):
<ide> bucket = hook.get_bucket(s3_bucket)
<ide> bucket.put_object(Key='abc', Body=b'a')
<ide> bucket.put_object(Key='a/b', Body=b'a')
<add> bucket.put_object(Key='foo_5.txt', Body=b'a')
<ide>
<ide> assert hook.check_for_wildcard_key('a*', s3_bucket) is True
<ide> assert hook.check_for_wildcard_key('abc', s3_bucket) is True
<ide> def test_check_for_wildcard_key(self, s3_bucket):
<ide> assert hook.check_for_wildcard_key(f's3://{s3_bucket}//a') is False
<ide> assert hook.check_for_wildcard_key(f's3://{s3_bucket}//b') is False
<ide>
<add> assert hook.get_wildcard_key('a?b', s3_bucket).key == 'a/b'
<add> assert hook.get_wildcard_key('a?c', s3_bucket, delimiter='/').key == 'abc'
<add> assert hook.get_wildcard_key('foo_[0-9].txt', s3_bucket, delimiter='/').key == 'foo_5.txt'
<add> assert hook.get_wildcard_key(f's3://{s3_bucket}/foo_[0-9].txt', delimiter='/').key == 'foo_5.txt'
<add>
<ide> def test_get_wildcard_key(self, s3_bucket):
<ide> hook = S3Hook()
<ide> bucket = hook.get_bucket(s3_bucket)
<ide> bucket.put_object(Key='abc', Body=b'a')
<ide> bucket.put_object(Key='a/b', Body=b'a')
<add> bucket.put_object(Key='foo_5.txt', Body=b'a')
<ide>
<ide> # The boto3 Class API is _odd_, and we can't do an isinstance check as
<ide> # each instance is a different class, so lets just check one property
<ide> def test_get_wildcard_key(self, s3_bucket):
<ide> assert hook.get_wildcard_key(f's3://{s3_bucket}/a') is None
<ide> assert hook.get_wildcard_key(f's3://{s3_bucket}/b') is None
<ide>
<add> assert hook.get_wildcard_key('a?b', s3_bucket).key == 'a/b'
<add> assert hook.get_wildcard_key('a?c', s3_bucket, delimiter='/').key == 'abc'
<add> assert hook.get_wildcard_key('foo_[0-9].txt', s3_bucket, delimiter='/').key == 'foo_5.txt'
<add> assert hook.get_wildcard_key(f's3://{s3_bucket}/foo_[0-9].txt', delimiter='/').key == 'foo_5.txt'
<add>
<ide> def test_load_string(self, s3_bucket):
<ide> hook = S3Hook()
<ide> hook.load_string("Contént", "my_key", s3_bucket)
<ide><path>tests/providers/amazon/aws/sensors/test_s3_key.py
<ide> def test_poke(self, paginate_return_value, poke_return_value, mock_hook):
<ide> mock_paginator.paginate.return_value = [paginate_return_value]
<ide> assert op.poke(None) is poke_return_value
<ide> mock_check_for_key.assert_called_once_with(op.bucket_key, op.bucket_name)
<add>
<add> @mock.patch('airflow.providers.amazon.aws.sensors.s3_key.S3KeySizeSensor.get_files', return_value=[])
<add> @mock.patch('airflow.providers.amazon.aws.sensors.s3_key.S3Hook')
<add> def test_poke_wildcard(self, mock_hook, mock_get_files):
<add> op = S3KeySizeSensor(task_id='s3_key_sensor', bucket_key='s3://test_bucket/file', wildcard_match=True)
<add>
<add> mock_check_for_wildcard_key = mock_hook.return_value.check_for_wildcard_key
<add> mock_check_for_wildcard_key.return_value = False
<add> assert not op.poke(None)
<add> mock_check_for_wildcard_key.assert_called_once_with(op.bucket_key, op.bucket_name) | 4 |
Text | Text | page_size addition in 3.1 | c90cf828aeda3ebb1a599064ee6c39c03eb04242 | <ide><path>docs/api-guide/settings.md
<ide> See the pagination documentation for further guidance on [setting the pagination
<ide>
<ide> ---
<ide>
<del>The name of a query parameter, which can be used by the client to override the default page size to use for pagination. If set to `None`, clients may not override the default page size.
<add>#### PAGE_SIZE
<ide>
<del>For example, given the following settings:
<del>
<del> REST_FRAMEWORK = {
<del> 'PAGINATE_BY': 10,
<del> 'PAGINATE_BY_PARAM': 'page_size',
<del> }
<del>
<del>A client would be able to modify the pagination size by using the `page_size` query parameter. For example:
<del>
<del> GET http://example.com/api/accounts?page_size=25
<add>The default page size to use for pagination. If set to `None`, pagination is disabled by default.
<ide>
<ide> Default: `None`
<ide> | 1 |
Mixed | Javascript | improve error messages when nesting view in text | 963e6e9d36bb8f88caaeccc1497fd7b68c26e2a6 | <ide><path>Libraries/Components/View/View.js
<ide> const ReactNativeViewAttributes = require('ReactNativeViewAttributes');
<ide> const StyleSheetPropType = require('StyleSheetPropType');
<ide> const ViewStylePropTypes = require('ViewStylePropTypes');
<ide>
<add>const invariant = require('invariant');
<add>
<ide> var TVViewPropTypes = {};
<ide> if (Platform.isTVOS) {
<ide> TVViewPropTypes = require('TVViewPropTypes');
<ide> const View = React.createClass({
<ide> needsOffscreenAlphaCompositing: PropTypes.bool,
<ide> },
<ide>
<add> contextTypes: {
<add> isInAParentText: React.PropTypes.bool,
<add> },
<add>
<ide> render: function() {
<add> invariant(
<add> !(this.context.isInAParentText && Platform.OS === 'android'),
<add> 'Nesting of <View> within <Text> is not supported on Android.');
<add>
<ide> // WARNING: This method will not be used in production mode as in that mode we
<ide> // replace wrapper component View with generated native wrapper RCTView. Avoid
<ide> // adding functionality this component that you'd want to be available in both
<ide><path>ReactAndroid/src/main/java/com/facebook/react/uimanager/ReactShadowNode.java
<ide> public void addChildAt(ReactShadowNode child, int i) {
<ide> YogaNode childYogaNode = child.mYogaNode;
<ide> if (childYogaNode == null) {
<ide> throw new RuntimeException(
<del> "Cannot add a child that doesn't have a CSS node to a node without a measure function!");
<add> "Cannot add a child that doesn't have a YogaNode to a parent without a measure " +
<add> "function! (Trying to add a '" + child.getClass().getSimpleName() + "' to a '" +
<add> getClass().getSimpleName() + "')");
<ide> }
<ide> mYogaNode.addChildAt(childYogaNode, i);
<ide> } | 2 |
Text | Text | explain custom configuration debugging gotcha | b1892e609937d1107f729822423330cc70dfc0d2 | <ide><path>docs/EmbeddedAppIOS.md
<ide> Now compile and run your app. You shall now see your React Native app running in
<ide>
<ide> 
<ide>
<del>Live reload and all of the debugging tools will work from the simulator (make sure that DEBUG=1 is set under Build Settings -> Preprocessor Macros). You've got a simple React component totally encapsulated behind an Objective-C `UIView` subclass.
<add>Live reload and all of the debugging tools will work from the simulator. Just make sure that `DEBUG=1` is set under Build Settings -> Preprocessor Macros. If you're using Cocoapods and a custom configuration (that is, not "Debug"), make sure you've specified it to be a debug configuration using the [`xcodeproj` setting](https://guides.cocoapods.org/syntax/podfile.html#xcodeproj) in your Podfile. Also make sure you've got a simple React component totally encapsulated behind an Objective-C `UIView` subclass.
<ide>
<ide> ## Conclusion
<ide> | 1 |
Javascript | Javascript | fix jshint warning | 3cf1cfa3de538f13f64f31e5071a4fb40a8482ad | <ide><path>src/controllers/controller.polarArea.js
<ide> module.exports = function(Chart) {
<ide> updateElement: function(arc, index, reset) {
<ide> var me = this;
<ide> var chart = me.chart;
<del> var chartArea = chart.chartArea;
<ide> var dataset = me.getDataset();
<ide> var opts = chart.options;
<ide> var animationOpts = opts.animation; | 1 |
Text | Text | start coffeescript styleguide | 095320680f9dbd11183351a4f81f7c451cb48ada | <ide><path>CONTRIBUTING.md
<ide> in the proper package's repository.
<ide>
<ide> ## Pull Requests
<ide> * Include screenshots and animated GIFs whenever possible.
<del> * Follow the [JavaScript](https://github.com/styleguide/javascript) and
<del> [CSS](https://github.com/styleguide/css) styleguides
<add> * Follow the [CoffeeScript](#coffeescript-styleguide),
<add> [JavaScript](https://github.com/styleguide/javascript),
<add> and [CSS](https://github.com/styleguide/css) styleguides
<ide> * Include thoughtfully worded [Jasmine](http://pivotal.github.com/jasmine/)
<ide> specs
<ide> * Avoid placing files in `vendor`. 3rd-party packages should be added as a
<ide> in the proper package's repository.
<ide> * :racehorse: when improving performance
<ide> * :non-potable_water: when plugging memory leaks
<ide> * :memo: when writing docs
<add>
<add>## CoffeeScript Styleguide
<add>
<add>* Set parameter defaults without spaces around the equal sign
<add> * `clear = (count=1) ->` instead of `clear = (count = 1) ->` | 1 |
Text | Text | clarify timing of socket.connecting | ccc06a3c3291546c777da74cd3bdff903b33c607 | <ide><path>doc/api/net.md
<ide> added: v6.1.0
<ide> -->
<ide>
<ide> If `true`,
<add>[`socket.connect(options[, connectListener])`][`socket.connect(options)`] was
<add>called and has not yet finished. It will stay `true` until the socket becomes
<add>connected, then it is set to `false` and the `'connect'` event is emitted. Note
<add>that the
<ide> [`socket.connect(options[, connectListener])`][`socket.connect(options)`]
<del>was called and has not yet finished. Will be set to `true` before emitting
<del>`'connect'` event and/or calling
<del>[`socket.connect(options[, connectListener])`][`socket.connect(options)`]'s
<del>callback.
<add>callback is a listener for the `'connect'` event.
<ide>
<ide> ### socket.destroy([exception])
<ide> <!-- YAML | 1 |
Python | Python | add default 'aws_conn_id' to sagemaker operators | 5d1e6ff19ab4a63259a2c5aed02b601ca055a289 | <ide><path>airflow/providers/amazon/aws/operators/sagemaker.py
<ide> if TYPE_CHECKING:
<ide> from airflow.utils.context import Context
<ide>
<add>DEFAULT_CONN_ID = 'aws_default'
<add>CHECK_INTERVAL_SECOND = 30
<add>
<ide>
<ide> class SageMakerBaseOperator(BaseOperator):
<ide> """This is the base operator for all SageMaker operators.
<ide>
<ide> :param config: The configuration necessary to start a training job (templated)
<del> :param aws_conn_id: The AWS connection ID to use.
<ide> """
<ide>
<ide> template_fields: Sequence[str] = ('config',)
<ide> class SageMakerBaseOperator(BaseOperator):
<ide> ui_color = '#ededed'
<ide> integer_fields: List[List[Any]] = []
<ide>
<del> def __init__(self, *, config: dict, aws_conn_id: str = 'aws_default', **kwargs):
<add> def __init__(self, *, config: dict, **kwargs):
<ide> super().__init__(**kwargs)
<del> self.aws_conn_id = aws_conn_id
<ide> self.config = config
<ide>
<ide> def parse_integer(self, config, field):
<ide> class SageMakerProcessingOperator(SageMakerBaseOperator):
<ide>
<ide> :param config: The configuration necessary to start a processing job (templated).
<ide> For details of the configuration parameter see :py:meth:`SageMaker.Client.create_processing_job`
<add> :param aws_conn_id: The AWS connection ID to use.
<ide> :param wait_for_completion: If wait is set to True, the time interval, in seconds,
<ide> that the operation waits to check the status of the processing job.
<ide> :param print_log: if the operator should print the cloudwatch log during processing
<ide> def __init__(
<ide> self,
<ide> *,
<ide> config: dict,
<add> aws_conn_id: str = DEFAULT_CONN_ID,
<ide> wait_for_completion: bool = True,
<ide> print_log: bool = True,
<del> check_interval: int = 30,
<add> check_interval: int = CHECK_INTERVAL_SECOND,
<ide> max_ingestion_time: Optional[int] = None,
<ide> action_if_job_exists: str = 'increment',
<ide> **kwargs,
<ide> def __init__(
<ide> Provided value: '{action_if_job_exists}'."
<ide> )
<ide> self.action_if_job_exists = action_if_job_exists
<add> self.aws_conn_id = aws_conn_id
<ide> self.wait_for_completion = wait_for_completion
<ide> self.print_log = print_log
<ide> self.check_interval = check_interval
<ide> class SageMakerEndpointConfigOperator(SageMakerBaseOperator):
<ide>
<ide> integer_fields = [['ProductionVariants', 'InitialInstanceCount']]
<ide>
<del> def __init__(self, *, config: dict, **kwargs):
<add> def __init__(
<add> self,
<add> *,
<add> config: dict,
<add> aws_conn_id: str = DEFAULT_CONN_ID,
<add> **kwargs,
<add> ):
<ide> super().__init__(config=config, **kwargs)
<ide> self.config = config
<add> self.aws_conn_id = aws_conn_id
<ide>
<ide> def execute(self, context: 'Context') -> dict:
<ide> self.preprocess_config()
<ide> class SageMakerEndpointOperator(SageMakerBaseOperator):
<ide> :param max_ingestion_time: If wait is set to True, this operation fails if the endpoint creation doesn't
<ide> finish within max_ingestion_time seconds. If you set this parameter to None it never times out.
<ide> :param operation: Whether to create an endpoint or update an endpoint. Must be either 'create or 'update'.
<add> :param aws_conn_id: The AWS connection ID to use.
<ide> :return Dict: Returns The ARN of the endpoint created in Amazon SageMaker.
<ide> """
<ide>
<ide> def __init__(
<ide> self,
<ide> *,
<ide> config: dict,
<add> aws_conn_id: str = DEFAULT_CONN_ID,
<ide> wait_for_completion: bool = True,
<del> check_interval: int = 30,
<add> check_interval: int = CHECK_INTERVAL_SECOND,
<ide> max_ingestion_time: Optional[int] = None,
<ide> operation: str = 'create',
<ide> **kwargs,
<ide> ):
<ide> super().__init__(config=config, **kwargs)
<ide> self.config = config
<add> self.aws_conn_id = aws_conn_id
<ide> self.wait_for_completion = wait_for_completion
<ide> self.check_interval = check_interval
<ide> self.max_ingestion_time = max_ingestion_time
<ide> class SageMakerTransformOperator(SageMakerBaseOperator):
<ide> For details of the configuration parameter of model_config, See:
<ide> :py:meth:`SageMaker.Client.create_model`
<ide>
<add> :param aws_conn_id: The AWS connection ID to use.
<ide> :param wait_for_completion: Set to True to wait until the transform job finishes.
<ide> :param check_interval: If wait is set to True, the time interval, in seconds,
<ide> that this operation waits to check the status of the transform job.
<ide> def __init__(
<ide> self,
<ide> *,
<ide> config: dict,
<add> aws_conn_id: str = DEFAULT_CONN_ID,
<ide> wait_for_completion: bool = True,
<del> check_interval: int = 30,
<add> check_interval: int = CHECK_INTERVAL_SECOND,
<ide> max_ingestion_time: Optional[int] = None,
<ide> **kwargs,
<ide> ):
<ide> super().__init__(config=config, **kwargs)
<ide> self.config = config
<add> self.aws_conn_id = aws_conn_id
<ide> self.wait_for_completion = wait_for_completion
<ide> self.check_interval = check_interval
<ide> self.max_ingestion_time = max_ingestion_time
<ide> class SageMakerTuningOperator(SageMakerBaseOperator):
<ide>
<ide> For details of the configuration parameter see
<ide> :py:meth:`SageMaker.Client.create_hyper_parameter_tuning_job`
<add> :param aws_conn_id: The AWS connection ID to use.
<ide> :param wait_for_completion: Set to True to wait until the tuning job finishes.
<ide> :param check_interval: If wait is set to True, the time interval, in seconds,
<ide> that this operation waits to check the status of the tuning job.
<ide> def __init__(
<ide> self,
<ide> *,
<ide> config: dict,
<add> aws_conn_id: str = DEFAULT_CONN_ID,
<ide> wait_for_completion: bool = True,
<del> check_interval: int = 30,
<add> check_interval: int = CHECK_INTERVAL_SECOND,
<ide> max_ingestion_time: Optional[int] = None,
<ide> **kwargs,
<ide> ):
<ide> super().__init__(config=config, **kwargs)
<ide> self.config = config
<add> self.aws_conn_id = aws_conn_id
<ide> self.wait_for_completion = wait_for_completion
<ide> self.check_interval = check_interval
<ide> self.max_ingestion_time = max_ingestion_time
<ide> class SageMakerModelOperator(SageMakerBaseOperator):
<ide> :param config: The configuration necessary to create a model.
<ide>
<ide> For details of the configuration parameter see :py:meth:`SageMaker.Client.create_model`
<add> :param aws_conn_id: The AWS connection ID to use.
<ide> :return Dict: Returns The ARN of the model created in Amazon SageMaker.
<ide> """
<ide>
<del> def __init__(self, *, config, **kwargs):
<add> def __init__(self, *, config, aws_conn_id: str = DEFAULT_CONN_ID, **kwargs):
<ide> super().__init__(config=config, **kwargs)
<ide> self.config = config
<add> self.aws_conn_id = aws_conn_id
<ide>
<ide> def expand_role(self) -> None:
<ide> if 'ExecutionRoleArn' in self.config:
<ide> class SageMakerTrainingOperator(SageMakerBaseOperator):
<ide> :param config: The configuration necessary to start a training job (templated).
<ide>
<ide> For details of the configuration parameter see :py:meth:`SageMaker.Client.create_training_job`
<add> :param aws_conn_id: The AWS connection ID to use.
<ide> :param wait_for_completion: If wait is set to True, the time interval, in seconds,
<ide> that the operation waits to check the status of the training job.
<ide> :param print_log: if the operator should print the cloudwatch log during training
<ide> def __init__(
<ide> self,
<ide> *,
<ide> config: dict,
<add> aws_conn_id: str = DEFAULT_CONN_ID,
<ide> wait_for_completion: bool = True,
<ide> print_log: bool = True,
<del> check_interval: int = 30,
<add> check_interval: int = CHECK_INTERVAL_SECOND,
<ide> max_ingestion_time: Optional[int] = None,
<ide> check_if_job_exists: bool = True,
<ide> action_if_job_exists: str = 'increment',
<ide> **kwargs,
<ide> ):
<ide> super().__init__(config=config, **kwargs)
<add> self.aws_conn_id = aws_conn_id
<ide> self.wait_for_completion = wait_for_completion
<ide> self.print_log = print_log
<ide> self.check_interval = check_interval
<ide> class SageMakerDeleteModelOperator(SageMakerBaseOperator):
<ide>
<ide> :param config: The configuration necessary to delete the model.
<ide> For details of the configuration parameter see :py:meth:`SageMaker.Client.delete_model`
<add> :param aws_conn_id: The AWS connection ID to use.
<ide> """
<ide>
<del> def __init__(self, *, config, **kwargs):
<add> def __init__(self, *, config, aws_conn_id: str = DEFAULT_CONN_ID, **kwargs):
<ide> super().__init__(config=config, **kwargs)
<ide> self.config = config
<add> self.aws_conn_id = aws_conn_id
<ide>
<ide> def execute(self, context: 'Context') -> Any:
<ide> sagemaker_hook = SageMakerHook(aws_conn_id=self.aws_conn_id)
<ide><path>tests/providers/amazon/aws/operators/test_sagemaker_base.py
<ide>
<ide> class TestSageMakerBaseOperator(unittest.TestCase):
<ide> def setUp(self):
<del> self.sagemaker = SageMakerBaseOperator(
<del> task_id='test_sagemaker_operator', aws_conn_id='sagemaker_test_id', config=config
<del> )
<add> self.sagemaker = SageMakerBaseOperator(task_id='test_sagemaker_operator', config=config)
<ide>
<ide> def test_parse_integer(self):
<ide> self.sagemaker.integer_fields = [['key1'], ['key2', 'key3'], ['key2', 'key4'], ['key5', 'key6']] | 2 |
Javascript | Javascript | fix bug in io benchmark | 6e5217d3208f6792a500fc408b4dcef0a17c5412 | <ide><path>benchmark/io.js
<ide> function readtest(size, bsize) {
<ide>
<ide> function wt(tsize, bsize, done) {
<ide> var start = Date.now();
<del> s = writetest(tsize, bsizes[0]);
<add> s = writetest(tsize, bsize);
<ide> s.addListener('close', function() {
<ide> var end = Date.now();
<ide> var diff = end - start;
<ide> function wt(tsize, bsize, done) {
<ide>
<ide> function rt(tsize, bsize, done) {
<ide> var start = Date.now();
<del> s = readtest(tsize, bsizes[0]);
<add> s = readtest(tsize, bsize);
<ide> s.addListener('close', function() {
<ide> var end = Date.now();
<ide> var diff = end - start; | 1 |
Python | Python | add a test for ndarray.diagonal() | d403fed2423caec4149937fe48781ac68b21fddb | <ide><path>numpy/core/tests/test_multiarray.py
<ide> def test_dot(self):
<ide> assert_equal(np.dot(a, b), a.dot(b))
<ide> assert_equal(np.dot(np.dot(a, b), c), a.dot(b).dot(c))
<ide>
<add> def test_diagonal(self):
<add> a = np.arange(12).reshape((3, 4))
<add> assert_equal(a.diagonal(), [0, 5, 10])
<add> assert_equal(a.diagonal(0), [0, 5, 10])
<add> assert_equal(a.diagonal(1), [1, 6, 11])
<add> assert_equal(a.diagonal(-1), [4, 9])
<add>
<add> b = np.arange(8).reshape((2, 2, 2))
<add> assert_equal(b.diagonal(), [[0, 6], [1, 7]])
<add> assert_equal(b.diagonal(0), [[0, 6], [1, 7]])
<add> assert_equal(b.diagonal(1), [[2], [3]])
<add> assert_equal(b.diagonal(-1), [[4], [5]])
<add> assert_raises(ValueError, b.diagonal, axis1=0, axis2=0)
<add> assert_equal(b.diagonal(0, 1, 2), [[0, 3], [4, 7]])
<add> assert_equal(b.diagonal(0, 0, 1), [[0, 6], [1, 7]])
<add> assert_equal(b.diagonal(offset=1, axis1=0, axis2=2), [[1], [3]])
<add> # Order of axis argument doesn't matter:
<add> assert_equal(b.diagonal(0, 2, 1), [[0, 3], [4, 7]])
<add>
<ide> def test_ravel(self):
<ide> a = np.array([[0,1],[2,3]])
<ide> assert_equal(a.ravel(), [0,1,2,3]) | 1 |
Javascript | Javascript | change authentication method | 62bab604c8472a4c31acf7233545d6bd05a60ccf | <ide><path>script/vsts/lib/release-notes.js
<ide> const childProcess = require('child_process');
<ide> module.exports.getRelease = async function(releaseVersion, githubToken) {
<ide> if (githubToken) {
<ide> octokit.authenticate({
<del> type: 'oauth',
<add> type: 'token',
<ide> token: githubToken
<ide> });
<ide> } | 1 |
PHP | PHP | convert cacheengine to use instanceconfigsuite | 27baf04149d22f5b7a4e1213108013ac6c90131b | <ide><path>src/Cache/CacheEngine.php
<ide> */
<ide> namespace Cake\Cache;
<ide>
<add>use Cake\Core\InstanceConfigTrait;
<ide> use Cake\Utility\Inflector;
<ide>
<ide> /**
<ide> */
<ide> abstract class CacheEngine {
<ide>
<del>/**
<del> * Runtime config
<del> *
<del> * This is the config of a particular instance
<del> *
<del> * @var array
<del> */
<del> protected $_config = [];
<add> use InstanceConfigTrait;
<ide>
<ide> /**
<ide> * The default cache configuration is overriden in most cache adapters. These are
<ide> abstract class CacheEngine {
<ide> * @return boolean True if the engine has been successfully initialized, false if not
<ide> */
<ide> public function init($config = []) {
<del> $this->_config = $config + $this->_defaultConfig;
<add> $this->config($config);
<add>
<ide> if (!empty($this->_config['groups'])) {
<ide> sort($this->_config['groups']);
<ide> $this->_groupPrefix = str_repeat('%s_', count($this->_config['groups']));
<ide> }
<ide> if (!is_numeric($this->_config['duration'])) {
<ide> $this->_config['duration'] = strtotime($this->_config['duration']) - time();
<ide> }
<add>
<ide> return true;
<ide> }
<ide>
<ide> public function groups() {
<ide> return $this->_config['groups'];
<ide> }
<ide>
<del>/**
<del> * Cache Engine config
<del> *
<del> * If called with no arguments, returns the full config array
<del> * Otherwise returns the config for the specified key
<del> *
<del> * Usage:
<del> * {{{
<del> * $instance->config(); will return full config
<del> * $instance->config('duration'); will return configured duration
<del> * $instance->config('notset'); will return null
<del> * }}}
<del> *
<del> * @param string|null $key to return
<del> * @return mixed array or config value
<del> */
<del> public function config($key = null) {
<del> if ($key === null) {
<del> return $this->_config;
<del> }
<del> return isset($this->_config[$key]) ? $this->_config[$key] : null;
<del> }
<del>
<ide> /**
<ide> * Generates a safe key for use with cache engine storage engines.
<ide> * | 1 |
Java | Java | improve documentation of cacheable | 3a61930bd08ff85175b356ebf6e7cbed53588a98 | <ide><path>spring-context/src/main/java/org/springframework/cache/annotation/Cacheable.java
<ide>
<ide> /**
<ide> * Spring Expression Language (SpEL) expression used for making the method
<del> * caching conditional.
<add> * caching conditional. If condition is evaluated to {@code true}, result
<add> * is cached.
<ide> * <p>Default is {@code ""}, meaning the method result is always cached.
<ide> * <p>The SpEL expression evaluates against a dedicated context that provides the
<ide> * following meta-data: | 1 |
Ruby | Ruby | replace test for checking formulae tap | ce7ab823ed5b93ae4588e754c3cc64974c0882f7 | <ide><path>Library/Homebrew/test/cmd/info_spec.rb
<ide> require "cmd/shared_examples/args_parse"
<ide>
<ide> describe "brew info" do
<del> let(:tarball) do
<del> if OS.linux?
<del> TEST_FIXTURE_DIR/"tarballs/testball-0.1-linux.tbz"
<del> else
<del> TEST_FIXTURE_DIR/"tarballs/testball-0.1.tbz"
<del> end
<del> end
<del> let(:expected_output) {
<del> <<~EOS
<del> {"formulae":[{"name":"testball","full_name":"testball","tap":"homebrew/core","oldname":null,"aliases":[],"versioned_formulae":[],"desc":"Some test","license":null,"homepage":"https://brew.sh/testball","versions":{"stable":"0.1","head":null,"bottle":false},"urls":{"stable":{"url":"file://#{tarball}","tag":null,"revision":null}},"revision":0,"version_scheme":0,"bottle":{},"keg_only":false,"bottle_disabled":false,"options":[{"option":"--with-foo","description":"Build with foo"}],"build_dependencies":[],"dependencies":[],"recommended_dependencies":[],"optional_dependencies":[],"uses_from_macos":[],"requirements":[],"conflicts_with":[],"caveats":null,"installed":[],"linked_keg":null,"pinned":false,"outdated":false,"deprecated":false,"deprecation_date":null,"deprecation_reason":null,"disabled":false,"disable_date":null,"disable_reason":null}],"casks":[]}
<del> EOS
<del> }
<del>
<ide> it_behaves_like "parseable arguments"
<ide>
<ide> it "prints as json with the --json=v1 flag", :integration_test do
<ide> .and be_a_success
<ide> end
<ide>
<del> it "check --json=v2 format", :integration_test do
<del> setup_test_formula "testball"
<del>
<del> expect { brew "info", "testball", "--json=v2" }
<del> .to output(expected_output).to_stdout
<del> end
<del>
<ide> describe Homebrew do
<ide> describe "::github_remote_path" do
<ide> let(:remote) { "https://github.com/Homebrew/homebrew-core" }
<ide><path>Library/Homebrew/test/formula_spec.rb
<ide> expect(h).to be_a(Hash)
<ide> expect(h["name"]).to eq("foo")
<ide> expect(h["full_name"]).to eq("foo")
<add> expect(h["tap"]).to eq("homebrew/core")
<ide> expect(h["versions"]["stable"]).to eq("1.0")
<ide> expect(h["versions"]["bottle"]).to be_truthy
<ide> end | 2 |
Python | Python | remove unittest dependencies in numpy/lib/tests | 8eee1b06b330f6ff23fd3604444ba20a8d2d6416 | <ide><path>numpy/lib/tests/test__datasource.py
<ide> from shutil import rmtree
<ide>
<ide> from numpy.testing import (
<del> run_module_suite, TestCase, assert_, SkipTest
<add> run_module_suite, assert_, assert_equal, assert_raises, SkipTest,
<ide> )
<ide> import numpy.lib._datasource as datasource
<ide>
<ide> def teardown():
<ide> magic_line = b'three is the magic number'
<ide>
<ide>
<del># Utility functions used by many TestCases
<add># Utility functions used by many tests
<ide> def valid_textfile(filedir):
<ide> # Generate and return a valid temporary file.
<ide> fd, path = mkstemp(suffix='.txt', prefix='dstmp_', dir=filedir, text=True)
<ide> def invalid_httpfile():
<ide> return http_fakefile
<ide>
<ide>
<del>class TestDataSourceOpen(TestCase):
<del> def setUp(self):
<add>class TestDataSourceOpen(object):
<add> def setup(self):
<ide> self.tmpdir = mkdtemp()
<ide> self.ds = datasource.DataSource(self.tmpdir)
<ide>
<del> def tearDown(self):
<add> def teardown(self):
<ide> rmtree(self.tmpdir)
<ide> del self.ds
<ide>
<ide> def test_ValidHTTP(self):
<ide>
<ide> def test_InvalidHTTP(self):
<ide> url = invalid_httpurl()
<del> self.assertRaises(IOError, self.ds.open, url)
<add> assert_raises(IOError, self.ds.open, url)
<ide> try:
<ide> self.ds.open(url)
<ide> except IOError as e:
<ide> # Regression test for bug fixed in r4342.
<ide> assert_(e.errno is None)
<ide>
<ide> def test_InvalidHTTPCacheURLError(self):
<del> self.assertRaises(URLError, self.ds._cache, invalid_httpurl())
<add> assert_raises(URLError, self.ds._cache, invalid_httpurl())
<ide>
<ide> def test_ValidFile(self):
<ide> local_file = valid_textfile(self.tmpdir)
<ide> def test_ValidFile(self):
<ide>
<ide> def test_InvalidFile(self):
<ide> invalid_file = invalid_textfile(self.tmpdir)
<del> self.assertRaises(IOError, self.ds.open, invalid_file)
<add> assert_raises(IOError, self.ds.open, invalid_file)
<ide>
<ide> def test_ValidGzipFile(self):
<ide> try:
<ide> def test_ValidGzipFile(self):
<ide> fp = self.ds.open(filepath)
<ide> result = fp.readline()
<ide> fp.close()
<del> self.assertEqual(magic_line, result)
<add> assert_equal(magic_line, result)
<ide>
<ide> def test_ValidBz2File(self):
<ide> try:
<ide> def test_ValidBz2File(self):
<ide> fp = self.ds.open(filepath)
<ide> result = fp.readline()
<ide> fp.close()
<del> self.assertEqual(magic_line, result)
<add> assert_equal(magic_line, result)
<ide>
<ide>
<del>class TestDataSourceExists(TestCase):
<del> def setUp(self):
<add>class TestDataSourceExists(object):
<add> def setup(self):
<ide> self.tmpdir = mkdtemp()
<ide> self.ds = datasource.DataSource(self.tmpdir)
<ide>
<del> def tearDown(self):
<add> def teardown(self):
<ide> rmtree(self.tmpdir)
<ide> del self.ds
<ide>
<ide> def test_ValidHTTP(self):
<ide> assert_(self.ds.exists(valid_httpurl()))
<ide>
<ide> def test_InvalidHTTP(self):
<del> self.assertEqual(self.ds.exists(invalid_httpurl()), False)
<add> assert_equal(self.ds.exists(invalid_httpurl()), False)
<ide>
<ide> def test_ValidFile(self):
<ide> # Test valid file in destpath
<ide> def test_ValidFile(self):
<ide>
<ide> def test_InvalidFile(self):
<ide> tmpfile = invalid_textfile(self.tmpdir)
<del> self.assertEqual(self.ds.exists(tmpfile), False)
<add> assert_equal(self.ds.exists(tmpfile), False)
<ide>
<ide>
<del>class TestDataSourceAbspath(TestCase):
<del> def setUp(self):
<add>class TestDataSourceAbspath(object):
<add> def setup(self):
<ide> self.tmpdir = os.path.abspath(mkdtemp())
<ide> self.ds = datasource.DataSource(self.tmpdir)
<ide>
<del> def tearDown(self):
<add> def teardown(self):
<ide> rmtree(self.tmpdir)
<ide> del self.ds
<ide>
<ide> def test_ValidHTTP(self):
<ide> scheme, netloc, upath, pms, qry, frg = urlparse(valid_httpurl())
<ide> local_path = os.path.join(self.tmpdir, netloc,
<ide> upath.strip(os.sep).strip('/'))
<del> self.assertEqual(local_path, self.ds.abspath(valid_httpurl()))
<add> assert_equal(local_path, self.ds.abspath(valid_httpurl()))
<ide>
<ide> def test_ValidFile(self):
<ide> tmpfile = valid_textfile(self.tmpdir)
<ide> tmpfilename = os.path.split(tmpfile)[-1]
<ide> # Test with filename only
<del> self.assertEqual(tmpfile, self.ds.abspath(tmpfilename))
<add> assert_equal(tmpfile, self.ds.abspath(tmpfilename))
<ide> # Test filename with complete path
<del> self.assertEqual(tmpfile, self.ds.abspath(tmpfile))
<add> assert_equal(tmpfile, self.ds.abspath(tmpfile))
<ide>
<ide> def test_InvalidHTTP(self):
<ide> scheme, netloc, upath, pms, qry, frg = urlparse(invalid_httpurl())
<ide> invalidhttp = os.path.join(self.tmpdir, netloc,
<ide> upath.strip(os.sep).strip('/'))
<del> self.assertNotEqual(invalidhttp, self.ds.abspath(valid_httpurl()))
<add> assert_(invalidhttp != self.ds.abspath(valid_httpurl()))
<ide>
<ide> def test_InvalidFile(self):
<ide> invalidfile = valid_textfile(self.tmpdir)
<ide> tmpfile = valid_textfile(self.tmpdir)
<ide> tmpfilename = os.path.split(tmpfile)[-1]
<ide> # Test with filename only
<del> self.assertNotEqual(invalidfile, self.ds.abspath(tmpfilename))
<add> assert_(invalidfile != self.ds.abspath(tmpfilename))
<ide> # Test filename with complete path
<del> self.assertNotEqual(invalidfile, self.ds.abspath(tmpfile))
<add> assert_(invalidfile != self.ds.abspath(tmpfile))
<ide>
<ide> def test_sandboxing(self):
<ide> tmpfile = valid_textfile(self.tmpdir)
<ide> def test_windows_os_sep(self):
<ide> os.sep = orig_os_sep
<ide>
<ide>
<del>class TestRepositoryAbspath(TestCase):
<del> def setUp(self):
<add>class TestRepositoryAbspath(object):
<add> def setup(self):
<ide> self.tmpdir = os.path.abspath(mkdtemp())
<ide> self.repos = datasource.Repository(valid_baseurl(), self.tmpdir)
<ide>
<del> def tearDown(self):
<add> def teardown(self):
<ide> rmtree(self.tmpdir)
<ide> del self.repos
<ide>
<ide> def test_ValidHTTP(self):
<ide> local_path = os.path.join(self.repos._destpath, netloc,
<ide> upath.strip(os.sep).strip('/'))
<ide> filepath = self.repos.abspath(valid_httpfile())
<del> self.assertEqual(local_path, filepath)
<add> assert_equal(local_path, filepath)
<ide>
<ide> def test_sandboxing(self):
<ide> tmp_path = lambda x: os.path.abspath(self.repos.abspath(x))
<ide> def test_windows_os_sep(self):
<ide> os.sep = orig_os_sep
<ide>
<ide>
<del>class TestRepositoryExists(TestCase):
<del> def setUp(self):
<add>class TestRepositoryExists(object):
<add> def setup(self):
<ide> self.tmpdir = mkdtemp()
<ide> self.repos = datasource.Repository(valid_baseurl(), self.tmpdir)
<ide>
<del> def tearDown(self):
<add> def teardown(self):
<ide> rmtree(self.tmpdir)
<ide> del self.repos
<ide>
<ide> def test_ValidFile(self):
<ide>
<ide> def test_InvalidFile(self):
<ide> tmpfile = invalid_textfile(self.tmpdir)
<del> self.assertEqual(self.repos.exists(tmpfile), False)
<add> assert_equal(self.repos.exists(tmpfile), False)
<ide>
<ide> def test_RemoveHTTPFile(self):
<ide> assert_(self.repos.exists(valid_httpurl()))
<ide> def test_CachedHTTPFile(self):
<ide> assert_(self.repos.exists(tmpfile))
<ide>
<ide>
<del>class TestOpenFunc(TestCase):
<del> def setUp(self):
<add>class TestOpenFunc(object):
<add> def setup(self):
<ide> self.tmpdir = mkdtemp()
<ide>
<del> def tearDown(self):
<add> def teardown(self):
<ide> rmtree(self.tmpdir)
<ide>
<ide> def test_DataSourceOpen(self):
<ide><path>numpy/lib/tests/test__iotools.py
<ide>
<ide> import numpy as np
<ide> from numpy.testing import (
<del> run_module_suite, TestCase, assert_, assert_equal, assert_allclose,
<del> assert_raises
<add> run_module_suite, assert_, assert_equal, assert_allclose, assert_raises,
<ide> )
<ide> from numpy.lib._iotools import (
<ide> LineSplitter, NameValidator, StringConverter,
<ide> has_nested_fields, easy_dtype, flatten_dtype
<ide> )
<ide>
<ide>
<del>class TestLineSplitter(TestCase):
<add>class TestLineSplitter(object):
<ide> "Tests the LineSplitter class."
<ide>
<ide> def test_no_delimiter(self):
<ide> def test_variable_fixed_width(self):
<ide> # -----------------------------------------------------------------------------
<ide>
<ide>
<del>class TestNameValidator(TestCase):
<add>class TestNameValidator(object):
<ide>
<ide> def test_case_sensitivity(self):
<ide> "Test case sensitivity"
<ide> def _bytes_to_date(s):
<ide> return date(*time.strptime(s, "%Y-%m-%d")[:3])
<ide>
<ide>
<del>class TestStringConverter(TestCase):
<add>class TestStringConverter(object):
<ide> "Test StringConverter"
<ide>
<ide> def test_creation(self):
<ide> def test_uint64_dtype(self):
<ide> assert_(converter(val) == 9223372043271415339)
<ide>
<ide>
<del>class TestMiscFunctions(TestCase):
<add>class TestMiscFunctions(object):
<ide>
<ide> def test_has_nested_dtype(self):
<ide> "Test has_nested_dtype"
<ide><path>numpy/lib/tests/test_arraypad.py
<ide> from __future__ import division, absolute_import, print_function
<ide>
<ide> import numpy as np
<del>from numpy.testing import (assert_array_equal, assert_raises, assert_allclose,
<del> TestCase)
<add>from numpy.testing import (assert_array_equal, assert_raises, assert_allclose,)
<ide> from numpy.lib import pad
<ide>
<ide>
<del>class TestConditionalShortcuts(TestCase):
<add>class TestConditionalShortcuts(object):
<ide> def test_zero_padding_shortcuts(self):
<ide> test = np.arange(120).reshape(4, 5, 6)
<ide> pad_amt = [(0, 0) for axis in test.shape]
<ide> def test_clip_statistic_range(self):
<ide> pad(test, pad_amt, mode=mode, stat_length=30))
<ide>
<ide>
<del>class TestStatistic(TestCase):
<add>class TestStatistic(object):
<ide> def test_check_mean_stat_length(self):
<ide> a = np.arange(100).astype('f')
<ide> a = pad(a, ((25, 20), ), 'mean', stat_length=((2, 3), ))
<ide> def test_check_mean_2(self):
<ide> assert_array_equal(a, b)
<ide>
<ide>
<del>class TestConstant(TestCase):
<add>class TestConstant(object):
<ide> def test_check_constant(self):
<ide> a = np.arange(100)
<ide> a = pad(a, (25, 20), 'constant', constant_values=(10, 20))
<ide> def test_check_constant_pad_2d(self):
<ide> assert_allclose(test, expected)
<ide>
<ide>
<del>class TestLinearRamp(TestCase):
<add>class TestLinearRamp(object):
<ide> def test_check_simple(self):
<ide> a = np.arange(100).astype('f')
<ide> a = pad(a, (25, 20), 'linear_ramp', end_values=(4, 5))
<ide> def test_check_2d(self):
<ide> assert_allclose(test, expected)
<ide>
<ide>
<del>class TestReflect(TestCase):
<add>class TestReflect(object):
<ide> def test_check_simple(self):
<ide> a = np.arange(100)
<ide> a = pad(a, (25, 20), 'reflect')
<ide> def test_check_03(self):
<ide> assert_array_equal(a, b)
<ide>
<ide>
<del>class TestSymmetric(TestCase):
<add>class TestSymmetric(object):
<ide> def test_check_simple(self):
<ide> a = np.arange(100)
<ide> a = pad(a, (25, 20), 'symmetric')
<ide> def test_check_03(self):
<ide> assert_array_equal(a, b)
<ide>
<ide>
<del>class TestWrap(TestCase):
<add>class TestWrap(object):
<ide> def test_check_simple(self):
<ide> a = np.arange(100)
<ide> a = pad(a, (25, 20), 'wrap')
<ide> def test_check_02(self):
<ide> assert_array_equal(a, b)
<ide>
<ide>
<del>class TestStatLen(TestCase):
<add>class TestStatLen(object):
<ide> def test_check_simple(self):
<ide> a = np.arange(30)
<ide> a = np.reshape(a, (6, 5))
<ide> def test_check_simple(self):
<ide> assert_array_equal(a, b)
<ide>
<ide>
<del>class TestEdge(TestCase):
<add>class TestEdge(object):
<ide> def test_check_simple(self):
<ide> a = np.arange(12)
<ide> a = np.reshape(a, (4, 3))
<ide> def test_check_width_shape_1_2(self):
<ide> assert_array_equal(padded, expected)
<ide>
<ide>
<del>class TestZeroPadWidth(TestCase):
<add>class TestZeroPadWidth(object):
<ide> def test_zero_pad_width(self):
<ide> arr = np.arange(30)
<ide> arr = np.reshape(arr, (6, 5))
<ide> for pad_width in (0, (0, 0), ((0, 0), (0, 0))):
<ide> assert_array_equal(arr, pad(arr, pad_width, mode='constant'))
<ide>
<ide>
<del>class TestLegacyVectorFunction(TestCase):
<add>class TestLegacyVectorFunction(object):
<ide> def test_legacy_vector_functionality(self):
<ide> def _padwithtens(vector, pad_width, iaxis, kwargs):
<ide> vector[:pad_width[0]] = 10
<ide> def _padwithtens(vector, pad_width, iaxis, kwargs):
<ide> assert_array_equal(a, b)
<ide>
<ide>
<del>class TestNdarrayPadWidth(TestCase):
<add>class TestNdarrayPadWidth(object):
<ide> def test_check_simple(self):
<ide> a = np.arange(12)
<ide> a = np.reshape(a, (4, 3))
<ide> def test_check_simple(self):
<ide> assert_array_equal(a, b)
<ide>
<ide>
<del>class TestUnicodeInput(TestCase):
<add>class TestUnicodeInput(object):
<ide> def test_unicode_mode(self):
<ide> constant_mode = u'constant'
<ide> a = np.pad([1], 2, mode=constant_mode)
<ide> b = np.array([0, 0, 1, 0, 0])
<ide> assert_array_equal(a, b)
<ide>
<ide>
<del>class ValueError1(TestCase):
<add>class TestValueError1(object):
<ide> def test_check_simple(self):
<ide> arr = np.arange(30)
<ide> arr = np.reshape(arr, (6, 5))
<ide> def test_check_negative_pad_width(self):
<ide> **kwargs)
<ide>
<ide>
<del>class ValueError2(TestCase):
<add>class TestValueError2(object):
<ide> def test_check_negative_pad_amount(self):
<ide> arr = np.arange(30)
<ide> arr = np.reshape(arr, (6, 5))
<ide> def test_check_negative_pad_amount(self):
<ide> **kwargs)
<ide>
<ide>
<del>class ValueError3(TestCase):
<add>class TestValueError3(object):
<ide> def test_check_kwarg_not_allowed(self):
<ide> arr = np.arange(30).reshape(5, 6)
<ide> assert_raises(ValueError, pad, arr, 4, mode='mean',
<ide> def test_pad_too_many_axes(self):
<ide> mode='constant')
<ide>
<ide>
<del>class TypeError1(TestCase):
<add>class TestTypeError1(object):
<ide> def test_float(self):
<ide> arr = np.arange(30)
<ide> assert_raises(TypeError, pad, arr, ((-2.1, 3), (3, 2)))
<ide><path>numpy/lib/tests/test_arraysetops.py
<ide>
<ide> import numpy as np
<ide> from numpy.testing import (
<del> run_module_suite, TestCase, assert_array_equal, assert_equal, assert_raises
<add> run_module_suite, assert_array_equal, assert_equal, assert_raises,
<ide> )
<ide> from numpy.lib.arraysetops import (
<ide> ediff1d, intersect1d, setxor1d, union1d, setdiff1d, unique, in1d, isin
<ide> )
<ide>
<ide>
<del>class TestSetOps(TestCase):
<add>class TestSetOps(object):
<ide>
<ide> def test_intersect1d(self):
<ide> # unique inputs
<ide> def assert_isin_equal(a, b):
<ide> x = isin(a, b)
<ide> y = isin_slow(a, b)
<ide> assert_array_equal(x, y)
<del>
<add>
<ide> #multidimensional arrays in both arguments
<ide> a = np.arange(24).reshape([2, 3, 4])
<ide> b = np.array([[10, 20, 30], [0, 1, 3], [11, 22, 33]])
<ide> assert_isin_equal(a, b)
<del>
<add>
<ide> #array-likes as both arguments
<ide> c = [(9, 8), (7, 6)]
<ide> d = (9, 7)
<ide> assert_isin_equal(c, d)
<del>
<add>
<ide> #zero-d array:
<ide> f = np.array(3)
<ide> assert_isin_equal(f, b)
<ide> assert_isin_equal(a, f)
<ide> assert_isin_equal(f, f)
<del>
<add>
<ide> #scalar:
<ide> assert_isin_equal(5, b)
<ide> assert_isin_equal(a, 6)
<ide> assert_isin_equal(5, 6)
<del>
<add>
<ide> #empty array-like:
<ide> x = []
<ide> assert_isin_equal(x, b)
<ide> def test_manyways(self):
<ide> assert_array_equal(c1, c2)
<ide>
<ide>
<del>class TestUnique(TestCase):
<add>class TestUnique(object):
<ide>
<ide> def test_unique_1d(self):
<ide>
<ide><path>numpy/lib/tests/test_financial.py
<ide>
<ide> import numpy as np
<ide> from numpy.testing import (
<del> run_module_suite, TestCase, assert_, assert_almost_equal,
<del> assert_allclose, assert_equal
<add> run_module_suite, assert_, assert_almost_equal, assert_allclose,
<add> assert_equal
<ide> )
<ide>
<ide>
<del>class TestFinancial(TestCase):
<add>class TestFinancial(object):
<ide> def test_rate(self):
<ide> assert_almost_equal(np.rate(10, 0, -3500, 10000),
<ide> 0.1107, 4)
<ide><path>numpy/lib/tests/test_function_base.py
<ide>
<ide> import numpy as np
<ide> from numpy.testing import (
<del> run_module_suite, TestCase, assert_, assert_equal, assert_array_equal,
<add> run_module_suite, assert_, assert_equal, assert_array_equal,
<ide> assert_almost_equal, assert_array_almost_equal, assert_raises,
<del> assert_allclose, assert_array_max_ulp, assert_warns,
<del> assert_raises_regex, dec, suppress_warnings, HAS_REFCOUNT,
<add> assert_allclose, assert_array_max_ulp, assert_warns, assert_raises_regex,
<add> dec, suppress_warnings, HAS_REFCOUNT,
<ide> )
<ide> import numpy.lib.function_base as nfb
<ide> from numpy.random import rand
<ide> def get_mat(n):
<ide> return data
<ide>
<ide>
<del>class TestRot90(TestCase):
<add>class TestRot90(object):
<ide> def test_basic(self):
<del> self.assertRaises(ValueError, rot90, np.ones(4))
<add> assert_raises(ValueError, rot90, np.ones(4))
<ide> assert_raises(ValueError, rot90, np.ones((2,2,2)), axes=(0,1,2))
<ide> assert_raises(ValueError, rot90, np.ones((2,2)), axes=(0,2))
<ide> assert_raises(ValueError, rot90, np.ones((2,2)), axes=(1,1))
<ide> def test_rotation_axes(self):
<ide> rot90(a_rot90_20, k=k-1, axes=(2, 0)))
<ide>
<ide>
<del>class TestFlip(TestCase):
<add>class TestFlip(object):
<ide>
<ide> def test_axes(self):
<del> self.assertRaises(ValueError, np.flip, np.ones(4), axis=1)
<del> self.assertRaises(ValueError, np.flip, np.ones((4, 4)), axis=2)
<del> self.assertRaises(ValueError, np.flip, np.ones((4, 4)), axis=-3)
<add> assert_raises(ValueError, np.flip, np.ones(4), axis=1)
<add> assert_raises(ValueError, np.flip, np.ones((4, 4)), axis=2)
<add> assert_raises(ValueError, np.flip, np.ones((4, 4)), axis=-3)
<ide>
<ide> def test_basic_lr(self):
<ide> a = get_mat(4)
<ide> def test_4d(self):
<ide> np.flipud(a.swapaxes(0, i)).swapaxes(i, 0))
<ide>
<ide>
<del>class TestAny(TestCase):
<add>class TestAny(object):
<ide>
<ide> def test_basic(self):
<ide> y1 = [0, 0, 1, 0]
<ide> def test_nd(self):
<ide> assert_array_equal(np.sometrue(y1, axis=1), [0, 1, 1])
<ide>
<ide>
<del>class TestAll(TestCase):
<add>class TestAll(object):
<ide>
<ide> def test_basic(self):
<ide> y1 = [0, 1, 1, 0]
<ide> def test_nd(self):
<ide> assert_array_equal(np.alltrue(y1, axis=1), [0, 0, 1])
<ide>
<ide>
<del>class TestCopy(TestCase):
<add>class TestCopy(object):
<ide>
<ide> def test_basic(self):
<ide> a = np.array([[1, 2], [3, 4]])
<ide> def test_order(self):
<ide> assert_(a_fort_copy.flags.f_contiguous)
<ide>
<ide>
<del>class TestAverage(TestCase):
<add>class TestAverage(object):
<ide>
<ide> def test_basic(self):
<ide> y1 = np.array([1, 2, 3])
<ide> def test_object_dtype(self):
<ide> a = np.array([decimal.Decimal(x) for x in range(10)])
<ide> w = np.array([decimal.Decimal(1) for _ in range(10)])
<ide> w /= w.sum()
<del> assert_almost_equal(a.mean(0), average(a, weights=w))
<add> assert_almost_equal(a.mean(0), average(a, weights=w))
<ide>
<del>class TestSelect(TestCase):
<add>class TestSelect(object):
<ide> choices = [np.array([1, 2, 3]),
<ide> np.array([4, 5, 6]),
<ide> np.array([7, 8, 9])]
<ide> def test_many_arguments(self):
<ide> select(conditions, choices)
<ide>
<ide>
<del>class TestInsert(TestCase):
<add>class TestInsert(object):
<ide>
<ide> def test_basic(self):
<ide> a = [1, 2, 3]
<ide> def test_structured_array(self):
<ide> assert_array_equal(b[[0, 3]], np.array(val, dtype=b.dtype))
<ide>
<ide>
<del>class TestAmax(TestCase):
<add>class TestAmax(object):
<ide>
<ide> def test_basic(self):
<ide> a = [3, 4, 5, 10, -3, -5, 6.0]
<ide> def test_basic(self):
<ide> assert_equal(np.amax(b, axis=1), [9.0, 10.0, 8.0])
<ide>
<ide>
<del>class TestAmin(TestCase):
<add>class TestAmin(object):
<ide>
<ide> def test_basic(self):
<ide> a = [3, 4, 5, 10, -3, -5, 6.0]
<ide> def test_basic(self):
<ide> assert_equal(np.amin(b, axis=1), [3.0, 4.0, 2.0])
<ide>
<ide>
<del>class TestPtp(TestCase):
<add>class TestPtp(object):
<ide>
<ide> def test_basic(self):
<ide> a = np.array([3, 4, 5, 10, -3, -5, 6.0])
<ide> def test_basic(self):
<ide> assert_equal(b.ptp(axis=-1), [6.0, 6.0, 6.0])
<ide>
<ide>
<del>class TestCumsum(TestCase):
<add>class TestCumsum(object):
<ide>
<ide> def test_basic(self):
<ide> ba = [1, 2, 10, 11, 6, 5, 4]
<ide> def test_basic(self):
<ide> assert_array_equal(np.cumsum(a2, axis=1), tgt)
<ide>
<ide>
<del>class TestProd(TestCase):
<add>class TestProd(object):
<ide>
<ide> def test_basic(self):
<ide> ba = [1, 2, 10, 11, 6, 5, 4]
<ide> def test_basic(self):
<ide> a = np.array(ba, ctype)
<ide> a2 = np.array(ba2, ctype)
<ide> if ctype in ['1', 'b']:
<del> self.assertRaises(ArithmeticError, np.prod, a)
<del> self.assertRaises(ArithmeticError, np.prod, a2, 1)
<add> assert_raises(ArithmeticError, np.prod, a)
<add> assert_raises(ArithmeticError, np.prod, a2, 1)
<ide> else:
<ide> assert_equal(a.prod(axis=0), 26400)
<ide> assert_array_equal(a2.prod(axis=0),
<ide> def test_basic(self):
<ide> np.array([24, 1890, 600], ctype))
<ide>
<ide>
<del>class TestCumprod(TestCase):
<add>class TestCumprod(object):
<ide>
<ide> def test_basic(self):
<ide> ba = [1, 2, 10, 11, 6, 5, 4]
<ide> def test_basic(self):
<ide> a = np.array(ba, ctype)
<ide> a2 = np.array(ba2, ctype)
<ide> if ctype in ['1', 'b']:
<del> self.assertRaises(ArithmeticError, np.cumprod, a)
<del> self.assertRaises(ArithmeticError, np.cumprod, a2, 1)
<del> self.assertRaises(ArithmeticError, np.cumprod, a)
<add> assert_raises(ArithmeticError, np.cumprod, a)
<add> assert_raises(ArithmeticError, np.cumprod, a2, 1)
<add> assert_raises(ArithmeticError, np.cumprod, a)
<ide> else:
<ide> assert_array_equal(np.cumprod(a, axis=-1),
<ide> np.array([1, 2, 20, 220,
<ide> def test_basic(self):
<ide> [10, 30, 120, 600]], ctype))
<ide>
<ide>
<del>class TestDiff(TestCase):
<add>class TestDiff(object):
<ide>
<ide> def test_basic(self):
<ide> x = [1, 4, 6, 7, 12]
<ide> def test_nd(self):
<ide> assert_array_equal(diff(x, n=2, axis=0), out4)
<ide>
<ide>
<del>class TestDelete(TestCase):
<add>class TestDelete(object):
<ide>
<del> def setUp(self):
<add> def setup(self):
<ide> self.a = np.arange(5)
<ide> self.nd_a = np.arange(5).repeat(2).reshape(1, 5, 2)
<ide>
<ide> def test_array_order_preserve(self):
<ide> assert_equal(m.flags.f_contiguous, k.flags.f_contiguous)
<ide>
<ide>
<del>class TestGradient(TestCase):
<add>class TestGradient(object):
<ide>
<ide> def test_basic(self):
<ide> v = [[1, 1], [3, 4]]
<ide> def test_basic(self):
<ide> assert_array_equal(gradient(x), dx)
<ide> assert_array_equal(gradient(v), dx)
<ide>
<del> def test_args(self):
<add> def test_args(self):
<ide> dx = np.cumsum(np.ones(5))
<ide> dx_uneven = [1., 2., 5., 9., 11.]
<ide> f_2d = np.arange(25).reshape(5, 5)
<ide> def test_second_order_accurate(self):
<ide>
<ide> def test_spacing(self):
<ide> f = np.array([0, 2., 3., 4., 5., 5.])
<del> f = np.tile(f, (6,1)) + f.reshape(-1, 1)
<add> f = np.tile(f, (6,1)) + f.reshape(-1, 1)
<ide> x_uneven = np.array([0., 0.5, 1., 3., 5., 7.])
<ide> x_even = np.arange(6.)
<del>
<add>
<ide> fdx_even_ord1 = np.tile([2., 1.5, 1., 1., 0.5, 0.], (6,1))
<ide> fdx_even_ord2 = np.tile([2.5, 1.5, 1., 1., 0.5, -0.5], (6,1))
<ide> fdx_uneven_ord1 = np.tile([4., 3., 1.7, 0.5, 0.25, 0.], (6,1))
<ide> fdx_uneven_ord2 = np.tile([5., 3., 1.7, 0.5, 0.25, -0.25], (6,1))
<del>
<add>
<ide> # evenly spaced
<ide> for edge_order, exp_res in [(1, fdx_even_ord1), (2, fdx_even_ord2)]:
<ide> res1 = gradient(f, 1., axis=(0,1), edge_order=edge_order)
<ide> def test_spacing(self):
<ide> axis=None, edge_order=edge_order)
<ide> assert_array_equal(res1, res2)
<ide> assert_array_equal(res2, res3)
<del> assert_almost_equal(res1[0], exp_res.T)
<del> assert_almost_equal(res1[1], exp_res)
<del>
<add> assert_almost_equal(res1[0], exp_res.T)
<add> assert_almost_equal(res1[1], exp_res)
<add>
<ide> res1 = gradient(f, 1., axis=0, edge_order=edge_order)
<ide> res2 = gradient(f, x_even, axis=0, edge_order=edge_order)
<ide> assert_(res1.shape == res2.shape)
<ide> assert_almost_equal(res2, exp_res.T)
<del>
<add>
<ide> res1 = gradient(f, 1., axis=1, edge_order=edge_order)
<ide> res2 = gradient(f, x_even, axis=1, edge_order=edge_order)
<ide> assert_(res1.shape == res2.shape)
<ide> assert_array_equal(res2, exp_res)
<del>
<add>
<ide> # unevenly spaced
<ide> for edge_order, exp_res in [(1, fdx_uneven_ord1), (2, fdx_uneven_ord2)]:
<ide> res1 = gradient(f, x_uneven, x_uneven,
<ide> def test_spacing(self):
<ide> assert_array_equal(res1, res2)
<ide> assert_almost_equal(res1[0], exp_res.T)
<ide> assert_almost_equal(res1[1], exp_res)
<del>
<add>
<ide> res1 = gradient(f, x_uneven, axis=0, edge_order=edge_order)
<ide> assert_almost_equal(res1, exp_res.T)
<del>
<add>
<ide> res1 = gradient(f, x_uneven, axis=1, edge_order=edge_order)
<ide> assert_almost_equal(res1, exp_res)
<del>
<add>
<ide> # mixed
<ide> res1 = gradient(f, x_even, x_uneven, axis=(0,1), edge_order=1)
<ide> res2 = gradient(f, x_uneven, x_even, axis=(1,0), edge_order=1)
<ide> assert_array_equal(res1[0], res2[1])
<ide> assert_array_equal(res1[1], res2[0])
<ide> assert_almost_equal(res1[0], fdx_even_ord1.T)
<ide> assert_almost_equal(res1[1], fdx_uneven_ord1)
<del>
<add>
<ide> res1 = gradient(f, x_even, x_uneven, axis=(0,1), edge_order=2)
<ide> res2 = gradient(f, x_uneven, x_even, axis=(1,0), edge_order=2)
<ide> assert_array_equal(res1[0], res2[1])
<ide> assert_array_equal(res1[1], res2[0])
<ide> assert_almost_equal(res1[0], fdx_even_ord2.T)
<ide> assert_almost_equal(res1[1], fdx_uneven_ord2)
<del>
<add>
<ide> def test_specific_axes(self):
<ide> # Testing that gradient can work on a given axis only
<ide> v = [[1, 1], [3, 4]]
<ide> def test_specific_axes(self):
<ide> assert_raises(np.AxisError, gradient, x, axis=3)
<ide> assert_raises(np.AxisError, gradient, x, axis=-3)
<ide> # assert_raises(TypeError, gradient, x, axis=[1,])
<del>
<add>
<ide> def test_timedelta64(self):
<ide> # Make sure gradient() can handle special types like timedelta64
<ide> x = np.array(
<ide> def test_values(self):
<ide> gradient(np.arange(2), edge_order=1)
<ide> # needs at least 3 points for edge_order ==1
<ide> gradient(np.arange(3), edge_order=2)
<del>
<add>
<ide> assert_raises(ValueError, gradient, np.arange(0), edge_order=1)
<ide> assert_raises(ValueError, gradient, np.arange(0), edge_order=2)
<ide> assert_raises(ValueError, gradient, np.arange(1), edge_order=1)
<ide> assert_raises(ValueError, gradient, np.arange(1), edge_order=2)
<ide> assert_raises(ValueError, gradient, np.arange(2), edge_order=2)
<ide>
<ide>
<del>class TestAngle(TestCase):
<add>class TestAngle(object):
<ide>
<ide> def test_basic(self):
<ide> x = [1 + 3j, np.sqrt(2) / 2.0 + 1j * np.sqrt(2) / 2,
<ide> def test_basic(self):
<ide> assert_array_almost_equal(z, zo, 11)
<ide>
<ide>
<del>class TestTrimZeros(TestCase):
<add>class TestTrimZeros(object):
<ide>
<ide> """
<ide> Only testing for integer splits.
<ide> def test_trailing_skip(self):
<ide> assert_array_equal(res, np.array([1, 0, 2, 3, 0, 4]))
<ide>
<ide>
<del>class TestExtins(TestCase):
<add>class TestExtins(object):
<ide>
<ide> def test_basic(self):
<ide> a = np.array([1, 3, 2, 1, 2, 3, 3])
<ide> def test_both(self):
<ide> assert_array_equal(a, ac)
<ide>
<ide>
<del>class TestVectorize(TestCase):
<add>class TestVectorize(object):
<ide>
<ide> def test_simple(self):
<ide> def addsubtract(a, b):
<ide> def test_size_zero_output(self):
<ide> f(x)
<ide>
<ide>
<del>class TestDigitize(TestCase):
<add>class TestDigitize(object):
<ide>
<ide> def test_forward(self):
<ide> x = np.arange(-6, 5)
<ide> class A(np.ndarray):
<ide> assert_(not isinstance(digitize(b, a, True), A))
<ide>
<ide>
<del>class TestUnwrap(TestCase):
<add>class TestUnwrap(object):
<ide>
<ide> def test_simple(self):
<ide> # check that unwrap removes jumps greather that 2*pi
<ide> def test_simple(self):
<ide> assert_(np.all(diff(unwrap(rand(10) * 100)) < np.pi))
<ide>
<ide>
<del>class TestFilterwindows(TestCase):
<add>class TestFilterwindows(object):
<ide>
<ide> def test_hanning(self):
<ide> # check symmetry
<ide> def test_blackman(self):
<ide> assert_almost_equal(np.sum(w, axis=0), 3.7800, 4)
<ide>
<ide>
<del>class TestTrapz(TestCase):
<add>class TestTrapz(object):
<ide>
<ide> def test_simple(self):
<ide> x = np.arange(-10, 10, .1)
<ide> def test_matrix(self):
<ide> assert_almost_equal(mr, r)
<ide>
<ide>
<del>class TestSinc(TestCase):
<add>class TestSinc(object):
<ide>
<ide> def test_simple(self):
<ide> assert_(sinc(0) == 1)
<ide> def test_array_like(self):
<ide> assert_array_equal(y1, y3)
<ide>
<ide>
<del>class TestHistogram(TestCase):
<add>class TestHistogram(object):
<ide>
<del> def setUp(self):
<add> def setup(self):
<ide> pass
<ide>
<del> def tearDown(self):
<add> def teardown(self):
<ide> pass
<ide>
<ide> def test_simple(self):
<ide> def test_bin_edge_cases(self):
<ide> left_edges = edges[:-1][mask]
<ide> right_edges = edges[1:][mask]
<ide> for x, left, right in zip(arr, left_edges, right_edges):
<del> self.assertGreaterEqual(x, left)
<del> self.assertLess(x, right)
<add> assert_(x >= left)
<add> assert_(x < right)
<ide>
<ide> def test_last_bin_inclusive_range(self):
<ide> arr = np.array([0., 0., 0., 1., 2., 3., 3., 4., 5.])
<ide> hist, edges = np.histogram(arr, bins=30, range=(-0.5, 5))
<del> self.assertEqual(hist[-1], 1)
<add> assert_equal(hist[-1], 1)
<ide>
<ide>
<del>class TestHistogramOptimBinNums(TestCase):
<add>class TestHistogramOptimBinNums(object):
<ide> """
<ide> Provide test coverage when using provided estimators for optimal number of
<ide> bins
<ide> def test_simple_range(self):
<ide> completely ignored. All test values have been precomputed and
<ide> the shouldn't change.
<ide> """
<del> # some basic sanity checking, with some fixed data.
<add> # some basic sanity checking, with some fixed data.
<ide> # Checking for the correct number of bins
<ide> basic_test = {
<ide> 50: {'fd': 8, 'scott': 8, 'rice': 15,
<ide> def test_simple_range(self):
<ide> }
<ide>
<ide> for testlen, expectedResults in basic_test.items():
<del> # create some sort of non uniform data to test with
<add> # create some sort of non uniform data to test with
<ide> # (3 peak uniform mixture)
<ide> x1 = np.linspace(-10, -1, testlen // 5 * 2)
<ide> x2 = np.linspace(1, 10, testlen // 5 * 3)
<ide> def test_simple_weighted(self):
<ide> """
<ide> estimator_list = ['fd', 'scott', 'rice', 'sturges', 'auto']
<ide> for estimator in estimator_list:
<del> assert_raises(TypeError, histogram, [1, 2, 3],
<add> assert_raises(TypeError, histogram, [1, 2, 3],
<ide> estimator, weights=[1, 2, 3])
<ide>
<ide>
<del>class TestHistogramdd(TestCase):
<add>class TestHistogramdd(object):
<ide>
<ide> def test_simple(self):
<ide> x = np.array([[-.5, .5, 1.5], [-.5, 1.5, 2.5], [-.5, 2.5, .5],
<ide> def test_finite_range(self):
<ide> range=[[0.0, 1.0], [np.nan, 0.75], [0.25, 0.5]])
<ide>
<ide>
<del>class TestUnique(TestCase):
<add>class TestUnique(object):
<ide>
<ide> def test_simple(self):
<ide> x = np.array([4, 3, 2, 1, 1, 2, 3, 4, 0])
<ide> def test_simple(self):
<ide> assert_(np.all(unique(x) == [1 + 1j, 1 + 10j, 5 + 6j, 10]))
<ide>
<ide>
<del>class TestCheckFinite(TestCase):
<add>class TestCheckFinite(object):
<ide>
<ide> def test_simple(self):
<ide> a = [1, 2, 3]
<ide> def test_dtype_order(self):
<ide> assert_(a.dtype == np.float64)
<ide>
<ide>
<del>class TestCorrCoef(TestCase):
<add>class TestCorrCoef(object):
<ide> A = np.array(
<ide> [[0.15391142, 0.18045767, 0.14197213],
<ide> [0.70461506, 0.96474128, 0.27906989],
<ide> def test_extreme(self):
<ide> assert_(np.all(np.abs(c) <= 1.0))
<ide>
<ide>
<del>class TestCov(TestCase):
<add>class TestCov(object):
<ide> x1 = np.array([[0, 2], [1, 1], [2, 0]]).T
<ide> res1 = np.array([[1., -1.], [-1., 1.]])
<ide> x2 = np.array([0.0, 1.0, 2.0], ndmin=2)
<ide> def test_unit_fweights_and_aweights(self):
<ide> self.res1)
<ide>
<ide>
<del>class Test_I0(TestCase):
<add>class Test_I0(object):
<ide>
<ide> def test_simple(self):
<ide> assert_almost_equal(
<ide> def test_simple(self):
<ide> [1.05884290, 1.06432317]]))
<ide>
<ide>
<del>class TestKaiser(TestCase):
<add>class TestKaiser(object):
<ide>
<ide> def test_simple(self):
<ide> assert_(np.isfinite(kaiser(1, 1.0)))
<ide> def test_int_beta(self):
<ide> kaiser(3, 4)
<ide>
<ide>
<del>class TestMsort(TestCase):
<add>class TestMsort(object):
<ide>
<ide> def test_simple(self):
<ide> A = np.array([[0.44567325, 0.79115165, 0.54900530],
<ide> def test_simple(self):
<ide> [0.64864341, 0.79115165, 0.96098397]]))
<ide>
<ide>
<del>class TestMeshgrid(TestCase):
<add>class TestMeshgrid(object):
<ide>
<ide> def test_simple(self):
<ide> [X, Y] = meshgrid([1, 2, 3], [4, 5, 6, 7])
<ide> def test_writeback(self):
<ide> assert_equal(x[1, :], X)
<ide>
<ide>
<del>class TestPiecewise(TestCase):
<add>class TestPiecewise(object):
<ide>
<ide> def test_simple(self):
<ide> # Condition is single bool list
<ide> def test_multidimensional_extrafunc(self):
<ide> [3., 3., 1.]]))
<ide>
<ide>
<del>class TestBincount(TestCase):
<add>class TestBincount(object):
<ide>
<ide> def test_simple(self):
<ide> y = np.bincount(np.arange(4))
<ide> def test_dtype_reference_leaks(self):
<ide> assert_equal(sys.getrefcount(np.dtype(np.double)), double_refcount)
<ide>
<ide>
<del>class TestInterp(TestCase):
<add>class TestInterp(object):
<ide>
<ide> def test_exceptions(self):
<ide> assert_raises(ValueError, interp, 0, [], [])
<ide> def compare_results(res, desired):
<ide> assert_array_equal(res[i], desired[i])
<ide>
<ide>
<del>class TestPercentile(TestCase):
<add>class TestPercentile(object):
<ide>
<ide> def test_basic(self):
<ide> x = np.arange(8) * 0.5
<ide> def test_scalar_q(self):
<ide> # test for no empty dimensions for compatibility with old percentile
<ide> x = np.arange(12).reshape(3, 4)
<ide> assert_equal(np.percentile(x, 50), 5.5)
<del> self.assertTrue(np.isscalar(np.percentile(x, 50)))
<add> assert_(np.isscalar(np.percentile(x, 50)))
<ide> r0 = np.array([4., 5., 6., 7.])
<ide> assert_equal(np.percentile(x, 50, axis=0), r0)
<ide> assert_equal(np.percentile(x, 50, axis=0).shape, r0.shape)
<ide> def test_scalar_q(self):
<ide> # test for no empty dimensions for compatibility with old percentile
<ide> x = np.arange(12).reshape(3, 4)
<ide> assert_equal(np.percentile(x, 50, interpolation='lower'), 5.)
<del> self.assertTrue(np.isscalar(np.percentile(x, 50)))
<add> assert_(np.isscalar(np.percentile(x, 50)))
<ide> r0 = np.array([4., 5., 6., 7.])
<ide> c0 = np.percentile(x, 50, interpolation='lower', axis=0)
<ide> assert_equal(c0, r0)
<ide> def test_nan_behavior(self):
<ide> a, [0.3, 0.6], (0, 2), interpolation='nearest'), b)
<ide>
<ide>
<del>class TestMedian(TestCase):
<add>class TestMedian(object):
<ide>
<ide> def test_basic(self):
<ide> a0 = np.array(1)
<ide> def test_keepdims(self):
<ide> (1, 1, 7, 1))
<ide>
<ide>
<del>class TestAdd_newdoc_ufunc(TestCase):
<add>class TestAdd_newdoc_ufunc(object):
<ide>
<ide> def test_ufunc_arg(self):
<ide> assert_raises(TypeError, add_newdoc_ufunc, 2, "blah")
<ide> def test_string_arg(self):
<ide> assert_raises(TypeError, add_newdoc_ufunc, np.add, 3)
<ide>
<ide>
<del>class TestAdd_newdoc(TestCase):
<add>class TestAdd_newdoc(object):
<ide>
<ide> @dec.skipif(sys.flags.optimize == 2)
<ide> def test_add_doc(self):
<ide> # test np.add_newdoc
<ide> tgt = "Current flat index into the array."
<del> self.assertEqual(np.core.flatiter.index.__doc__[:len(tgt)], tgt)
<del> self.assertTrue(len(np.core.ufunc.identity.__doc__) > 300)
<del> self.assertTrue(len(np.lib.index_tricks.mgrid.__doc__) > 300)
<add> assert_equal(np.core.flatiter.index.__doc__[:len(tgt)], tgt)
<add> assert_(len(np.core.ufunc.identity.__doc__) > 300)
<add> assert_(len(np.lib.index_tricks.mgrid.__doc__) > 300)
<ide>
<ide>
<ide> if __name__ == "__main__":
<ide><path>numpy/lib/tests/test_index_tricks.py
<ide>
<ide> import numpy as np
<ide> from numpy.testing import (
<del> run_module_suite, TestCase, assert_, assert_equal, assert_array_equal,
<add> run_module_suite, assert_, assert_equal, assert_array_equal,
<ide> assert_almost_equal, assert_array_almost_equal, assert_raises
<ide> )
<ide> from numpy.lib.index_tricks import (
<ide> )
<ide>
<ide>
<del>class TestRavelUnravelIndex(TestCase):
<add>class TestRavelUnravelIndex(object):
<ide> def test_basic(self):
<ide> assert_equal(np.unravel_index(2, (2, 2)), (1, 0))
<ide> assert_equal(np.ravel_multi_index((1, 0), (2, 2)), 2)
<ide> def test_clipmodes(self):
<ide> def test_writeability(self):
<ide> # See gh-7269
<ide> x, y = np.unravel_index([1, 2, 3], (4, 5))
<del> self.assertTrue(x.flags.writeable)
<del> self.assertTrue(y.flags.writeable)
<add> assert_(x.flags.writeable)
<add> assert_(y.flags.writeable)
<ide>
<ide>
<del>class TestGrid(TestCase):
<add>class TestGrid(object):
<ide> def test_basic(self):
<ide> a = mgrid[-1:1:10j]
<ide> b = mgrid[-1:1:0.1]
<ide> def test_nd(self):
<ide> 0.2*np.ones(20, 'd'), 11)
<ide>
<ide>
<del>class TestConcatenator(TestCase):
<add>class TestConcatenator(object):
<ide> def test_1d(self):
<ide> assert_array_equal(r_[1, 2, 3, 4, 5, 6], np.array([1, 2, 3, 4, 5, 6]))
<ide> b = np.ones(5)
<ide> def test_matrix_builder(self):
<ide> assert_equal(type(actual), type(expected))
<ide>
<ide>
<del>class TestNdenumerate(TestCase):
<add>class TestNdenumerate(object):
<ide> def test_basic(self):
<ide> a = np.array([[1, 2], [3, 4]])
<ide> assert_equal(list(ndenumerate(a)),
<ide> [((0, 0), 1), ((0, 1), 2), ((1, 0), 3), ((1, 1), 4)])
<ide>
<ide>
<del>class TestIndexExpression(TestCase):
<add>class TestIndexExpression(object):
<ide> def test_regression_1(self):
<ide> # ticket #1196
<ide> a = np.arange(2)
<ide> def test_simple_1(self):
<ide> assert_equal(a[:, :3, [1, 2]], a[s_[:, :3, [1, 2]]])
<ide>
<ide>
<del>class TestIx_(TestCase):
<add>class TestIx_(object):
<ide> def test_regression_1(self):
<ide> # Test empty inputs create ouputs of indexing type, gh-5804
<ide> # Test both lists and arrays
<ide><path>numpy/lib/tests/test_io.py
<ide> from numpy.compat import asbytes, bytes, unicode, Path
<ide> from numpy.ma.testutils import assert_equal
<ide> from numpy.testing import (
<del> TestCase, run_module_suite, assert_warns, assert_,
<del> assert_raises_regex, assert_raises, assert_allclose,
<del> assert_array_equal, temppath, dec, IS_PYPY, suppress_warnings
<add> run_module_suite, assert_warns, assert_, assert_raises_regex,
<add> assert_raises, assert_allclose, assert_array_equal, temppath, dec, IS_PYPY,
<add> suppress_warnings
<ide> )
<ide>
<ide>
<ide> def test_format_2_0(self):
<ide> self.check_roundtrips(a)
<ide>
<ide>
<del>class TestSaveLoad(RoundtripTest, TestCase):
<add>class TestSaveLoad(RoundtripTest):
<ide> def roundtrip(self, *args, **kwargs):
<ide> RoundtripTest.roundtrip(self, np.save, *args, **kwargs)
<ide> assert_equal(self.arr[0], self.arr_reloaded)
<ide> assert_equal(self.arr[0].dtype, self.arr_reloaded.dtype)
<ide> assert_equal(self.arr[0].flags.fnc, self.arr_reloaded.flags.fnc)
<ide>
<ide>
<del>class TestSavezLoad(RoundtripTest, TestCase):
<add>class TestSavezLoad(RoundtripTest):
<ide> def roundtrip(self, *args, **kwargs):
<ide> RoundtripTest.roundtrip(self, np.savez, *args, **kwargs)
<ide> try:
<ide> def test_closing_zipfile_after_load(self):
<ide> assert_(fp.closed)
<ide>
<ide>
<del>class TestSaveTxt(TestCase):
<add>class TestSaveTxt(object):
<ide> def test_array(self):
<ide> a = np.array([[1, 2], [3, 4]], float)
<ide> fmt = "%.18e"
<ide> def write(self, text):
<ide> assert_array_equal(a, b)
<ide>
<ide>
<del>class TestLoadTxt(TestCase):
<add>class TestLoadTxt(object):
<ide> def test_record(self):
<ide> c = TextIO()
<ide> c.write('1 2\n3 4')
<ide> def test_none_as_string(self):
<ide> np.loadtxt(c, delimiter=',', dtype=dt, comments=None) # Should succeed
<ide>
<ide>
<del>class Testfromregex(TestCase):
<add>class Testfromregex(object):
<ide> # np.fromregex expects files opened in binary mode.
<ide> def test_record(self):
<ide> c = TextIO()
<ide> def test_record_3(self):
<ide> #####--------------------------------------------------------------------------
<ide>
<ide>
<del>class TestFromTxt(TestCase):
<add>class TestFromTxt(object):
<ide> #
<ide> def test_record(self):
<ide> # Test w/ explicit dtype
<ide> def test_recfromtxt(self):
<ide> test = np.recfromtxt(data, **kwargs)
<ide> control = np.array([(0, 1), (2, 3)],
<ide> dtype=[('A', np.int), ('B', np.int)])
<del> self.assertTrue(isinstance(test, np.recarray))
<add> assert_(isinstance(test, np.recarray))
<ide> assert_equal(test, control)
<ide> #
<ide> data = TextIO('A,B\n0,1\n2,N/A')
<ide> def test_recfromcsv(self):
<ide> test = np.recfromcsv(data, dtype=None, **kwargs)
<ide> control = np.array([(0, 1), (2, 3)],
<ide> dtype=[('A', np.int), ('B', np.int)])
<del> self.assertTrue(isinstance(test, np.recarray))
<add> assert_(isinstance(test, np.recarray))
<ide> assert_equal(test, control)
<ide> #
<ide> data = TextIO('A,B\n0,1\n2,N/A')
<ide> def test_recfromcsv(self):
<ide> test = np.recfromcsv(data, missing_values='N/A',)
<ide> control = np.array([(0, 1), (2, 3)],
<ide> dtype=[('a', np.int), ('b', np.int)])
<del> self.assertTrue(isinstance(test, np.recarray))
<add> assert_(isinstance(test, np.recarray))
<ide> assert_equal(test, control)
<ide> #
<ide> data = TextIO('A,B\n0,1\n2,3')
<ide> dtype = [('a', np.int), ('b', np.float)]
<ide> test = np.recfromcsv(data, missing_values='N/A', dtype=dtype)
<ide> control = np.array([(0, 1), (2, 3)],
<ide> dtype=dtype)
<del> self.assertTrue(isinstance(test, np.recarray))
<add> assert_(isinstance(test, np.recarray))
<ide> assert_equal(test, control)
<ide>
<ide> def test_max_rows(self):
<ide> def test_auto_dtype_largeint(self):
<ide> assert_equal(test['f2'], 1024)
<ide>
<ide>
<del>class TestPathUsage(TestCase):
<add>class TestPathUsage(object):
<ide> # Test that pathlib.Path can be used
<ide> @np.testing.dec.skipif(Path is None, "No pathlib.Path")
<ide> def test_loadtxt(self):
<ide> def test_recfromtxt(self):
<ide> test = np.recfromtxt(path, **kwargs)
<ide> control = np.array([(0, 1), (2, 3)],
<ide> dtype=[('A', np.int), ('B', np.int)])
<del> self.assertTrue(isinstance(test, np.recarray))
<add> assert_(isinstance(test, np.recarray))
<ide> assert_equal(test, control)
<ide>
<ide> @np.testing.dec.skipif(Path is None, "No pathlib.Path")
<ide> def test_recfromcsv(self):
<ide> test = np.recfromcsv(path, dtype=None, **kwargs)
<ide> control = np.array([(0, 1), (2, 3)],
<ide> dtype=[('A', np.int), ('B', np.int)])
<del> self.assertTrue(isinstance(test, np.recarray))
<add> assert_(isinstance(test, np.recarray))
<ide> assert_equal(test, control)
<ide>
<ide>
<ide><path>numpy/lib/tests/test_mixins.py
<ide>
<ide> import numpy as np
<ide> from numpy.testing import (
<del> TestCase, run_module_suite, assert_, assert_equal, assert_raises)
<add> run_module_suite, assert_, assert_equal, assert_raises
<add> )
<ide>
<ide>
<ide> PY2 = sys.version_info.major < 3
<ide> def _assert_equal_type_and_value(result, expected, err_msg=None):
<ide> ]
<ide>
<ide>
<del>class TestNDArrayOperatorsMixin(TestCase):
<add>class TestNDArrayOperatorsMixin(object):
<ide>
<ide> def test_array_like_add(self):
<ide>
<ide><path>numpy/lib/tests/test_nanfunctions.py
<ide>
<ide> import numpy as np
<ide> from numpy.testing import (
<del> run_module_suite, TestCase, assert_, assert_equal, assert_almost_equal,
<add> run_module_suite, assert_, assert_equal, assert_almost_equal,
<ide> assert_no_warnings, assert_raises, assert_array_equal, suppress_warnings
<ide> )
<ide>
<ide> [0.1610, 0.0, 0.0, 0.1859, 0.3146, 0.0]])
<ide>
<ide>
<del>class TestNanFunctions_MinMax(TestCase):
<add>class TestNanFunctions_MinMax(object):
<ide>
<ide> nanfuncs = [np.nanmin, np.nanmax]
<ide> stdfuncs = [np.min, np.max]
<ide> def test_object_array(self):
<ide> assert_(issubclass(w[0].category, RuntimeWarning))
<ide>
<ide>
<del>class TestNanFunctions_ArgminArgmax(TestCase):
<add>class TestNanFunctions_ArgminArgmax(object):
<ide>
<ide> nanfuncs = [np.nanargmin, np.nanargmax]
<ide>
<ide> def test_matrices(self):
<ide> assert_(np.isscalar(res))
<ide>
<ide>
<del>class TestNanFunctions_IntTypes(TestCase):
<add>class TestNanFunctions_IntTypes(object):
<ide>
<ide> int_types = (np.int8, np.int16, np.int32, np.int64, np.uint8,
<ide> np.uint16, np.uint32, np.uint64)
<ide> def test_matrices(self):
<ide> assert_(np.isscalar(res))
<ide>
<ide>
<del>class TestNanFunctions_SumProd(TestCase, SharedNanFunctionsTestsMixin):
<add>class TestNanFunctions_SumProd(SharedNanFunctionsTestsMixin):
<ide>
<ide> nanfuncs = [np.nansum, np.nanprod]
<ide> stdfuncs = [np.sum, np.prod]
<ide> def test_empty(self):
<ide> assert_equal(res, tgt)
<ide>
<ide>
<del>class TestNanFunctions_CumSumProd(TestCase, SharedNanFunctionsTestsMixin):
<add>class TestNanFunctions_CumSumProd(SharedNanFunctionsTestsMixin):
<ide>
<ide> nanfuncs = [np.nancumsum, np.nancumprod]
<ide> stdfuncs = [np.cumsum, np.cumprod]
<ide> def test_out(self):
<ide> assert_almost_equal(res, tgt)
<ide>
<ide>
<del>class TestNanFunctions_MeanVarStd(TestCase, SharedNanFunctionsTestsMixin):
<add>class TestNanFunctions_MeanVarStd(SharedNanFunctionsTestsMixin):
<ide>
<ide> nanfuncs = [np.nanmean, np.nanvar, np.nanstd]
<ide> stdfuncs = [np.mean, np.var, np.std]
<ide> def test_empty(self):
<ide> assert_(len(w) == 0)
<ide>
<ide>
<del>class TestNanFunctions_Median(TestCase):
<add>class TestNanFunctions_Median(object):
<ide>
<ide> def test_mutation(self):
<ide> # Check that passed array is not modified.
<ide> def test_float_special(self):
<ide> ([np.nan] * i) + [-inf] * j)
<ide>
<ide>
<del>class TestNanFunctions_Percentile(TestCase):
<add>class TestNanFunctions_Percentile(object):
<ide>
<ide> def test_mutation(self):
<ide> # Check that passed array is not modified.
<ide><path>numpy/lib/tests/test_polynomial.py
<ide> '''
<ide> import numpy as np
<ide> from numpy.testing import (
<del> run_module_suite, TestCase, assert_, assert_equal, assert_array_equal,
<add> run_module_suite, assert_, assert_equal, assert_array_equal,
<ide> assert_almost_equal, assert_array_almost_equal, assert_raises, rundocs
<ide> )
<ide>
<ide>
<del>class TestDocs(TestCase):
<add>class TestDocs(object):
<ide> def test_doctests(self):
<ide> return rundocs()
<ide>
<ide><path>numpy/lib/tests/test_recfunctions.py
<ide> from numpy.ma.mrecords import MaskedRecords
<ide> from numpy.ma.testutils import assert_equal
<ide> from numpy.testing import (
<del> TestCase, run_module_suite, assert_, assert_raises, dec
<del>)
<add> run_module_suite, assert_, assert_raises, dec
<add> )
<ide> from numpy.lib.recfunctions import (
<ide> drop_fields, rename_fields, get_fieldstructure, recursive_fill_fields,
<ide> find_duplicates, merge_arrays, append_fields, stack_arrays, join_by
<ide> zip_descr = np.lib.recfunctions.zip_descr
<ide>
<ide>
<del>class TestRecFunctions(TestCase):
<add>class TestRecFunctions(object):
<ide> # Misc tests
<ide>
<del> def setUp(self):
<add> def setup(self):
<ide> x = np.array([1, 2, ])
<ide> y = np.array([10, 20, 30])
<ide> z = np.array([('A', 1.), ('B', 2.)],
<ide> def test_find_duplicates_ignoremask(self):
<ide> assert_equal(test[0], a[test[-1]])
<ide>
<ide>
<del>class TestRecursiveFillFields(TestCase):
<add>class TestRecursiveFillFields(object):
<ide> # Test recursive_fill_fields.
<ide> def test_simple_flexible(self):
<ide> # Test recursive_fill_fields on flexible-array
<ide> def test_masked_flexible(self):
<ide> assert_equal(test, control)
<ide>
<ide>
<del>class TestMergeArrays(TestCase):
<add>class TestMergeArrays(object):
<ide> # Test merge_arrays
<ide>
<del> def setUp(self):
<add> def setup(self):
<ide> x = np.array([1, 2, ])
<ide> y = np.array([10, 20, 30])
<ide> z = np.array(
<ide> def test_singlerecord(self):
<ide> assert_equal(test, control)
<ide>
<ide>
<del>class TestAppendFields(TestCase):
<add>class TestAppendFields(object):
<ide> # Test append_fields
<ide>
<del> def setUp(self):
<add> def setup(self):
<ide> x = np.array([1, 2, ])
<ide> y = np.array([10, 20, 30])
<ide> z = np.array(
<ide> def test_append_on_nested(self):
<ide> assert_equal(test, control)
<ide>
<ide>
<del>class TestStackArrays(TestCase):
<add>class TestStackArrays(object):
<ide> # Test stack_arrays
<del> def setUp(self):
<add> def setup(self):
<ide> x = np.array([1, 2, ])
<ide> y = np.array([10, 20, 30])
<ide> z = np.array(
<ide> def test_solo(self):
<ide> (_, x, _, _) = self.data
<ide> test = stack_arrays((x,))
<ide> assert_equal(test, x)
<del> self.assertTrue(test is x)
<add> assert_(test is x)
<ide>
<ide> test = stack_arrays(x)
<ide> assert_equal(test, x)
<del> self.assertTrue(test is x)
<add> assert_(test is x)
<ide>
<ide> def test_unnamed_fields(self):
<ide> # Tests combinations of arrays w/o named fields
<ide> def test_subdtype(self):
<ide> assert_equal(res.mask, expected.mask)
<ide>
<ide>
<del>class TestJoinBy(TestCase):
<del> def setUp(self):
<add>class TestJoinBy(object):
<add> def setup(self):
<ide> self.a = np.array(list(zip(np.arange(10), np.arange(50, 60),
<ide> np.arange(100, 110))),
<ide> dtype=[('a', int), ('b', int), ('c', int)])
<ide> def test_padded_dtype(self):
<ide> assert_equal(res.dtype, expected_dtype)
<ide>
<ide>
<del>class TestJoinBy2(TestCase):
<add>class TestJoinBy2(object):
<ide> @classmethod
<del> def setUp(cls):
<add> def setup(cls):
<ide> cls.a = np.array(list(zip(np.arange(10), np.arange(50, 60),
<ide> np.arange(100, 110))),
<ide> dtype=[('a', int), ('b', int), ('c', int)])
<ide> def test_no_r1postfix(self):
<ide> assert_equal(test, control)
<ide>
<ide> def test_no_postfix(self):
<del> self.assertRaises(ValueError, join_by, 'a', self.a, self.b,
<del> r1postfix='', r2postfix='')
<add> assert_raises(ValueError, join_by, 'a', self.a, self.b,
<add> r1postfix='', r2postfix='')
<ide>
<ide> def test_no_r2postfix(self):
<ide> # Basic test of join_by no_r2postfix
<ide> def test_two_keys_two_vars(self):
<ide> assert_equal(test.dtype, control.dtype)
<ide> assert_equal(test, control)
<ide>
<del>class TestAppendFieldsObj(TestCase):
<add>class TestAppendFieldsObj(object):
<ide> """
<ide> Test append_fields with arrays containing objects
<ide> """
<ide> # https://github.com/numpy/numpy/issues/2346
<ide>
<del> def setUp(self):
<add> def setup(self):
<ide> from datetime import date
<ide> self.data = dict(obj=date(2000, 1, 1))
<ide>
<ide><path>numpy/lib/tests/test_regression.py
<ide>
<ide> import numpy as np
<ide> from numpy.testing import (
<del> run_module_suite, TestCase, assert_, assert_equal, assert_array_equal,
<add> run_module_suite, assert_, assert_equal, assert_array_equal,
<ide> assert_array_almost_equal, assert_raises, _assert_valid_refcount,
<ide> )
<ide> from numpy.compat import unicode
<ide>
<ide> rlevel = 1
<ide>
<ide>
<del>class TestRegression(TestCase):
<add>class TestRegression(object):
<ide> def test_poly1d(self, level=rlevel):
<ide> # Ticket #28
<ide> assert_equal(np.poly1d([1]) - np.poly1d([1, 0]),
<ide> def p(x, y):
<ide> def test_poly1d_nan_roots(self, level=rlevel):
<ide> # Ticket #396
<ide> p = np.poly1d([np.nan, np.nan, 1], r=0)
<del> self.assertRaises(np.linalg.LinAlgError, getattr, p, "r")
<add> assert_raises(np.linalg.LinAlgError, getattr, p, "r")
<ide>
<ide> def test_mem_polymul(self, level=rlevel):
<ide> # Ticket #448
<ide> def dp2():
<ide> i = np.random.randint(0, n, size=thesize)
<ide> a[np.ix_(i, i, i, i, i)]
<ide>
<del> self.assertRaises(ValueError, dp)
<del> self.assertRaises(ValueError, dp2)
<add> assert_raises(ValueError, dp)
<add> assert_raises(ValueError, dp2)
<ide>
<ide> def test_void_coercion(self, level=rlevel):
<ide> dt = np.dtype([('a', 'f4'), ('b', 'i4')])
<ide><path>numpy/lib/tests/test_shape_base.py
<ide> vsplit, dstack, column_stack, kron, tile, expand_dims,
<ide> )
<ide> from numpy.testing import (
<del> run_module_suite, TestCase, assert_, assert_equal, assert_array_equal,
<del> assert_raises, assert_warns
<add> run_module_suite, assert_, assert_equal, assert_array_equal, assert_raises,
<add> assert_warns
<ide> )
<ide>
<ide>
<del>class TestApplyAlongAxis(TestCase):
<add>class TestApplyAlongAxis(object):
<ide> def test_simple(self):
<ide> a = np.ones((20, 10), 'd')
<ide> assert_array_equal(
<ide> def test_with_iterable_object(self):
<ide> assert_equal(type(actual[i]), type(expected[i]))
<ide>
<ide>
<del>class TestApplyOverAxes(TestCase):
<add>class TestApplyOverAxes(object):
<ide> def test_simple(self):
<ide> a = np.arange(24).reshape(2, 3, 4)
<ide> aoa_a = apply_over_axes(np.sum, a, [0, 2])
<ide> assert_array_equal(aoa_a, np.array([[[60], [92], [124]]]))
<ide>
<ide>
<del>class TestExpandDims(TestCase):
<add>class TestExpandDims(object):
<ide> def test_functionality(self):
<ide> s = (2, 3, 4, 5)
<ide> a = np.empty(s)
<ide> def test_deprecations(self):
<ide> assert_warns(DeprecationWarning, expand_dims, a, 5)
<ide>
<ide>
<del>class TestArraySplit(TestCase):
<add>class TestArraySplit(object):
<ide> def test_integer_0_split(self):
<ide> a = np.arange(10)
<ide> assert_raises(ValueError, array_split, a, 0)
<ide> def test_index_split_high_bound(self):
<ide> compare_results(res, desired)
<ide>
<ide>
<del>class TestSplit(TestCase):
<add>class TestSplit(object):
<ide> # The split function is essentially the same as array_split,
<ide> # except that it test if splitting will result in an
<ide> # equal split. Only test for this case.
<ide> def test_unequal_split(self):
<ide> a = np.arange(10)
<ide> assert_raises(ValueError, split, a, 3)
<ide>
<del>class TestColumnStack(TestCase):
<add>class TestColumnStack(object):
<ide> def test_non_iterable(self):
<ide> assert_raises(TypeError, column_stack, 1)
<ide>
<ide>
<del>class TestDstack(TestCase):
<add>class TestDstack(object):
<ide> def test_non_iterable(self):
<ide> assert_raises(TypeError, dstack, 1)
<ide>
<ide> def test_2D_array2(self):
<ide>
<ide> # array_split has more comprehensive test of splitting.
<ide> # only do simple test on hsplit, vsplit, and dsplit
<del>class TestHsplit(TestCase):
<add>class TestHsplit(object):
<ide> """Only testing for integer splits.
<ide>
<ide> """
<ide> def test_2D_array(self):
<ide> compare_results(res, desired)
<ide>
<ide>
<del>class TestVsplit(TestCase):
<add>class TestVsplit(object):
<ide> """Only testing for integer splits.
<ide>
<ide> """
<ide> def test_2D_array(self):
<ide> compare_results(res, desired)
<ide>
<ide>
<del>class TestDsplit(TestCase):
<add>class TestDsplit(object):
<ide> # Only testing for integer splits.
<ide> def test_non_iterable(self):
<ide> assert_raises(ValueError, dsplit, 1, 1)
<ide> def test_3D_array(self):
<ide> compare_results(res, desired)
<ide>
<ide>
<del>class TestSqueeze(TestCase):
<add>class TestSqueeze(object):
<ide> def test_basic(self):
<ide> from numpy.random import rand
<ide>
<ide> def test_basic(self):
<ide> assert_equal(type(res), np.ndarray)
<ide>
<ide>
<del>class TestKron(TestCase):
<add>class TestKron(object):
<ide> def test_return_type(self):
<ide> a = np.ones([2, 2])
<ide> m = np.asmatrix(a)
<ide> class myarray(np.ndarray):
<ide> assert_equal(type(kron(ma, a)), myarray)
<ide>
<ide>
<del>class TestTile(TestCase):
<add>class TestTile(object):
<ide> def test_basic(self):
<ide> a = np.array([0, 1, 2])
<ide> b = [[1, 2], [3, 4]]
<ide> def test_kroncompare(self):
<ide> assert_equal(large, klarge)
<ide>
<ide>
<del>class TestMayShareMemory(TestCase):
<add>class TestMayShareMemory(object):
<ide> def test_basic(self):
<ide> d = np.ones((50, 60))
<ide> d2 = np.ones((30, 60, 6))
<del> self.assertTrue(np.may_share_memory(d, d))
<del> self.assertTrue(np.may_share_memory(d, d[::-1]))
<del> self.assertTrue(np.may_share_memory(d, d[::2]))
<del> self.assertTrue(np.may_share_memory(d, d[1:, ::-1]))
<del>
<del> self.assertFalse(np.may_share_memory(d[::-1], d2))
<del> self.assertFalse(np.may_share_memory(d[::2], d2))
<del> self.assertFalse(np.may_share_memory(d[1:, ::-1], d2))
<del> self.assertTrue(np.may_share_memory(d2[1:, ::-1], d2))
<add> assert_(np.may_share_memory(d, d))
<add> assert_(np.may_share_memory(d, d[::-1]))
<add> assert_(np.may_share_memory(d, d[::2]))
<add> assert_(np.may_share_memory(d, d[1:, ::-1]))
<add>
<add> assert_(not np.may_share_memory(d[::-1], d2))
<add> assert_(not np.may_share_memory(d[::2], d2))
<add> assert_(not np.may_share_memory(d[1:, ::-1], d2))
<add> assert_(np.may_share_memory(d2[1:, ::-1], d2))
<ide>
<ide>
<ide> # Utility
<ide><path>numpy/lib/tests/test_twodim_base.py
<ide> from __future__ import division, absolute_import, print_function
<ide>
<ide> from numpy.testing import (
<del> TestCase, run_module_suite, assert_equal, assert_array_equal,
<del> assert_array_max_ulp, assert_array_almost_equal, assert_raises,
<add> run_module_suite, assert_equal, assert_array_equal, assert_array_max_ulp,
<add> assert_array_almost_equal, assert_raises,
<ide> )
<ide>
<ide> from numpy import (
<ide> def get_mat(n):
<ide> return data
<ide>
<ide>
<del>class TestEye(TestCase):
<add>class TestEye(object):
<ide> def test_basic(self):
<ide> assert_equal(eye(4),
<ide> array([[1, 0, 0, 0],
<ide> def test_bool(self):
<ide> assert_equal(eye(2, 2, dtype=bool), [[True, False], [False, True]])
<ide>
<ide>
<del>class TestDiag(TestCase):
<add>class TestDiag(object):
<ide> def test_vector(self):
<ide> vals = (100 * arange(5)).astype('l')
<ide> b = zeros((5, 5))
<ide> def test_diag_bounds(self):
<ide> assert_equal(diag(A, k=-3), [])
<ide>
<ide> def test_failure(self):
<del> self.assertRaises(ValueError, diag, [[[1]]])
<add> assert_raises(ValueError, diag, [[[1]]])
<ide>
<ide>
<del>class TestFliplr(TestCase):
<add>class TestFliplr(object):
<ide> def test_basic(self):
<del> self.assertRaises(ValueError, fliplr, ones(4))
<add> assert_raises(ValueError, fliplr, ones(4))
<ide> a = get_mat(4)
<ide> b = a[:, ::-1]
<ide> assert_equal(fliplr(a), b)
<ide> def test_basic(self):
<ide> assert_equal(fliplr(a), b)
<ide>
<ide>
<del>class TestFlipud(TestCase):
<add>class TestFlipud(object):
<ide> def test_basic(self):
<ide> a = get_mat(4)
<ide> b = a[::-1, :]
<ide> def test_basic(self):
<ide> assert_equal(flipud(a), b)
<ide>
<ide>
<del>class TestHistogram2d(TestCase):
<add>class TestHistogram2d(object):
<ide> def test_simple(self):
<ide> x = array(
<ide> [0.41702200, 0.72032449, 1.1437481e-4, 0.302332573, 0.146755891])
<ide> def test_binparameter_combination(self):
<ide> assert_array_equal(xe, array([0., 0.25, 0.5, 0.75, 1]))
<ide>
<ide>
<del>class TestTri(TestCase):
<add>class TestTri(object):
<ide> def test_dtype(self):
<ide> out = array([[1, 0, 0],
<ide> [1, 1, 0],
<ide> def test_mask_indices():
<ide> # simple test without offset
<ide> iu = mask_indices(3, np.triu)
<ide> a = np.arange(9).reshape(3, 3)
<del> yield (assert_array_equal, a[iu], array([0, 1, 2, 4, 5, 8]))
<add> assert_array_equal(a[iu], array([0, 1, 2, 4, 5, 8]))
<ide> # Now with an offset
<ide> iu1 = mask_indices(3, np.triu, 1)
<del> yield (assert_array_equal, a[iu1], array([1, 2, 5]))
<add> assert_array_equal(a[iu1], array([1, 2, 5]))
<ide>
<ide>
<ide> def test_tril_indices():
<ide> def test_tril_indices():
<ide> b = np.arange(1, 21).reshape(4, 5)
<ide>
<ide> # indexing:
<del> yield (assert_array_equal, a[il1],
<del> array([1, 5, 6, 9, 10, 11, 13, 14, 15, 16]))
<del> yield (assert_array_equal, b[il3],
<del> array([1, 6, 7, 11, 12, 13, 16, 17, 18, 19]))
<add> assert_array_equal(a[il1],
<add> array([1, 5, 6, 9, 10, 11, 13, 14, 15, 16]))
<add> assert_array_equal(b[il3],
<add> array([1, 6, 7, 11, 12, 13, 16, 17, 18, 19]))
<ide>
<ide> # And for assigning values:
<ide> a[il1] = -1
<del> yield (assert_array_equal, a,
<del> array([[-1, 2, 3, 4],
<del> [-1, -1, 7, 8],
<del> [-1, -1, -1, 12],
<del> [-1, -1, -1, -1]]))
<add> assert_array_equal(a,
<add> array([[-1, 2, 3, 4],
<add> [-1, -1, 7, 8],
<add> [-1, -1, -1, 12],
<add> [-1, -1, -1, -1]]))
<ide> b[il3] = -1
<del> yield (assert_array_equal, b,
<del> array([[-1, 2, 3, 4, 5],
<del> [-1, -1, 8, 9, 10],
<del> [-1, -1, -1, 14, 15],
<del> [-1, -1, -1, -1, 20]]))
<add> assert_array_equal(b,
<add> array([[-1, 2, 3, 4, 5],
<add> [-1, -1, 8, 9, 10],
<add> [-1, -1, -1, 14, 15],
<add> [-1, -1, -1, -1, 20]]))
<ide> # These cover almost the whole array (two diagonals right of the main one):
<ide> a[il2] = -10
<del> yield (assert_array_equal, a,
<del> array([[-10, -10, -10, 4],
<del> [-10, -10, -10, -10],
<del> [-10, -10, -10, -10],
<del> [-10, -10, -10, -10]]))
<add> assert_array_equal(a,
<add> array([[-10, -10, -10, 4],
<add> [-10, -10, -10, -10],
<add> [-10, -10, -10, -10],
<add> [-10, -10, -10, -10]]))
<ide> b[il4] = -10
<del> yield (assert_array_equal, b,
<del> array([[-10, -10, -10, 4, 5],
<del> [-10, -10, -10, -10, 10],
<del> [-10, -10, -10, -10, -10],
<del> [-10, -10, -10, -10, -10]]))
<add> assert_array_equal(b,
<add> array([[-10, -10, -10, 4, 5],
<add> [-10, -10, -10, -10, 10],
<add> [-10, -10, -10, -10, -10],
<add> [-10, -10, -10, -10, -10]]))
<ide>
<ide>
<ide> class TestTriuIndices(object):
<ide> def test_triu_indices(self):
<ide> b = np.arange(1, 21).reshape(4, 5)
<ide>
<ide> # Both for indexing:
<del> yield (assert_array_equal, a[iu1],
<del> array([1, 2, 3, 4, 6, 7, 8, 11, 12, 16]))
<del> yield (assert_array_equal, b[iu3],
<del> array([1, 2, 3, 4, 5, 7, 8, 9, 10, 13, 14, 15, 19, 20]))
<add> assert_array_equal(a[iu1],
<add> array([1, 2, 3, 4, 6, 7, 8, 11, 12, 16]))
<add> assert_array_equal(b[iu3],
<add> array([1, 2, 3, 4, 5, 7, 8, 9,
<add> 10, 13, 14, 15, 19, 20]))
<ide>
<ide> # And for assigning values:
<ide> a[iu1] = -1
<del> yield (assert_array_equal, a,
<del> array([[-1, -1, -1, -1],
<del> [5, -1, -1, -1],
<del> [9, 10, -1, -1],
<del> [13, 14, 15, -1]]))
<add> assert_array_equal(a,
<add> array([[-1, -1, -1, -1],
<add> [5, -1, -1, -1],
<add> [9, 10, -1, -1],
<add> [13, 14, 15, -1]]))
<ide> b[iu3] = -1
<del> yield (assert_array_equal, b,
<del> array([[-1, -1, -1, -1, -1],
<del> [6, -1, -1, -1, -1],
<del> [11, 12, -1, -1, -1],
<del> [16, 17, 18, -1, -1]]))
<add> assert_array_equal(b,
<add> array([[-1, -1, -1, -1, -1],
<add> [6, -1, -1, -1, -1],
<add> [11, 12, -1, -1, -1],
<add> [16, 17, 18, -1, -1]]))
<ide>
<ide> # These cover almost the whole array (two diagonals right of the
<ide> # main one):
<ide> a[iu2] = -10
<del> yield (assert_array_equal, a,
<del> array([[-1, -1, -10, -10],
<del> [5, -1, -1, -10],
<del> [9, 10, -1, -1],
<del> [13, 14, 15, -1]]))
<add> assert_array_equal(a,
<add> array([[-1, -1, -10, -10],
<add> [5, -1, -1, -10],
<add> [9, 10, -1, -1],
<add> [13, 14, 15, -1]]))
<ide> b[iu4] = -10
<del> yield (assert_array_equal, b,
<del> array([[-1, -1, -10, -10, -10],
<del> [6, -1, -1, -10, -10],
<del> [11, 12, -1, -1, -10],
<del> [16, 17, 18, -1, -1]]))
<add> assert_array_equal(b,
<add> array([[-1, -1, -10, -10, -10],
<add> [6, -1, -1, -10, -10],
<add> [11, 12, -1, -1, -10],
<add> [16, 17, 18, -1, -1]]))
<ide>
<ide>
<ide> class TestTrilIndicesFrom(object):
<ide><path>numpy/lib/tests/test_type_check.py
<ide> import numpy as np
<ide> from numpy.compat import long
<ide> from numpy.testing import (
<del> TestCase, assert_, assert_equal, assert_array_equal, run_module_suite
<add> assert_, assert_equal, assert_array_equal, run_module_suite
<ide> )
<ide> from numpy.lib.type_check import (
<ide> common_type, mintypecode, isreal, iscomplex, isposinf, isneginf,
<ide> def assert_all(x):
<ide> assert_(np.all(x), x)
<ide>
<ide>
<del>class TestCommonType(TestCase):
<add>class TestCommonType(object):
<ide> def test_basic(self):
<ide> ai32 = np.array([[1, 2], [3, 4]], dtype=np.int32)
<ide> af16 = np.array([[1, 2], [3, 4]], dtype=np.float16)
<ide> def test_basic(self):
<ide> assert_(common_type(acd) == np.cdouble)
<ide>
<ide>
<del>class TestMintypecode(TestCase):
<add>class TestMintypecode(object):
<ide>
<ide> def test_default_1(self):
<ide> for itype in '1bcsuwil':
<ide> def test_default_3(self):
<ide> assert_equal(mintypecode('idD'), 'D')
<ide>
<ide>
<del>class TestIsscalar(TestCase):
<add>class TestIsscalar(object):
<ide>
<ide> def test_basic(self):
<ide> assert_(np.isscalar(3))
<ide> def test_basic(self):
<ide> assert_(np.isscalar(4.0))
<ide>
<ide>
<del>class TestReal(TestCase):
<add>class TestReal(object):
<ide>
<ide> def test_real(self):
<ide> y = np.random.rand(10,)
<ide> def test_cmplx(self):
<ide> assert_(not isinstance(out, np.ndarray))
<ide>
<ide>
<del>class TestImag(TestCase):
<add>class TestImag(object):
<ide>
<ide> def test_real(self):
<ide> y = np.random.rand(10,)
<ide> def test_cmplx(self):
<ide> assert_(not isinstance(out, np.ndarray))
<ide>
<ide>
<del>class TestIscomplex(TestCase):
<add>class TestIscomplex(object):
<ide>
<ide> def test_fail(self):
<ide> z = np.array([-1, 0, 1])
<ide> def test_pass(self):
<ide> assert_array_equal(res, [1, 0, 0])
<ide>
<ide>
<del>class TestIsreal(TestCase):
<add>class TestIsreal(object):
<ide>
<ide> def test_pass(self):
<ide> z = np.array([-1, 0, 1j])
<ide> def test_fail(self):
<ide> assert_array_equal(res, [0, 1, 1])
<ide>
<ide>
<del>class TestIscomplexobj(TestCase):
<add>class TestIscomplexobj(object):
<ide>
<ide> def test_basic(self):
<ide> z = np.array([-1, 0, 1])
<ide> def dtype(self):
<ide> assert_(iscomplexobj(a))
<ide>
<ide>
<del>class TestIsrealobj(TestCase):
<add>class TestIsrealobj(object):
<ide> def test_basic(self):
<ide> z = np.array([-1, 0, 1])
<ide> assert_(isrealobj(z))
<ide> z = np.array([-1j, 0, -1])
<ide> assert_(not isrealobj(z))
<ide>
<ide>
<del>class TestIsnan(TestCase):
<add>class TestIsnan(object):
<ide>
<ide> def test_goodvalues(self):
<ide> z = np.array((-1., 0., 1.))
<ide> def test_complex1(self):
<ide> assert_all(np.isnan(np.array(0+0j)/0.) == 1)
<ide>
<ide>
<del>class TestIsfinite(TestCase):
<add>class TestIsfinite(object):
<ide> # Fixme, wrong place, isfinite now ufunc
<ide>
<ide> def test_goodvalues(self):
<ide> def test_complex1(self):
<ide> assert_all(np.isfinite(np.array(1+1j)/0.) == 0)
<ide>
<ide>
<del>class TestIsinf(TestCase):
<add>class TestIsinf(object):
<ide> # Fixme, wrong place, isinf now ufunc
<ide>
<ide> def test_goodvalues(self):
<ide> def test_ind(self):
<ide> assert_all(np.isinf(np.array((0.,))/0.) == 0)
<ide>
<ide>
<del>class TestIsposinf(TestCase):
<add>class TestIsposinf(object):
<ide>
<ide> def test_generic(self):
<ide> with np.errstate(divide='ignore', invalid='ignore'):
<ide> def test_generic(self):
<ide> assert_(vals[2] == 1)
<ide>
<ide>
<del>class TestIsneginf(TestCase):
<add>class TestIsneginf(object):
<ide>
<ide> def test_generic(self):
<ide> with np.errstate(divide='ignore', invalid='ignore'):
<ide> def test_generic(self):
<ide> assert_(vals[2] == 0)
<ide>
<ide>
<del>class TestNanToNum(TestCase):
<add>class TestNanToNum(object):
<ide>
<ide> def test_generic(self):
<ide> with np.errstate(divide='ignore', invalid='ignore'):
<ide> def test_complex_bad2(self):
<ide> #assert_all(vals.real < -1e10) and assert_all(np.isfinite(vals))
<ide>
<ide>
<del>class TestRealIfClose(TestCase):
<add>class TestRealIfClose(object):
<ide>
<ide> def test_basic(self):
<ide> a = np.random.rand(10)
<ide> def test_basic(self):
<ide> assert_all(isrealobj(b))
<ide>
<ide>
<del>class TestArrayConversion(TestCase):
<add>class TestArrayConversion(object):
<ide>
<ide> def test_asfarray(self):
<ide> a = asfarray(np.array([1, 2, 3]))
<ide><path>numpy/lib/tests/test_ufunclike.py
<ide> import numpy.core as nx
<ide> import numpy.lib.ufunclike as ufl
<ide> from numpy.testing import (
<del> run_module_suite, TestCase, assert_, assert_equal, assert_array_equal,
<del> assert_warns
<add> run_module_suite, assert_, assert_equal, assert_array_equal, assert_warns
<ide> )
<ide>
<ide>
<del>class TestUfunclike(TestCase):
<add>class TestUfunclike(object):
<ide>
<ide> def test_isposinf(self):
<ide> a = nx.array([nx.inf, -nx.inf, nx.nan, 0.0, 3.0, -3.0]) | 17 |
Text | Text | propose the usual way of styling buttons | 6671151e03d3230a2bd40ad6467be03d5f1a4525 | <ide><path>guide/english/css/css-buttons/index.md
<ide> To animate a button on click use 'button:active':
<ide> }
<ide> ```
<ide>
<add>### Button Links
<add>
<add>In many cases buttons will have to link to an url. As we can't add an href attribute to the HTML button element without some JavaScript workarounds we need to style our link like a button. This way we have a working <a> element disguised as a button, yet we can still use this class to style buttons. For example:
<add>
<add>```
<add>.btn {
<add> display: inline-block;
<add> text-decoration: none;
<add> min-height: 40px;
<add> padding: 20px 10px;
<add> background-color: lightblue;
<add>}
<add>
<add>// Prevent visited links from turning purple
<add>.btn:visited {
<add> color: black;
<add>}
<add>```
<add>
<ide> #### More Information:
<ide> * https://www.w3schools.com/css/css3_buttons.asp
<ide> * https://www.w3schools.com/howto/howto_css_animate_buttons.asp | 1 |
Python | Python | support empty arrays in setxor1d and setdiff1d | bdadc1ba29ea2852a31233aa521b59c7e223f31f | <ide><path>numpy/lib/arraysetops.py
<ide> def intersect1d_nu( ar1, ar2 ):
<ide> def setxor1d( ar1, ar2 ):
<ide> """Set exclusive-or of 1D arrays with unique elements."""
<ide> aux = numpy.concatenate((ar1, ar2))
<add> if aux.size == 0:
<add> return aux
<add>
<ide> aux.sort()
<ide> flag = ediff1d(aux, to_end = 1, to_begin = 1) == 0
<ide> flag2 = ediff1d(flag) == 0
<ide> def union1d( ar1, ar2 ):
<ide> # 03.11.2005, c
<ide> def setdiff1d( ar1, ar2 ):
<ide> """Set difference of 1D arrays with unique elements."""
<del> aux = setmember1d( ar1, ar2 )
<del> return ar1.compress(aux == 0)
<add> aux = setmember1d(ar1,ar2)
<add> if aux.size == 0:
<add> return aux
<add> else:
<add> return numpy.asarray(ar1).compress(aux == 0)
<ide>
<ide> ##
<ide> # 02.11.2005, c | 1 |
Javascript | Javascript | add unit test for possible state | e29fbe0e225ed9a76a1f128ede3542f4f3b10bf5 | <ide><path>packages/sproutcore-touch/tests/system/gesture_support.js
<ide> var set = SC.set;
<ide> var get = SC.get;
<ide> var application = null;
<add>var view;
<ide>
<ide> function generateTouchEvent(touches) {
<ide>
<ide> module("Test Gesture Recognizer",{
<ide>
<ide> teardown: function() {
<ide> application.destroy();
<add> if(view) view.destroy();
<ide> }
<ide> });
<ide>
<ide> test("gesturable views that implement pinch methods get a pinch recognizer", fun
<ide>
<ide> test("when finger touches inside, gesture should be in waiting state", function() {
<ide> var numStart = 0;
<del> var view = SC.View.create({
<add> view = SC.View.create({
<ide> elementId: 'gestureTest',
<ide>
<ide> pinchStart: function(evt) {
<ide> test("when finger touches inside, gesture should be in waiting state", function(
<ide> equals(gestures.length,1);
<ide> equals(get(gestures[0], 'state'),SC.Gesture.WAITING_FOR_TOUCHES, "gesture should be waiting");
<ide> });
<add>
<add>test("when 2 fingers touch inside, gesture should be in possible state", function() {
<add> var numStart = 0;
<add> view = SC.View.create({
<add> elementId: 'gestureTest',
<add>
<add> pinchStart: function(evt) {
<add> console.log('pinchstart in view');
<add> numStart++;
<add> },
<add>
<add> touchStart: function(evt) {
<add> console.log('2: touchStart in view');
<add> numStart++;
<add> }
<add> });
<add>
<add> SC.run(function(){
<add> view.append();
<add> });
<add>
<add> var touchEvent = new jQuery.Event();
<add>
<add> touchEvent.type='touchstart';
<add> touchEvent['originalEvent'] = {
<add> targetTouches: [{
<add> pageX: 0,
<add> pageY: 0
<add> },
<add> {
<add> pageX: 10,
<add> pageY: 10
<add> }]
<add> };
<add> view.$().trigger(touchEvent);
<add>
<add> var gestures = get(get(view, 'eventManager'), 'gestures');
<add>
<add> ok(gestures);
<add> equals(gestures.length,1);
<add> equals(get(gestures[0], 'state'),SC.Gesture.POSSIBLE, "gesture should be possible");
<add>}); | 1 |
Javascript | Javascript | fix typo in _resetsubcontrollers | 5f43e5bee2ac922240ad4d2c4bd0e218777e784e | <ide><path>packages/ember-runtime/lib/controllers/array_controller.js
<ide> Ember.ArrayController = Ember.ArrayProxy.extend(Ember.ControllerMixin,
<ide> var subControllers = get(this, '_subControllers');
<ide>
<ide> forEach(subControllers, function(subController) {
<del> if (subControllers) { subController.destroy(); }
<add> if (subController) { subController.destroy(); }
<ide> });
<ide>
<ide> this.set('_subControllers', Ember.A()); | 1 |
Python | Python | fix mypy issues in airflow/jobs | 9844b910470feac9e524832a3affdc657c340bcd | <ide><path>airflow/jobs/backfill_job.py
<ide> from airflow.utils import helpers, timezone
<ide> from airflow.utils.configuration import conf as airflow_conf, tmp_configuration_copy
<ide> from airflow.utils.session import provide_session
<del>from airflow.utils.state import State
<add>from airflow.utils.state import DagRunState, State, TaskInstanceState
<ide> from airflow.utils.types import DagRunType
<ide>
<ide>
<ide> def _update_counters(self, ti_status, session=None):
<ide> for ti in refreshed_tis:
<ide> # Here we remake the key by subtracting 1 to match in memory information
<ide> reduced_key = ti.key.reduced
<del> if ti.state == State.SUCCESS:
<add> if ti.state == TaskInstanceState.SUCCESS:
<ide> ti_status.succeeded.add(reduced_key)
<ide> self.log.debug("Task instance %s succeeded. Don't rerun.", ti)
<ide> ti_status.running.pop(reduced_key)
<ide> continue
<del> if ti.state == State.SKIPPED:
<add> if ti.state == TaskInstanceState.SKIPPED:
<ide> ti_status.skipped.add(reduced_key)
<ide> self.log.debug("Task instance %s skipped. Don't rerun.", ti)
<ide> ti_status.running.pop(reduced_key)
<ide> continue
<del> if ti.state == State.FAILED:
<add> if ti.state == TaskInstanceState.FAILED:
<ide> self.log.error("Task instance %s failed", ti)
<ide> ti_status.failed.add(reduced_key)
<ide> ti_status.running.pop(reduced_key)
<ide> continue
<ide> # special case: if the task needs to run again put it back
<del> if ti.state == State.UP_FOR_RETRY:
<add> if ti.state == TaskInstanceState.UP_FOR_RETRY:
<ide> self.log.warning("Task instance %s is up for retry", ti)
<ide> ti_status.running.pop(reduced_key)
<ide> ti_status.to_run[ti.key] = ti
<ide> # special case: if the task needs to be rescheduled put it back
<del> elif ti.state == State.UP_FOR_RESCHEDULE:
<add> elif ti.state == TaskInstanceState.UP_FOR_RESCHEDULE:
<ide> self.log.warning("Task instance %s is up for reschedule", ti)
<ide> # During handling of reschedule state in ti._handle_reschedule, try number is reduced
<ide> # by one, so we should not use reduced_key to avoid key error
<ide> def _update_counters(self, ti_status, session=None):
<ide> if tis_to_be_scheduled:
<ide> filter_for_tis = TI.filter_for_tis(tis_to_be_scheduled)
<ide> session.query(TI).filter(filter_for_tis).update(
<del> values={TI.state: State.SCHEDULED}, synchronize_session=False
<add> values={TI.state: TaskInstanceState.SCHEDULED}, synchronize_session=False
<ide> )
<ide>
<ide> def _manage_executor_state(self, running):
<ide> def _manage_executor_state(self, running):
<ide>
<ide> self.log.debug("Executor state: %s task %s", state, ti)
<ide>
<del> if state in (State.FAILED, State.SUCCESS) and ti.state in self.STATES_COUNT_AS_RUNNING:
<add> if (
<add> state in (TaskInstanceState.FAILED, TaskInstanceState.SUCCESS)
<add> and ti.state in self.STATES_COUNT_AS_RUNNING
<add> ):
<ide> msg = (
<ide> f"Executor reports task instance {ti} finished ({state}) although the task says its "
<ide> f"{ti.state}. Was the task killed externally? Info: {info}"
<ide> def _get_dag_run(self, dagrun_info: DagRunInfo, dag: DAG, session: Session = Non
<ide> run: Optional[DagRun]
<ide> if runs:
<ide> run = runs[0]
<del> if run.state == State.RUNNING:
<add> if run.state == DagRunState.RUNNING:
<ide> respect_dag_max_active_limit = False
<ide> else:
<ide> run = None
<ide> def _get_dag_run(self, dagrun_info: DagRunInfo, dag: DAG, session: Session = Non
<ide> execution_date=run_date,
<ide> data_interval=dagrun_info.data_interval,
<ide> start_date=timezone.utcnow(),
<del> state=State.RUNNING,
<add> state=DagRunState.RUNNING,
<ide> external_trigger=False,
<ide> session=session,
<ide> conf=self.conf,
<ide> def _get_dag_run(self, dagrun_info: DagRunInfo, dag: DAG, session: Session = Non
<ide> run.dag = dag
<ide>
<ide> # explicitly mark as backfill and running
<del> run.state = State.RUNNING
<add> run.state = DagRunState.RUNNING
<ide> run.run_type = DagRunType.BACKFILL_JOB
<ide> run.verify_integrity(session=session)
<ide> return run
<ide> def _task_instances_for_dag_run(self, dag_run, session=None):
<ide> for ti in dag_run.get_task_instances():
<ide> # all tasks part of the backfill are scheduled to run
<ide> if ti.state == State.NONE:
<del> ti.set_state(State.SCHEDULED, session=session)
<del> if ti.state != State.REMOVED:
<add> ti.set_state(TaskInstanceState.SCHEDULED, session=session)
<add> if ti.state != TaskInstanceState.REMOVED:
<ide> tasks_to_run[ti.key] = ti
<ide> session.commit()
<ide> except Exception:
<ide> def _per_task_process(key, ti: TaskInstance, session=None):
<ide>
<ide> # The task was already marked successful or skipped by a
<ide> # different Job. Don't rerun it.
<del> if ti.state == State.SUCCESS:
<add> if ti.state == TaskInstanceState.SUCCESS:
<ide> ti_status.succeeded.add(key)
<ide> self.log.debug("Task instance %s succeeded. Don't rerun.", ti)
<ide> ti_status.to_run.pop(key)
<ide> if key in ti_status.running:
<ide> ti_status.running.pop(key)
<ide> return
<del> elif ti.state == State.SKIPPED:
<add> elif ti.state == TaskInstanceState.SKIPPED:
<ide> ti_status.skipped.add(key)
<ide> self.log.debug("Task instance %s skipped. Don't rerun.", ti)
<ide> ti_status.to_run.pop(key)
<ide> def _per_task_process(key, ti: TaskInstance, session=None):
<ide> self.log.warning(
<ide> "FIXME: Task instance %s state was set to None externally. This should not happen", ti
<ide> )
<del> ti.set_state(State.SCHEDULED, session=session)
<add> ti.set_state(TaskInstanceState.SCHEDULED, session=session)
<ide> if self.rerun_failed_tasks:
<ide> # Rerun failed tasks or upstreamed failed tasks
<del> if ti.state in (State.FAILED, State.UPSTREAM_FAILED):
<add> if ti.state in (TaskInstanceState.FAILED, TaskInstanceState.UPSTREAM_FAILED):
<ide> self.log.error("Task instance %s with state %s", ti, ti.state)
<ide> if key in ti_status.running:
<ide> ti_status.running.pop(key)
<ide> # Reset the failed task in backfill to scheduled state
<del> ti.set_state(State.SCHEDULED, session=session)
<add> ti.set_state(TaskInstanceState.SCHEDULED, session=session)
<ide> else:
<ide> # Default behaviour which works for subdag.
<del> if ti.state in (State.FAILED, State.UPSTREAM_FAILED):
<add> if ti.state in (TaskInstanceState.FAILED, TaskInstanceState.UPSTREAM_FAILED):
<ide> self.log.error("Task instance %s with state %s", ti, ti.state)
<ide> ti_status.failed.add(key)
<ide> ti_status.to_run.pop(key)
<ide> def _per_task_process(key, ti: TaskInstance, session=None):
<ide> else:
<ide> self.log.debug('Sending %s to executor', ti)
<ide> # Skip scheduled state, we are executing immediately
<del> ti.state = State.QUEUED
<add> ti.state = TaskInstanceState.QUEUED
<ide> ti.queued_by_job_id = self.id
<ide> ti.queued_dttm = timezone.utcnow()
<ide> session.merge(ti)
<ide> def _per_task_process(key, ti: TaskInstance, session=None):
<ide> session.commit()
<ide> return
<ide>
<del> if ti.state == State.UPSTREAM_FAILED:
<add> if ti.state == TaskInstanceState.UPSTREAM_FAILED:
<ide> self.log.error("Task instance %s upstream failed", ti)
<ide> ti_status.failed.add(key)
<ide> ti_status.to_run.pop(key)
<ide> def _per_task_process(key, ti: TaskInstance, session=None):
<ide> return
<ide>
<ide> # special case
<del> if ti.state == State.UP_FOR_RETRY:
<add> if ti.state == TaskInstanceState.UP_FOR_RETRY:
<ide> self.log.debug("Task instance %s retry period not expired yet", ti)
<ide> if key in ti_status.running:
<ide> ti_status.running.pop(key)
<ide> ti_status.to_run[key] = ti
<ide> return
<ide>
<ide> # special case
<del> if ti.state == State.UP_FOR_RESCHEDULE:
<add> if ti.state == TaskInstanceState.UP_FOR_RESCHEDULE:
<ide> self.log.debug("Task instance %s reschedule period not expired yet", ti)
<ide> if key in ti_status.running:
<ide> ti_status.running.pop(key)
<ide> def _set_unfinished_dag_runs_to_failed(self, dag_runs, session=None):
<ide> for dag_run in dag_runs:
<ide> dag_run.update_state()
<ide> if dag_run.state not in State.finished:
<del> dag_run.set_state(State.FAILED)
<add> dag_run.set_state(DagRunState.FAILED)
<ide> session.merge(dag_run)
<ide>
<ide> @provide_session
<ide> def reset_state_for_orphaned_tasks(self, filter_by_dag_run=None, session=None):
<ide> running_tis = self.executor.running
<ide>
<ide> # Can't use an update here since it doesn't support joins.
<del> resettable_states = [State.SCHEDULED, State.QUEUED]
<add> resettable_states = [TaskInstanceState.SCHEDULED, TaskInstanceState.QUEUED]
<ide> if filter_by_dag_run is None:
<ide> resettable_tis = (
<ide> session.query(TaskInstance)
<ide> .join(TaskInstance.dag_run)
<ide> .filter(
<del> DagRun.state == State.RUNNING,
<add> DagRun.state == DagRunState.RUNNING,
<ide> DagRun.run_type != DagRunType.BACKFILL_JOB,
<ide> TaskInstance.state.in_(resettable_states),
<ide> )
<ide><path>airflow/jobs/local_task_job.py
<ide> def __init__(
<ide> self.pickle_id = pickle_id
<ide> self.mark_success = mark_success
<ide> self.external_executor_id = external_executor_id
<del> self.task_runner = None
<ide>
<ide> # terminating state is used so that a job don't try to
<ide> # terminate multiple times
<ide><path>airflow/jobs/scheduler_job.py
<ide> # specific language governing permissions and limitations
<ide> # under the License.
<ide> #
<del>import datetime
<ide> import itertools
<ide> import logging
<ide> import multiprocessing
<ide> def __init__(
<ide> num_times_parse_dags: int = -1,
<ide> scheduler_idle_sleep_time: float = conf.getfloat('scheduler', 'scheduler_idle_sleep_time'),
<ide> do_pickle: bool = False,
<del> log: logging.Logger = None,
<add> log: Optional[logging.Logger] = None,
<ide> processor_poll_interval: Optional[float] = None,
<ide> *args,
<ide> **kwargs,
<ide> def _executable_task_instances_to_queued(self, max_tis: int, session: Session =
<ide> .filter(DR.run_type != DagRunType.BACKFILL_JOB, DR.state == DagRunState.RUNNING)
<ide> .join(TI.dag_model)
<ide> .filter(not_(DM.is_paused))
<del> .filter(TI.state == State.SCHEDULED)
<add> .filter(TI.state == TaskInstanceState.SCHEDULED)
<ide> .options(selectinload('dag_model'))
<ide> .order_by(-TI.priority_weight, DR.execution_date)
<ide> )
<ide> def _executable_task_instances_to_queued(self, max_tis: int, session: Session =
<ide> session.query(TI).filter(filter_for_tis).update(
<ide> # TODO[ha]: should we use func.now()? How does that work with DB timezone
<ide> # on mysql when it's not UTC?
<del> {TI.state: State.QUEUED, TI.queued_dttm: timezone.utcnow(), TI.queued_by_job_id: self.id},
<add> {
<add> TI.state: TaskInstanceState.QUEUED,
<add> TI.queued_dttm: timezone.utcnow(),
<add> TI.queued_by_job_id: self.id,
<add> },
<ide> synchronize_session=False,
<ide> )
<ide>
<ide> def _process_executor_events(self, session: Session = None) -> int:
<ide> """Respond to executor events."""
<ide> if not self.processor_agent:
<ide> raise ValueError("Processor agent is not started.")
<del> ti_primary_key_to_try_number_map: Dict[Tuple[str, str, datetime.datetime], int] = {}
<add> ti_primary_key_to_try_number_map: Dict[Tuple[str, str, str], int] = {}
<ide> event_buffer = self.executor.get_event_buffer()
<ide> tis_with_right_state: List[TaskInstanceKey] = []
<ide>
<ide> def _process_executor_events(self, session: Session = None) -> int:
<ide> state,
<ide> ti_key.try_number,
<ide> )
<del> if state in (State.FAILED, State.SUCCESS, State.QUEUED):
<add> if state in (TaskInstanceState.FAILED, TaskInstanceState.SUCCESS, TaskInstanceState.QUEUED):
<ide> tis_with_right_state.append(ti_key)
<ide>
<ide> # Return if no finished tasks
<ide> def _process_executor_events(self, session: Session = None) -> int:
<ide> state, info = event_buffer.pop(buffer_key)
<ide>
<ide> # TODO: should we fail RUNNING as well, as we do in Backfills?
<del> if state == State.QUEUED:
<add> if state == TaskInstanceState.QUEUED:
<ide> ti.external_executor_id = info
<ide> self.log.info("Setting external_id for %s to %s", ti, info)
<ide> continue
<ide> def _run_scheduler_loop(self) -> None:
<ide> # If the scheduler is doing things, don't sleep. This means when there is work to do, the
<ide> # scheduler will run "as quick as possible", but when it's stopped, it can sleep, dropping CPU
<ide> # usage when "idle"
<del> time.sleep(min(self._scheduler_idle_sleep_time, next_event))
<add> time.sleep(min(self._scheduler_idle_sleep_time, next_event if next_event else 0))
<ide>
<ide> if loop_count >= self.num_runs > 0:
<ide> self.log.info(
<ide> def _do_scheduling(self, session) -> int:
<ide>
<ide> self._start_queued_dagruns(session)
<ide> guard.commit()
<del> dag_runs = self._get_next_dagruns_to_examine(State.RUNNING, session)
<add> dag_runs = self._get_next_dagruns_to_examine(DagRunState.RUNNING, session)
<ide> # Bulk fetch the currently active dag runs for the dags we are
<ide> # examining, rather than making one query per DagRun
<ide>
<ide> def _create_dag_runs(self, dag_models: Collection[DagModel], session: Session) -
<ide> dag.create_dagrun(
<ide> run_type=DagRunType.SCHEDULED,
<ide> execution_date=dag_model.next_dagrun,
<del> state=State.QUEUED,
<add> state=DagRunState.QUEUED,
<ide> data_interval=data_interval,
<ide> external_trigger=False,
<ide> session=session,
<ide> def _should_update_dag_next_dagruns(self, dag, dag_model: DagModel, total_active
<ide> def _start_queued_dagruns(
<ide> self,
<ide> session: Session,
<del> ) -> int:
<add> ) -> None:
<ide> """Find DagRuns in queued state and decide moving them to running state"""
<del> dag_runs = self._get_next_dagruns_to_examine(State.QUEUED, session)
<add> dag_runs = self._get_next_dagruns_to_examine(DagRunState.QUEUED, session)
<ide>
<ide> active_runs_of_dags = defaultdict(
<ide> int,
<ide> DagRun.active_runs_of_dags((dr.dag_id for dr in dag_runs), only_running=True, session=session),
<ide> )
<ide>
<ide> def _update_state(dag: DAG, dag_run: DagRun):
<del> dag_run.state = State.RUNNING
<add> dag_run.state = DagRunState.RUNNING
<ide> dag_run.start_date = timezone.utcnow()
<ide> if dag.timetable.periodic:
<ide> # TODO: Logically, this should be DagRunInfo.run_after, but the
<ide> def _schedule_dag_run(
<ide> :param dag_run: The DagRun to schedule
<ide> :return: Callback that needs to be executed
<ide> """
<add> callback: Optional[DagCallbackRequest] = None
<add>
<ide> dag = dag_run.dag = self.dagbag.get_dag(dag_run.dag_id, session=session)
<ide>
<ide> if not dag:
<ide> self.log.error("Couldn't find dag %s in DagBag/DB!", dag_run.dag_id)
<del> return 0
<add> return callback
<ide> dag_model = DM.get_dagmodel(dag.dag_id, session)
<ide>
<ide> if (
<ide> dag_run.start_date
<ide> and dag.dagrun_timeout
<ide> and dag_run.start_date < timezone.utcnow() - dag.dagrun_timeout
<ide> ):
<del> dag_run.set_state(State.FAILED)
<add> dag_run.set_state(DagRunState.FAILED)
<ide> unfinished_task_instances = (
<ide> session.query(TI)
<ide> .filter(TI.dag_id == dag_run.dag_id)
<ide> .filter(TI.run_id == dag_run.run_id)
<ide> .filter(TI.state.in_(State.unfinished))
<ide> )
<ide> for task_instance in unfinished_task_instances:
<del> task_instance.state = State.SKIPPED
<add> task_instance.state = TaskInstanceState.SKIPPED
<ide> session.merge(task_instance)
<ide> session.flush()
<ide> self.log.info("Run %s of %s has timed-out", dag_run.run_id, dag_run.dag_id)
<ide> def _schedule_dag_run(
<ide>
<ide> # Send SLA & DAG Success/Failure Callbacks to be executed
<ide> self._send_dag_callbacks_to_processor(dag, callback_to_execute)
<del>
<del> return 0
<add> # Because we send the callback here, we need to return None
<add> return callback
<ide>
<ide> if dag_run.execution_date > timezone.utcnow() and not dag.allow_future_exec_dates:
<ide> self.log.error("Execution date is in future: %s", dag_run.execution_date)
<del> return 0
<add> return callback
<ide>
<ide> self._verify_integrity_if_dag_changed(dag_run=dag_run, session=session)
<ide> # TODO[HA]: Rename update_state -> schedule_dag_run, ?? something else?
<ide> def _emit_pool_metrics(self, session: Session = None) -> None:
<ide> pools = models.Pool.slots_stats(session=session)
<ide> for pool_name, slot_stats in pools.items():
<ide> Stats.gauge(f'pool.open_slots.{pool_name}', slot_stats["open"])
<del> Stats.gauge(f'pool.queued_slots.{pool_name}', slot_stats[State.QUEUED]) # type: ignore
<del> Stats.gauge(f'pool.running_slots.{pool_name}', slot_stats[State.RUNNING]) # type: ignore
<add> Stats.gauge(f'pool.queued_slots.{pool_name}', slot_stats[TaskInstanceState.QUEUED])
<add> Stats.gauge(f'pool.running_slots.{pool_name}', slot_stats[TaskInstanceState.RUNNING])
<ide>
<ide> @provide_session
<ide> def heartbeat_callback(self, session: Session = None) -> None:
<ide> def adopt_or_reset_orphaned_tasks(self, session: Session = None):
<ide> self.log.info("Marked %d SchedulerJob instances as failed", num_failed)
<ide> Stats.incr(self.__class__.__name__.lower() + '_end', num_failed)
<ide>
<del> resettable_states = [State.QUEUED, State.RUNNING]
<add> resettable_states = [TaskInstanceState.QUEUED, TaskInstanceState.RUNNING]
<ide> query = (
<ide> session.query(TI)
<ide> .filter(TI.state.in_(resettable_states))
<ide> def check_trigger_timeouts(self, session: Session = None):
<ide> """
<ide> num_timed_out_tasks = (
<ide> session.query(TaskInstance)
<del> .filter(TaskInstance.state == State.DEFERRED, TaskInstance.trigger_timeout < timezone.utcnow())
<add> .filter(
<add> TaskInstance.state == TaskInstanceState.DEFERRED,
<add> TaskInstance.trigger_timeout < timezone.utcnow(),
<add> )
<ide> .update(
<ide> # We have to schedule these to fail themselves so it doesn't
<ide> # happen inside the scheduler.
<ide> {
<del> "state": State.SCHEDULED,
<add> "state": TaskInstanceState.SCHEDULED,
<ide> "next_method": "__fail__",
<ide> "next_kwargs": {"error": "Trigger/execution timeout"},
<ide> "trigger_id": None,
<ide><path>airflow/models/dagrun.py
<ide> def get_task_instances(
<ide> tis = tis.filter(TI.state == state)
<ide> else:
<ide> # this is required to deal with NULL values
<del> if TaskInstanceState.NONE in state:
<add> if State.NONE in state:
<ide> if all(x is None for x in state):
<ide> tis = tis.filter(TI.state.is_(None))
<ide> else:
<ide><path>airflow/models/taskinstance.py
<ide> def key(self) -> TaskInstanceKey:
<ide> return TaskInstanceKey(self.dag_id, self.task_id, self.run_id, self.try_number)
<ide>
<ide> @provide_session
<del> def set_state(self, state: str, session=NEW_SESSION):
<add> def set_state(self, state: Optional[str], session=NEW_SESSION):
<ide> """
<ide> Set TaskInstance state.
<ide>
<ide> def _handle_reschedule(
<ide> @provide_session
<ide> def handle_failure(
<ide> self,
<del> error: Union[str, BaseException],
<add> error: Optional[Union[str, BaseException]] = None,
<ide> test_mode: Optional[bool] = None,
<ide> force_fail: bool = False,
<ide> error_file: Optional[str] = None, | 5 |
Go | Go | defer creation of trust key file until needed | 0eed1f4d8da893d7c91be811197032a064053155 | <ide><path>api/client/cli.go
<ide> import (
<ide> flag "github.com/docker/docker/pkg/mflag"
<ide> "github.com/docker/docker/pkg/term"
<ide> "github.com/docker/docker/registry"
<del> "github.com/docker/libtrust"
<ide> )
<ide>
<ide> type DockerCli struct {
<ide> type DockerCli struct {
<ide> in io.ReadCloser
<ide> out io.Writer
<ide> err io.Writer
<del> key libtrust.PrivateKey
<add> keyFile string
<ide> tlsConfig *tls.Config
<ide> scheme string
<ide> // inFd holds file descriptor of the client's STDIN, if it's a valid file
<ide> func (cli *DockerCli) CheckTtyInput(attachStdin, ttyMode bool) error {
<ide> return nil
<ide> }
<ide>
<del>func NewDockerCli(in io.ReadCloser, out, err io.Writer, key libtrust.PrivateKey, proto, addr string, tlsConfig *tls.Config) *DockerCli {
<add>func NewDockerCli(in io.ReadCloser, out, err io.Writer, keyFile string, proto, addr string, tlsConfig *tls.Config) *DockerCli {
<ide> var (
<ide> inFd uintptr
<ide> outFd uintptr
<ide> func NewDockerCli(in io.ReadCloser, out, err io.Writer, key libtrust.PrivateKey,
<ide> in: in,
<ide> out: out,
<ide> err: err,
<del> key: key,
<add> keyFile: keyFile,
<ide> inFd: inFd,
<ide> outFd: outFd,
<ide> isTerminalIn: isTerminalIn,
<ide><path>api/client/commands.go
<ide> func (cli *DockerCli) CmdPush(args ...string) error {
<ide> name := cmd.Arg(0)
<ide>
<ide> cli.LoadConfigFile()
<add> trustKey, err := api.LoadOrCreateTrustKey(cli.keyFile)
<add> if err != nil {
<add> log.Fatal(err)
<add> }
<ide>
<ide> remote, tag := parsers.ParseRepositoryTag(name)
<ide>
<ide> func (cli *DockerCli) CmdPush(args ...string) error {
<ide> if err != nil {
<ide> return err
<ide> }
<del> err = js.Sign(cli.key)
<add> err = js.Sign(trustKey)
<ide> if err != nil {
<ide> return err
<ide> }
<ide><path>docker/docker.go
<ide> func main() {
<ide> }
<ide> protoAddrParts := strings.SplitN(flHosts[0], "://", 2)
<ide>
<del> trustKey, err := api.LoadOrCreateTrustKey(*flTrustKey)
<del> if err != nil {
<del> log.Fatal(err)
<del> }
<del>
<ide> var (
<ide> cli *client.DockerCli
<ide> tlsConfig tls.Config
<ide> func main() {
<ide> }
<ide>
<ide> if *flTls || *flTlsVerify {
<del> cli = client.NewDockerCli(os.Stdin, os.Stdout, os.Stderr, trustKey, protoAddrParts[0], protoAddrParts[1], &tlsConfig)
<add> cli = client.NewDockerCli(os.Stdin, os.Stdout, os.Stderr, *flTrustKey, protoAddrParts[0], protoAddrParts[1], &tlsConfig)
<ide> } else {
<del> cli = client.NewDockerCli(os.Stdin, os.Stdout, os.Stderr, trustKey, protoAddrParts[0], protoAddrParts[1], nil)
<add> cli = client.NewDockerCli(os.Stdin, os.Stdout, os.Stderr, *flTrustKey, protoAddrParts[0], protoAddrParts[1], nil)
<ide> }
<ide>
<ide> if err := cli.Cmd(flag.Args()...); err != nil {
<ide><path>integration-cli/docker_cli_daemon_test.go
<ide> func TestDaemonKeyMigration(t *testing.T) {
<ide> if err != nil {
<ide> t.Fatalf("Error generating private key: %s", err)
<ide> }
<add> if err := os.MkdirAll(filepath.Join(os.Getenv("HOME"), ".docker"), 0755); err != nil {
<add> t.Fatalf("Error creating .docker directory: %s", err)
<add> }
<ide> if err := libtrust.SaveKey(filepath.Join(os.Getenv("HOME"), ".docker", "key.json"), k1); err != nil {
<ide> t.Fatalf("Error saving private key: %s", err)
<ide> }
<ide><path>integration/commands_test.go
<ide> import (
<ide> "github.com/docker/docker/daemon"
<ide> "github.com/docker/docker/pkg/term"
<ide> "github.com/docker/docker/utils"
<del> "github.com/docker/libtrust"
<ide> "github.com/kr/pty"
<ide> )
<ide>
<ide> func TestRunDetach(t *testing.T) {
<ide> t.Fatal(err)
<ide> }
<ide>
<del> key, err := libtrust.GenerateECP256PrivateKey()
<del> if err != nil {
<del> t.Fatal(err)
<del> }
<del>
<del> cli := client.NewDockerCli(tty, stdoutPipe, ioutil.Discard, key, testDaemonProto, testDaemonAddr, nil)
<add> cli := client.NewDockerCli(tty, stdoutPipe, ioutil.Discard, "", testDaemonProto, testDaemonAddr, nil)
<ide> defer cleanup(globalEngine, t)
<ide>
<ide> ch := make(chan struct{})
<ide> func TestAttachDetach(t *testing.T) {
<ide> t.Fatal(err)
<ide> }
<ide>
<del> key, err := libtrust.GenerateECP256PrivateKey()
<del> if err != nil {
<del> t.Fatal(err)
<del> }
<del>
<del> cli := client.NewDockerCli(tty, stdoutPipe, ioutil.Discard, key, testDaemonProto, testDaemonAddr, nil)
<add> cli := client.NewDockerCli(tty, stdoutPipe, ioutil.Discard, "", testDaemonProto, testDaemonAddr, nil)
<ide> defer cleanup(globalEngine, t)
<ide>
<ide> ch := make(chan struct{})
<ide> func TestAttachDetach(t *testing.T) {
<ide> t.Fatal(err)
<ide> }
<ide>
<del> cli = client.NewDockerCli(tty, stdoutPipe, ioutil.Discard, key, testDaemonProto, testDaemonAddr, nil)
<add> cli = client.NewDockerCli(tty, stdoutPipe, ioutil.Discard, "", testDaemonProto, testDaemonAddr, nil)
<ide>
<ide> ch = make(chan struct{})
<ide> go func() {
<ide> func TestAttachDetachTruncatedID(t *testing.T) {
<ide> t.Fatal(err)
<ide> }
<ide>
<del> key, err := libtrust.GenerateECP256PrivateKey()
<del> if err != nil {
<del> t.Fatal(err)
<del> }
<del>
<del> cli := client.NewDockerCli(tty, stdoutPipe, ioutil.Discard, key, testDaemonProto, testDaemonAddr, nil)
<add> cli := client.NewDockerCli(tty, stdoutPipe, ioutil.Discard, "", testDaemonProto, testDaemonAddr, nil)
<ide> defer cleanup(globalEngine, t)
<ide>
<ide> // Discard the CmdRun output
<ide> func TestAttachDetachTruncatedID(t *testing.T) {
<ide> t.Fatal(err)
<ide> }
<ide>
<del> cli = client.NewDockerCli(tty, stdoutPipe, ioutil.Discard, key, testDaemonProto, testDaemonAddr, nil)
<add> cli = client.NewDockerCli(tty, stdoutPipe, ioutil.Discard, "", testDaemonProto, testDaemonAddr, nil)
<ide>
<ide> ch := make(chan struct{})
<ide> go func() {
<ide> func TestAttachDisconnect(t *testing.T) {
<ide> t.Fatal(err)
<ide> }
<ide>
<del> key, err := libtrust.GenerateECP256PrivateKey()
<del> if err != nil {
<del> t.Fatal(err)
<del> }
<del>
<del> cli := client.NewDockerCli(tty, stdoutPipe, ioutil.Discard, key, testDaemonProto, testDaemonAddr, nil)
<add> cli := client.NewDockerCli(tty, stdoutPipe, ioutil.Discard, "", testDaemonProto, testDaemonAddr, nil)
<ide> defer cleanup(globalEngine, t)
<ide>
<ide> go func() {
<ide> func TestAttachDisconnect(t *testing.T) {
<ide> func TestRunAutoRemove(t *testing.T) {
<ide> t.Skip("Fixme. Skipping test for now, race condition")
<ide> stdout, stdoutPipe := io.Pipe()
<del> key, err := libtrust.GenerateECP256PrivateKey()
<del> if err != nil {
<del> t.Fatal(err)
<del> }
<del> cli := client.NewDockerCli(nil, stdoutPipe, ioutil.Discard, key, testDaemonProto, testDaemonAddr, nil)
<add>
<add> cli := client.NewDockerCli(nil, stdoutPipe, ioutil.Discard, "", testDaemonProto, testDaemonAddr, nil)
<ide> defer cleanup(globalEngine, t)
<ide>
<ide> c := make(chan struct{})
<ide><path>integration/https_test.go
<ide> import (
<ide> "time"
<ide>
<ide> "github.com/docker/docker/api/client"
<del> "github.com/docker/libtrust"
<ide> )
<ide>
<ide> const (
<ide> func getTlsConfig(certFile, keyFile string, t *testing.T) *tls.Config {
<ide>
<ide> // TestHttpsInfo connects via two-way authenticated HTTPS to the info endpoint
<ide> func TestHttpsInfo(t *testing.T) {
<del> key, err := libtrust.GenerateECP256PrivateKey()
<del> if err != nil {
<del> t.Fatal(err)
<del> }
<del> cli := client.NewDockerCli(nil, ioutil.Discard, ioutil.Discard, key, testDaemonProto,
<add> cli := client.NewDockerCli(nil, ioutil.Discard, ioutil.Discard, "", testDaemonProto,
<ide> testDaemonHttpsAddr, getTlsConfig("client-cert.pem", "client-key.pem", t))
<ide>
<ide> setTimeout(t, "Reading command output time out", 10*time.Second, func() {
<ide> func TestHttpsInfo(t *testing.T) {
<ide> // TestHttpsInfoRogueCert connects via two-way authenticated HTTPS to the info endpoint
<ide> // by using a rogue client certificate and checks that it fails with the expected error.
<ide> func TestHttpsInfoRogueCert(t *testing.T) {
<del> key, err := libtrust.GenerateECP256PrivateKey()
<del> if err != nil {
<del> t.Fatal(err)
<del> }
<del> cli := client.NewDockerCli(nil, ioutil.Discard, ioutil.Discard, key, testDaemonProto,
<add> cli := client.NewDockerCli(nil, ioutil.Discard, ioutil.Discard, "", testDaemonProto,
<ide> testDaemonHttpsAddr, getTlsConfig("client-rogue-cert.pem", "client-rogue-key.pem", t))
<ide>
<ide> setTimeout(t, "Reading command output time out", 10*time.Second, func() {
<ide> func TestHttpsInfoRogueCert(t *testing.T) {
<ide> // TestHttpsInfoRogueServerCert connects via two-way authenticated HTTPS to the info endpoint
<ide> // which provides a rogue server certificate and checks that it fails with the expected error
<ide> func TestHttpsInfoRogueServerCert(t *testing.T) {
<del> key, err := libtrust.GenerateECP256PrivateKey()
<del> if err != nil {
<del> t.Fatal(err)
<del> }
<del> cli := client.NewDockerCli(nil, ioutil.Discard, ioutil.Discard, key, testDaemonProto,
<add> cli := client.NewDockerCli(nil, ioutil.Discard, ioutil.Discard, "", testDaemonProto,
<ide> testDaemonRogueHttpsAddr, getTlsConfig("client-cert.pem", "client-key.pem", t))
<ide>
<ide> setTimeout(t, "Reading command output time out", 10*time.Second, func() { | 6 |
PHP | PHP | extract method for easier extension/overwriting | 0b9fcb35628cf36a7b6bcb4194b64019832702ad | <ide><path>src/Controller/Component/AuthComponent.php
<ide> protected function _unauthenticated(Controller $controller)
<ide> */
<ide> protected function _loginActionRedirectUrl()
<ide> {
<del> $urlToRedirectBackTo = $this->request->here(false);
<del> if (!$this->request->is('get')) {
<del> $urlToRedirectBackTo = $this->request->referer(true);
<del> }
<add> $urlToRedirectBackTo = $this->_getUrlToRedirectBackTo();
<ide>
<ide> $loginAction = $this->_config['loginAction'];
<ide> if ($urlToRedirectBackTo === '/') {
<ide> public function authorizationProvider()
<ide> {
<ide> return $this->_authorizationProvider;
<ide> }
<add>
<add> /**
<add> * Returns the URL to redirect back to or / if not possible.
<add> *
<add> * This method takes the referrer into account as long as the
<add> * request is of type GET.
<add> *
<add> * @return string
<add> */
<add> protected function _getUrlToRedirectBackTo()
<add> {
<add> $urlToRedirectBackTo = $this->request->here(false);
<add> if (!$this->request->is('get')) {
<add> $urlToRedirectBackTo = $this->request->referer(true);
<add> }
<add>
<add> return $urlToRedirectBackTo;
<add> }
<ide> } | 1 |
Ruby | Ruby | fix autocrlf warning (again) | 77665053488c37c3ca02a145e77a799891e57fac | <ide><path>Library/Homebrew/cmd/doctor.rb
<ide> def check_git_newline_settings
<ide> return unless which "git"
<ide>
<ide> autocrlf = `git config --get core.autocrlf`.chomp
<del> safecrlf = `git config --get core.safecrlf`.chomp
<ide>
<del> if !autocrlf.empty? && autocrlf != 'false' then <<-EOS.undent
<add> if autocrlf == 'true' then <<-EOS.undent
<ide> Suspicious Git newline settings found.
<ide>
<ide> The detected Git newline settings will cause checkout problems:
<ide> core.autocrlf = #{autocrlf}
<ide>
<ide> If you are not routinely dealing with Windows-based projects,
<ide> consider removing these by running:
<del> `git config --global --set core.autocrlf false`
<add> `git config --global --set core.autocrlf input`
<ide> EOS
<ide> end
<ide> end | 1 |
Java | Java | use xcompletable in completable and single | 7a4d6330da7db2b954062b5b5fc042433db9c06a | <ide><path>src/main/java/io/reactivex/Completable.java
<ide> public interface CompletableOperator extends Function<CompletableSubscriber, Com
<ide> * Convenience interface and callback used by the compose operator to turn a Completable into another
<ide> * Completable fluently.
<ide> */
<del> public interface CompletableTransformer extends Function<Completable, Completable> {
<add> public interface CompletableTransformer extends Function<Completable, CompletableConsumable> {
<ide>
<ide> }
<ide>
<ide> public void subscribe(CompletableSubscriber s) {
<ide> }
<ide> });
<ide>
<add> /**
<add> * Wraps the given CompletableConsumable into a Completable
<add> * if not already Completable.
<add> * @param source the source to wrap
<add> * @return the source or its wrapper Completable
<add> * @throws NullPointerException if source is null
<add> */
<add> static Completable wrap(CompletableConsumable source) {
<add> Objects.requireNonNull(source, "source is null");
<add> if (source instanceof Completable) {
<add> return (Completable)source;
<add> }
<add> return new CompletableWrapper(source);
<add> }
<add>
<ide> /**
<ide> * Returns a Completable which terminates as soon as one of the source Completables
<ide> * terminates (normally or with an error) and cancels all other Completables.
<ide> public void subscribe(CompletableSubscriber s) {
<ide> * @throws NullPointerException if sources is null
<ide> */
<ide> @SchedulerSupport(SchedulerKind.NONE)
<del> public static Completable amb(final Completable... sources) {
<add> public static Completable amb(final CompletableConsumable... sources) {
<ide> Objects.requireNonNull(sources, "sources is null");
<ide> if (sources.length == 0) {
<ide> return complete();
<ide> }
<ide> if (sources.length == 1) {
<del> return sources[0];
<add> return wrap(sources[0]);
<ide> }
<ide>
<ide> return create(new CompletableConsumable() {
<ide> public void onSubscribe(Disposable d) {
<ide>
<ide> };
<ide>
<del> for (Completable c : sources) {
<add> for (CompletableConsumable c : sources) {
<ide> if (set.isDisposed()) {
<ide> return;
<ide> }
<ide> public void onSubscribe(Disposable d) {
<ide> * @throws NullPointerException if sources is null
<ide> */
<ide> @SchedulerSupport(SchedulerKind.NONE)
<del> public static Completable amb(final Iterable<? extends Completable> sources) {
<add> public static Completable amb(final Iterable<? extends CompletableConsumable> sources) {
<ide> Objects.requireNonNull(sources, "sources is null");
<ide>
<ide> return create(new CompletableConsumable() {
<ide> public void onSubscribe(Disposable d) {
<ide>
<ide> };
<ide>
<del> Iterator<? extends Completable> it;
<add> Iterator<? extends CompletableConsumable> it;
<ide>
<ide> try {
<ide> it = sources.iterator();
<ide> public void onSubscribe(Disposable d) {
<ide> return;
<ide> }
<ide>
<del> Completable c;
<add> CompletableConsumable c;
<ide>
<ide> try {
<ide> c = it.next();
<ide> public static Completable complete() {
<ide> * @throws NullPointerException if sources is null
<ide> */
<ide> @SchedulerSupport(SchedulerKind.NONE)
<del> public static Completable concat(Completable... sources) {
<add> public static Completable concat(CompletableConsumable... sources) {
<ide> Objects.requireNonNull(sources, "sources is null");
<ide> if (sources.length == 0) {
<ide> return complete();
<ide> } else
<ide> if (sources.length == 1) {
<del> return sources[0];
<add> return wrap(sources[0]);
<ide> }
<ide> return create(new CompletableOnSubscribeConcatArray(sources));
<ide> }
<ide> public static Completable concat(Completable... sources) {
<ide> * @throws NullPointerException if sources is null
<ide> */
<ide> @SchedulerSupport(SchedulerKind.NONE)
<del> public static Completable concat(Iterable<? extends Completable> sources) {
<add> public static Completable concat(Iterable<? extends CompletableConsumable> sources) {
<ide> Objects.requireNonNull(sources, "sources is null");
<ide>
<ide> return create(new CompletableOnSubscribeConcatIterable(sources));
<ide> public static Completable concat(Iterable<? extends Completable> sources) {
<ide> * @throws NullPointerException if sources is null
<ide> */
<ide> @SchedulerSupport(SchedulerKind.NONE)
<del> public static Completable concat(Flowable<? extends Completable> sources) {
<add> public static Completable concat(Publisher<? extends CompletableConsumable> sources) {
<ide> return concat(sources, 2);
<ide> }
<ide>
<ide> public static Completable concat(Flowable<? extends Completable> sources) {
<ide> * @throws NullPointerException if sources is null
<ide> */
<ide> @SchedulerSupport(SchedulerKind.NONE)
<del> public static Completable concat(Flowable<? extends Completable> sources, int prefetch) {
<add> public static Completable concat(Publisher<? extends CompletableConsumable> sources, int prefetch) {
<ide> Objects.requireNonNull(sources, "sources is null");
<ide> if (prefetch < 1) {
<ide> throw new IllegalArgumentException("prefetch > 0 required but it was " + prefetch);
<ide> public static Completable create(CompletableConsumable onSubscribe) {
<ide> * @return the Completable instance
<ide> */
<ide> @SchedulerSupport(SchedulerKind.NONE)
<del> public static Completable defer(final Supplier<? extends Completable> completableSupplier) {
<add> public static Completable defer(final Supplier<? extends CompletableConsumable> completableSupplier) {
<ide> Objects.requireNonNull(completableSupplier, "completableSupplier");
<ide> return create(new CompletableConsumable() {
<ide> @Override
<ide> public void subscribe(CompletableSubscriber s) {
<del> Completable c;
<add> CompletableConsumable c;
<ide>
<ide> try {
<ide> c = completableSupplier.get();
<ide> public void subscribe(CompletableSubscriber s) {
<ide> * @throws NullPointerException if flowable is null
<ide> */
<ide> @SchedulerSupport(SchedulerKind.NONE)
<del> public static <T> Completable fromFlowable(final Flowable<T> flowable) {
<add> public static <T> Completable fromFlowable(final Publisher<T> flowable) {
<ide> Objects.requireNonNull(flowable, "flowable is null");
<ide> return create(new CompletableConsumable() {
<ide> @Override
<ide> public void onSubscribe(Subscription s) {
<ide> * @throws NullPointerException if flowable is null
<ide> */
<ide> @SchedulerSupport(SchedulerKind.NONE)
<del> public static <T> Completable fromNbpObservable(final Observable<T> observable) {
<add> public static <T> Completable fromNbpObservable(final ObservableConsumable<T> observable) {
<ide> Objects.requireNonNull(observable, "observable is null");
<ide> return create(new CompletableConsumable() {
<ide> @Override
<ide> public void subscribe(CompletableSubscriber s) {
<ide> * @throws NullPointerException if single is null
<ide> */
<ide> @SchedulerSupport(SchedulerKind.NONE)
<del> public static <T> Completable fromSingle(final Single<T> single) {
<add> public static <T> Completable fromSingle(final SingleConsumable<T> single) {
<ide> Objects.requireNonNull(single, "single is null");
<ide> return create(new CompletableConsumable() {
<ide> @Override
<ide> public void onSuccess(T value) {
<ide> * @return the new Completable instance
<ide> * @throws NullPointerException if sources is null
<ide> */
<del> public static Completable merge(Completable... sources) {
<add> public static Completable merge(CompletableConsumable... sources) {
<ide> Objects.requireNonNull(sources, "sources is null");
<ide> if (sources.length == 0) {
<ide> return complete();
<ide> } else
<ide> if (sources.length == 1) {
<del> return sources[0];
<add> return wrap(sources[0]);
<ide> }
<ide> return create(new CompletableOnSubscribeMergeArray(sources));
<ide> }
<ide> public static Completable merge(Completable... sources) {
<ide> * @throws NullPointerException if sources is null
<ide> */
<ide> @SchedulerSupport(SchedulerKind.NONE)
<del> public static Completable merge(Iterable<? extends Completable> sources) {
<add> public static Completable merge(Iterable<? extends CompletableConsumable> sources) {
<ide> Objects.requireNonNull(sources, "sources is null");
<ide> return create(new CompletableOnSubscribeMergeIterable(sources));
<ide> }
<ide> public static Completable merge(Iterable<? extends Completable> sources) {
<ide> * @return the new Completable instance
<ide> * @throws NullPointerException if sources is null
<ide> */
<del> public static Completable merge(Flowable<? extends Completable> sources) {
<add> public static Completable merge(Publisher<? extends CompletableConsumable> sources) {
<ide> return merge0(sources, Integer.MAX_VALUE, false);
<ide> }
<ide>
<ide> public static Completable merge(Flowable<? extends Completable> sources) {
<ide> * @throws NullPointerException if sources is null
<ide> * @throws IllegalArgumentException if maxConcurrency is less than 1
<ide> */
<del> public static Completable merge(Flowable<? extends Completable> sources, int maxConcurrency) {
<add> public static Completable merge(Publisher<? extends CompletableConsumable> sources, int maxConcurrency) {
<ide> return merge0(sources, maxConcurrency, false);
<ide>
<ide> }
<ide> public static Completable merge(Flowable<? extends Completable> sources, int max
<ide> * @throws NullPointerException if sources is null
<ide> * @throws IllegalArgumentException if maxConcurrency is less than 1
<ide> */
<del> protected static Completable merge0(Flowable<? extends Completable> sources, int maxConcurrency, boolean delayErrors) {
<add> protected static Completable merge0(Publisher<? extends CompletableConsumable> sources, int maxConcurrency, boolean delayErrors) {
<ide> Objects.requireNonNull(sources, "sources is null");
<ide> if (maxConcurrency < 1) {
<ide> throw new IllegalArgumentException("maxConcurrency > 0 required but it was " + maxConcurrency);
<ide> protected static Completable merge0(Flowable<? extends Completable> sources, int
<ide> }
<ide>
<ide> /**
<del> * Returns a Completable that subscribes to all Completables in the source array and delays
<add> * Returns a CompletableConsumable that subscribes to all Completables in the source array and delays
<ide> * any error emitted by either the sources observable or any of the inner Completables until all of
<ide> * them terminate in a way or another.
<ide> * @param sources the array of Completables
<ide> * @return the new Completable instance
<ide> * @throws NullPointerException if sources is null
<ide> */
<del> public static Completable mergeDelayError(Completable... sources) {
<add> public static Completable mergeDelayError(CompletableConsumable... sources) {
<ide> Objects.requireNonNull(sources, "sources is null");
<ide> return create(new CompletableOnSubscribeMergeDelayErrorArray(sources));
<ide> }
<ide> public static Completable mergeDelayError(Completable... sources) {
<ide> * @return the new Completable instance
<ide> * @throws NullPointerException if sources is null
<ide> */
<del> public static Completable mergeDelayError(Iterable<? extends Completable> sources) {
<add> public static Completable mergeDelayError(Iterable<? extends CompletableConsumable> sources) {
<ide> Objects.requireNonNull(sources, "sources is null");
<ide> return create(new CompletableOnSubscribeMergeDelayErrorIterable(sources));
<ide> }
<ide> public static Completable mergeDelayError(Iterable<? extends Completable> source
<ide> * @return the new Completable instance
<ide> * @throws NullPointerException if sources is null
<ide> */
<del> public static Completable mergeDelayError(Flowable<? extends Completable> sources) {
<add> public static Completable mergeDelayError(Publisher<? extends CompletableConsumable> sources) {
<ide> return merge0(sources, Integer.MAX_VALUE, true);
<ide> }
<ide>
<ide> public static Completable mergeDelayError(Flowable<? extends Completable> source
<ide> * @return the new Completable instance
<ide> * @throws NullPointerException if sources is null
<ide> */
<del> public static Completable mergeDelayError(Flowable<? extends Completable> sources, int maxConcurrency) {
<add> public static Completable mergeDelayError(Publisher<? extends CompletableConsumable> sources, int maxConcurrency) {
<ide> return merge0(sources, maxConcurrency, true);
<ide> }
<ide>
<ide> static NullPointerException toNpe(Throwable ex) {
<ide> * @return the new Completable instance
<ide> */
<ide> public static <R> Completable using(Supplier<R> resourceSupplier,
<del> Function<? super R, ? extends Completable> completableFunction,
<add> Function<? super R, ? extends CompletableConsumable> completableFunction,
<ide> Consumer<? super R> disposer) {
<ide> return using(resourceSupplier, completableFunction, disposer, true);
<ide> }
<ide> public static <R> Completable using(Supplier<R> resourceSupplier,
<ide> */
<ide> public static <R> Completable using(
<ide> final Supplier<R> resourceSupplier,
<del> final Function<? super R, ? extends Completable> completableFunction,
<add> final Function<? super R, ? extends CompletableConsumable> completableFunction,
<ide> final Consumer<? super R> disposer,
<ide> final boolean eager) {
<ide> Objects.requireNonNull(resourceSupplier, "resourceSupplier is null");
<ide> public void subscribe(final CompletableSubscriber s) {
<ide> return;
<ide> }
<ide>
<del> Completable cs;
<add> CompletableConsumable cs;
<ide>
<ide> try {
<ide> cs = completableFunction.apply(resource);
<ide> public void dispose() {
<ide> * @throws NullPointerException if other is null
<ide> */
<ide> @SchedulerSupport(SchedulerKind.NONE)
<del> public final Completable ambWith(Completable other) {
<add> public final Completable ambWith(CompletableConsumable other) {
<ide> Objects.requireNonNull(other, "other is null");
<ide> return amb(this, other);
<ide> }
<ide> public void onSubscribe(Disposable d) {
<ide> */
<ide> @SchedulerSupport(SchedulerKind.NONE)
<ide> public final Completable compose(CompletableTransformer transformer) {
<del> return to(transformer);
<add> return wrap(to(transformer));
<ide> }
<ide>
<ide> /**
<ide> public final Completable compose(CompletableTransformer transformer) {
<ide> * @throws NullPointerException if other is null
<ide> */
<ide> @SchedulerSupport(SchedulerKind.NONE)
<del> public final Completable concatWith(Completable other) {
<add> public final Completable concatWith(CompletableConsumable other) {
<ide> Objects.requireNonNull(other, "other is null");
<ide> return concat(this, other);
<ide> }
<ide> public void accept(Throwable e) {
<ide> * Returns a completable that first runs this Completable
<ide> * and then the other completable.
<ide> * <p>
<del> * This is an alias for {@link #concatWith(Completable)}.
<del> * @param other the other Completable, not null
<add> * This is an alias for {@link #concatWith(CompletableConsumable)}.
<add> * @param other the other CompletableConsumable, not null
<ide> * @return the new Completable instance
<ide> * @throws NullPointerException if other is null
<ide> */
<ide> @SchedulerSupport(SchedulerKind.NONE)
<del> public final Completable endWith(Completable other) {
<add> public final Completable endWith(CompletableConsumable other) {
<ide> return concatWith(other);
<ide> }
<ide> /**
<ide> public final Completable endWith(Completable other) {
<ide> * @throws NullPointerException if next is null
<ide> */
<ide> @SchedulerSupport(SchedulerKind.CUSTOM)
<del> public final <T> Observable<T> endWith(Observable<T> next) {
<del> return next.startWith(this.<T>toNbpObservable());
<add> public final <T> Observable<T> endWith(ObservableConsumable<T> next) {
<add> return this.<T>toNbpObservable().endWith(next);
<ide> }
<ide>
<ide> /**
<ide> public final <T> Observable<T> endWith(Observable<T> next) {
<ide> * @throws NullPointerException if next is null
<ide> */
<ide> @SchedulerSupport(SchedulerKind.CUSTOM)
<del> public final <T> Flowable<T> endWith(Flowable<T> next) {
<del> return next.startWith(this.<T>toFlowable());
<add> public final <T> Flowable<T> endWith(Publisher<T> next) {
<add> return this.<T>toFlowable().endWith(next);
<ide> }
<ide>
<ide> /**
<ide> public void subscribe(CompletableSubscriber s) {
<ide> * @throws NullPointerException if other is null
<ide> */
<ide> @SchedulerSupport(SchedulerKind.NONE)
<del> public final Completable mergeWith(Completable other) {
<add> public final Completable mergeWith(CompletableConsumable other) {
<ide> Objects.requireNonNull(other, "other is null");
<ide> return merge(this, other);
<ide> }
<ide> public void onSubscribe(Disposable d) {
<ide> * @return the new Completable instance
<ide> */
<ide> @SchedulerSupport(SchedulerKind.NONE)
<del> public final Completable onErrorResumeNext(final Function<? super Throwable, ? extends Completable> errorMapper) {
<add> public final Completable onErrorResumeNext(final Function<? super Throwable, ? extends CompletableConsumable> errorMapper) {
<ide> Objects.requireNonNull(errorMapper, "errorMapper is null");
<ide> return create(new CompletableConsumable() {
<ide> @Override
<ide> public void onComplete() {
<ide>
<ide> @Override
<ide> public void onError(Throwable e) {
<del> Completable c;
<add> CompletableConsumable c;
<ide>
<ide> try {
<ide> c = errorMapper.apply(e);
<ide> public void onError(Throwable e) {
<ide> }
<ide>
<ide> if (c == null) {
<del> NullPointerException npe = new NullPointerException("The completable returned is null");
<add> NullPointerException npe = new NullPointerException("The CompletableConsumable returned is null");
<ide> npe.initCause(e);
<ide> s.onError(npe);
<ide> return;
<ide> public final Completable retryWhen(Function<? super Flowable<? extends Throwable
<ide> * @throws NullPointerException if other is null
<ide> */
<ide> @SchedulerSupport(SchedulerKind.NONE)
<del> public final Completable startWith(Completable other) {
<add> public final Completable startWith(CompletableConsumable other) {
<ide> Objects.requireNonNull(other, "other is null");
<ide> return concat(other, this);
<ide> }
<ide>
<ide> /**
<ide> * Returns an NbpObservable which first delivers the events
<del> * of the other NbpObservable then runs this Completable.
<add> * of the other NbpObservable then runs this CompletableConsumable.
<ide> * @param <T> the value type
<ide> * @param other the other NbpObservable to run first
<ide> * @return the new NbpObservable instance
<ide> public final <T> Observable<T> startWith(Observable<T> other) {
<ide> * @throws NullPointerException if other is null
<ide> */
<ide> @SchedulerSupport(SchedulerKind.NONE)
<del> public final <T> Flowable<T> startWith(Flowable<T> other) {
<add> public final <T> Flowable<T> startWith(Publisher<T> other) {
<ide> Objects.requireNonNull(other, "other is null");
<del> return other.endWith(this.<T>toFlowable());
<add> return this.<T>toFlowable().startWith(other);
<ide> }
<ide>
<ide> /**
<del> * Subscribes to this Completable and returns a Disposable which can be used to cancel
<add> * Subscribes to this CompletableConsumable and returns a Disposable which can be used to cancel
<ide> * the subscription.
<ide> * @return the Disposable that allows cancelling the subscription
<ide> */
<ide> public final Completable timeout(long timeout, TimeUnit unit) {
<ide> * @throws NullPointerException if unit or other is null
<ide> */
<ide> @SchedulerSupport(SchedulerKind.COMPUTATION)
<del> public final Completable timeout(long timeout, TimeUnit unit, Completable other) {
<add> public final Completable timeout(long timeout, TimeUnit unit, CompletableConsumable other) {
<ide> Objects.requireNonNull(other, "other is null");
<ide> return timeout0(timeout, unit, Schedulers.computation(), other);
<ide> }
<ide> public final Completable timeout(long timeout, TimeUnit unit, Scheduler schedule
<ide> * @throws NullPointerException if unit, scheduler or other is null
<ide> */
<ide> @SchedulerSupport(SchedulerKind.CUSTOM)
<del> public final Completable timeout(long timeout, TimeUnit unit, Scheduler scheduler, Completable other) {
<add> public final Completable timeout(long timeout, TimeUnit unit, Scheduler scheduler, CompletableConsumable other) {
<ide> Objects.requireNonNull(other, "other is null");
<ide> return timeout0(timeout, unit, scheduler, other);
<ide> }
<ide> public final Completable timeout(long timeout, TimeUnit unit, Scheduler schedule
<ide> * @throws NullPointerException if unit or scheduler
<ide> */
<ide> @SchedulerSupport(SchedulerKind.CUSTOM)
<del> public final Completable timeout0(long timeout, TimeUnit unit, Scheduler scheduler, Completable other) {
<add> public final Completable timeout0(long timeout, TimeUnit unit, Scheduler scheduler, CompletableConsumable other) {
<ide> Objects.requireNonNull(unit, "unit is null");
<ide> Objects.requireNonNull(scheduler, "scheduler is null");
<ide> return create(new CompletableOnSubscribeTimeout(this, timeout, unit, scheduler, other));
<ide><path>src/main/java/io/reactivex/Single.java
<ide> import io.reactivex.internal.functions.Functions;
<ide> import io.reactivex.internal.functions.Objects;
<ide> import io.reactivex.internal.operators.single.*;
<del>import io.reactivex.internal.subscriptions.*;
<ide> import io.reactivex.internal.util.*;
<ide> import io.reactivex.plugins.RxJavaPlugins;
<ide> import io.reactivex.schedulers.Schedulers;
<ide>
<ide> }
<ide>
<del> public interface SingleTransformer<Upstream, Downstream> extends Function<Single<Upstream>, Single<Downstream>> {
<add> public interface SingleTransformer<Upstream, Downstream> extends Function<Single<Upstream>, SingleConsumable<Downstream>> {
<ide>
<ide> }
<ide>
<del> public static <T> Single<T> amb(final Iterable<? extends Single<? extends T>> sources) {
<add> static <T> Single<T> wrap(SingleConsumable<T> source) {
<add> Objects.requireNonNull(source, "source is null");
<add> if (source instanceof Single) {
<add> return (Single<T>)source;
<add> }
<add> return new SingleWrapper<T>(source);
<add> }
<add>
<add> public static <T> Single<T> amb(final Iterable<? extends SingleConsumable<? extends T>> sources) {
<ide> Objects.requireNonNull(sources, "sources is null");
<ide> return create(new SingleConsumable<T>() {
<ide> @Override
<ide> public void subscribe(final SingleSubscriber<? super T> s) {
<ide> s.onSubscribe(set);
<ide>
<ide> int c = 0;
<del> Iterator<? extends Single<? extends T>> iterator;
<add> Iterator<? extends SingleConsumable<? extends T>> iterator;
<ide>
<ide> try {
<ide> iterator = sources.iterator();
<ide> public void subscribe(final SingleSubscriber<? super T> s) {
<ide> break;
<ide> }
<ide>
<del> Single<? extends T> s1;
<add> SingleConsumable<? extends T> s1;
<ide>
<ide> if (once.get()) {
<ide> return;
<ide> public void onError(Throwable e) {
<ide> }
<ide>
<ide> @SuppressWarnings("unchecked")
<del> public static <T> Single<T> amb(final Single<? extends T>... sources) {
<add> public static <T> Single<T> amb(final SingleConsumable<? extends T>... sources) {
<ide> if (sources.length == 0) {
<ide> return error(new Supplier<Throwable>() {
<ide> @Override
<ide> public Throwable get() {
<ide> });
<ide> }
<ide> if (sources.length == 1) {
<del> return (Single<T>)sources[0];
<add> return wrap((SingleConsumable<T>)sources[0]);
<ide> }
<ide> return create(new SingleConsumable<T>() {
<ide> @Override
<ide> public void subscribe(final SingleSubscriber<? super T> s) {
<ide> final CompositeDisposable set = new CompositeDisposable();
<ide> s.onSubscribe(set);
<ide>
<del> for (Single<? extends T> s1 : sources) {
<add> for (SingleConsumable<? extends T> s1 : sources) {
<ide> if (once.get()) {
<ide> return;
<ide> }
<ide> public void onError(Throwable e) {
<ide> });
<ide> }
<ide>
<del> public static <T> Flowable<T> concat(Iterable<? extends Single<? extends T>> sources) {
<add> public static <T> Flowable<T> concat(Iterable<? extends SingleConsumable<? extends T>> sources) {
<ide> return concat(Flowable.fromIterable(sources));
<ide> }
<ide>
<del> public static <T> Flowable<T> concat(Flowable<? extends Single<? extends T>> sources) {
<del> return sources.concatMap(new Function<Single<? extends T>, Publisher<? extends T>>() {
<add> public static <T> Flowable<T> concat(Flowable<? extends SingleConsumable<? extends T>> sources) {
<add> return sources.concatMap(new Function<SingleConsumable<? extends T>, Publisher<? extends T>>() {
<ide> @Override
<del> public Publisher<? extends T> apply(Single<? extends T> v){
<del> return v.toFlowable();
<add> public Publisher<? extends T> apply(SingleConsumable<? extends T> v){
<add> return new SingleToFlowable<T>(v);
<ide> }
<ide> });
<ide> }
<ide>
<ide> @SuppressWarnings("unchecked")
<ide> public static <T> Flowable<T> concat(
<del> Single<? extends T> s1, Single<? extends T> s2
<add> SingleConsumable<? extends T> s1, SingleConsumable<? extends T> s2
<ide> ) {
<ide> Objects.requireNonNull(s1, "s1 is null");
<ide> Objects.requireNonNull(s2, "s2 is null");
<ide> public static <T> Flowable<T> concat(
<ide>
<ide> @SuppressWarnings("unchecked")
<ide> public static <T> Flowable<T> concat(
<del> Single<? extends T> s1, Single<? extends T> s2,
<del> Single<? extends T> s3
<add> SingleConsumable<? extends T> s1, SingleConsumable<? extends T> s2,
<add> SingleConsumable<? extends T> s3
<ide> ) {
<ide> Objects.requireNonNull(s1, "s1 is null");
<ide> Objects.requireNonNull(s2, "s2 is null");
<ide> public static <T> Flowable<T> concat(
<ide>
<ide> @SuppressWarnings("unchecked")
<ide> public static <T> Flowable<T> concat(
<del> Single<? extends T> s1, Single<? extends T> s2,
<del> Single<? extends T> s3, Single<? extends T> s4
<add> SingleConsumable<? extends T> s1, SingleConsumable<? extends T> s2,
<add> SingleConsumable<? extends T> s3, SingleConsumable<? extends T> s4
<ide> ) {
<ide> Objects.requireNonNull(s1, "s1 is null");
<ide> Objects.requireNonNull(s2, "s2 is null");
<ide> public static <T> Flowable<T> concat(
<ide>
<ide> @SuppressWarnings("unchecked")
<ide> public static <T> Flowable<T> concat(
<del> Single<? extends T> s1, Single<? extends T> s2,
<del> Single<? extends T> s3, Single<? extends T> s4,
<del> Single<? extends T> s5
<add> SingleConsumable<? extends T> s1, SingleConsumable<? extends T> s2,
<add> SingleConsumable<? extends T> s3, SingleConsumable<? extends T> s4,
<add> SingleConsumable<? extends T> s5
<ide> ) {
<ide> Objects.requireNonNull(s1, "s1 is null");
<ide> Objects.requireNonNull(s2, "s2 is null");
<ide> public static <T> Flowable<T> concat(
<ide>
<ide> @SuppressWarnings("unchecked")
<ide> public static <T> Flowable<T> concat(
<del> Single<? extends T> s1, Single<? extends T> s2,
<del> Single<? extends T> s3, Single<? extends T> s4,
<del> Single<? extends T> s5, Single<? extends T> s6
<add> SingleConsumable<? extends T> s1, SingleConsumable<? extends T> s2,
<add> SingleConsumable<? extends T> s3, SingleConsumable<? extends T> s4,
<add> SingleConsumable<? extends T> s5, SingleConsumable<? extends T> s6
<ide> ) {
<ide> Objects.requireNonNull(s1, "s1 is null");
<ide> Objects.requireNonNull(s2, "s2 is null");
<ide> public static <T> Flowable<T> concat(
<ide>
<ide> @SuppressWarnings("unchecked")
<ide> public static <T> Flowable<T> concat(
<del> Single<? extends T> s1, Single<? extends T> s2,
<del> Single<? extends T> s3, Single<? extends T> s4,
<del> Single<? extends T> s5, Single<? extends T> s6,
<del> Single<? extends T> s7
<add> SingleConsumable<? extends T> s1, SingleConsumable<? extends T> s2,
<add> SingleConsumable<? extends T> s3, SingleConsumable<? extends T> s4,
<add> SingleConsumable<? extends T> s5, SingleConsumable<? extends T> s6,
<add> SingleConsumable<? extends T> s7
<ide> ) {
<ide> Objects.requireNonNull(s1, "s1 is null");
<ide> Objects.requireNonNull(s2, "s2 is null");
<ide> public static <T> Flowable<T> concat(
<ide>
<ide> @SuppressWarnings("unchecked")
<ide> public static <T> Flowable<T> concat(
<del> Single<? extends T> s1, Single<? extends T> s2,
<del> Single<? extends T> s3, Single<? extends T> s4,
<del> Single<? extends T> s5, Single<? extends T> s6,
<del> Single<? extends T> s7, Single<? extends T> s8
<add> SingleConsumable<? extends T> s1, SingleConsumable<? extends T> s2,
<add> SingleConsumable<? extends T> s3, SingleConsumable<? extends T> s4,
<add> SingleConsumable<? extends T> s5, SingleConsumable<? extends T> s6,
<add> SingleConsumable<? extends T> s7, SingleConsumable<? extends T> s8
<ide> ) {
<ide> Objects.requireNonNull(s1, "s1 is null");
<ide> Objects.requireNonNull(s2, "s2 is null");
<ide> public static <T> Flowable<T> concat(
<ide>
<ide> @SuppressWarnings("unchecked")
<ide> public static <T> Flowable<T> concat(
<del> Single<? extends T> s1, Single<? extends T> s2,
<del> Single<? extends T> s3, Single<? extends T> s4,
<del> Single<? extends T> s5, Single<? extends T> s6,
<del> Single<? extends T> s7, Single<? extends T> s8,
<del> Single<? extends T> s9
<add> SingleConsumable<? extends T> s1, SingleConsumable<? extends T> s2,
<add> SingleConsumable<? extends T> s3, SingleConsumable<? extends T> s4,
<add> SingleConsumable<? extends T> s5, SingleConsumable<? extends T> s6,
<add> SingleConsumable<? extends T> s7, SingleConsumable<? extends T> s8,
<add> SingleConsumable<? extends T> s9
<ide> ) {
<ide> Objects.requireNonNull(s1, "s1 is null");
<ide> Objects.requireNonNull(s2, "s2 is null");
<ide> public static <T> Single<T> create(SingleConsumable<T> onSubscribe) {
<ide> return new SingleWrapper<T>(onSubscribe);
<ide> }
<ide>
<del> public static <T> Single<T> defer(final Supplier<? extends Single<? extends T>> singleSupplier) {
<add> public static <T> Single<T> defer(final Supplier<? extends SingleConsumable<? extends T>> singleSupplier) {
<ide> Objects.requireNonNull(singleSupplier, "singleSupplier is null");
<ide> return create(new SingleConsumable<T>() {
<ide> @Override
<ide> public void subscribe(SingleSubscriber<? super T> s) {
<del> Single<? extends T> next;
<add> SingleConsumable<? extends T> next;
<ide>
<ide> try {
<ide> next = singleSupplier.get();
<ide> public void subscribe(SingleSubscriber<? super T> s) {
<ide> });
<ide> }
<ide>
<del> public static <T> Flowable<T> merge(Iterable<? extends Single<? extends T>> sources) {
<add> public static <T> Flowable<T> merge(Iterable<? extends SingleConsumable<? extends T>> sources) {
<ide> return merge(Flowable.fromIterable(sources));
<ide> }
<ide>
<del> public static <T> Flowable<T> merge(Flowable<? extends Single<? extends T>> sources) {
<del> return sources.flatMap(new Function<Single<? extends T>, Publisher<? extends T>>() {
<add> public static <T> Flowable<T> merge(Flowable<? extends SingleConsumable<? extends T>> sources) {
<add> return sources.flatMap(new Function<SingleConsumable<? extends T>, Publisher<? extends T>>() {
<ide> @Override
<del> public Publisher<? extends T> apply(Single<? extends T> v){
<del> return v.toFlowable();
<add> public Publisher<? extends T> apply(SingleConsumable<? extends T> v){
<add> return new SingleToFlowable<T>(v);
<ide> }
<ide> });
<ide> }
<ide>
<ide> @SuppressWarnings({ "unchecked", "rawtypes" })
<del> public static <T> Single<T> merge(Single<? extends Single<? extends T>> source) {
<del> return source.flatMap((Function)Functions.identity());
<add> public static <T> Single<T> merge(SingleConsumable<? extends SingleConsumable<? extends T>> source) {
<add> Objects.requireNonNull(source, "source is null");
<add> return new SingleOperatorFlatMap<SingleConsumable<? extends T>, T>(source, (Function)Functions.identity());
<ide> }
<ide>
<ide> @SuppressWarnings("unchecked")
<ide> public static <T> Flowable<T> merge(
<del> Single<? extends T> s1, Single<? extends T> s2
<add> SingleConsumable<? extends T> s1, SingleConsumable<? extends T> s2
<ide> ) {
<ide> Objects.requireNonNull(s1, "s1 is null");
<ide> Objects.requireNonNull(s2, "s2 is null");
<ide> public static <T> Flowable<T> merge(
<ide>
<ide> @SuppressWarnings("unchecked")
<ide> public static <T> Flowable<T> merge(
<del> Single<? extends T> s1, Single<? extends T> s2,
<del> Single<? extends T> s3
<add> SingleConsumable<? extends T> s1, SingleConsumable<? extends T> s2,
<add> SingleConsumable<? extends T> s3
<ide> ) {
<ide> Objects.requireNonNull(s1, "s1 is null");
<ide> Objects.requireNonNull(s2, "s2 is null");
<ide> public static <T> Flowable<T> merge(
<ide>
<ide> @SuppressWarnings("unchecked")
<ide> public static <T> Flowable<T> merge(
<del> Single<? extends T> s1, Single<? extends T> s2,
<del> Single<? extends T> s3, Single<? extends T> s4
<add> SingleConsumable<? extends T> s1, SingleConsumable<? extends T> s2,
<add> SingleConsumable<? extends T> s3, SingleConsumable<? extends T> s4
<ide> ) {
<ide> Objects.requireNonNull(s1, "s1 is null");
<ide> Objects.requireNonNull(s2, "s2 is null");
<ide> public static <T> Flowable<T> merge(
<ide>
<ide> @SuppressWarnings("unchecked")
<ide> public static <T> Flowable<T> merge(
<del> Single<? extends T> s1, Single<? extends T> s2,
<del> Single<? extends T> s3, Single<? extends T> s4,
<del> Single<? extends T> s5
<add> SingleConsumable<? extends T> s1, SingleConsumable<? extends T> s2,
<add> SingleConsumable<? extends T> s3, SingleConsumable<? extends T> s4,
<add> SingleConsumable<? extends T> s5
<ide> ) {
<ide> Objects.requireNonNull(s1, "s1 is null");
<ide> Objects.requireNonNull(s2, "s2 is null");
<ide> public static <T> Flowable<T> merge(
<ide>
<ide> @SuppressWarnings("unchecked")
<ide> public static <T> Flowable<T> merge(
<del> Single<? extends T> s1, Single<? extends T> s2,
<del> Single<? extends T> s3, Single<? extends T> s4,
<del> Single<? extends T> s5, Single<? extends T> s6
<add> SingleConsumable<? extends T> s1, SingleConsumable<? extends T> s2,
<add> SingleConsumable<? extends T> s3, SingleConsumable<? extends T> s4,
<add> SingleConsumable<? extends T> s5, SingleConsumable<? extends T> s6
<ide> ) {
<ide> Objects.requireNonNull(s1, "s1 is null");
<ide> Objects.requireNonNull(s2, "s2 is null");
<ide> public static <T> Flowable<T> merge(
<ide>
<ide> @SuppressWarnings("unchecked")
<ide> public static <T> Flowable<T> merge(
<del> Single<? extends T> s1, Single<? extends T> s2,
<del> Single<? extends T> s3, Single<? extends T> s4,
<del> Single<? extends T> s5, Single<? extends T> s6,
<del> Single<? extends T> s7
<add> SingleConsumable<? extends T> s1, SingleConsumable<? extends T> s2,
<add> SingleConsumable<? extends T> s3, SingleConsumable<? extends T> s4,
<add> SingleConsumable<? extends T> s5, SingleConsumable<? extends T> s6,
<add> SingleConsumable<? extends T> s7
<ide> ) {
<ide> Objects.requireNonNull(s1, "s1 is null");
<ide> Objects.requireNonNull(s2, "s2 is null");
<ide> public static <T> Flowable<T> merge(
<ide>
<ide> @SuppressWarnings("unchecked")
<ide> public static <T> Flowable<T> merge(
<del> Single<? extends T> s1, Single<? extends T> s2,
<del> Single<? extends T> s3, Single<? extends T> s4,
<del> Single<? extends T> s5, Single<? extends T> s6,
<del> Single<? extends T> s7, Single<? extends T> s8
<add> SingleConsumable<? extends T> s1, SingleConsumable<? extends T> s2,
<add> SingleConsumable<? extends T> s3, SingleConsumable<? extends T> s4,
<add> SingleConsumable<? extends T> s5, SingleConsumable<? extends T> s6,
<add> SingleConsumable<? extends T> s7, SingleConsumable<? extends T> s8
<ide> ) {
<ide> Objects.requireNonNull(s1, "s1 is null");
<ide> Objects.requireNonNull(s2, "s2 is null");
<ide> public static <T> Flowable<T> merge(
<ide>
<ide> @SuppressWarnings("unchecked")
<ide> public static <T> Flowable<T> merge(
<del> Single<? extends T> s1, Single<? extends T> s2,
<del> Single<? extends T> s3, Single<? extends T> s4,
<del> Single<? extends T> s5, Single<? extends T> s6,
<del> Single<? extends T> s7, Single<? extends T> s8,
<del> Single<? extends T> s9
<add> SingleConsumable<? extends T> s1, SingleConsumable<? extends T> s2,
<add> SingleConsumable<? extends T> s3, SingleConsumable<? extends T> s4,
<add> SingleConsumable<? extends T> s5, SingleConsumable<? extends T> s6,
<add> SingleConsumable<? extends T> s7, SingleConsumable<? extends T> s8,
<add> SingleConsumable<? extends T> s9
<ide> ) {
<ide> Objects.requireNonNull(s1, "s1 is null");
<ide> Objects.requireNonNull(s2, "s2 is null");
<ide> public void run() {
<ide> });
<ide> }
<ide>
<del> public static <T> Single<Boolean> equals(final Single<? extends T> first, final Single<? extends T> second) {
<add> public static <T> Single<Boolean> equals(final SingleConsumable<? extends T> first, final SingleConsumable<? extends T> second) {
<ide> Objects.requireNonNull(first, "first is null");
<ide> Objects.requireNonNull(second, "second is null");
<ide> return create(new SingleConsumable<Boolean>() {
<ide> public void onError(Throwable e) {
<ide> }
<ide>
<ide> public static <T, U> Single<T> using(Supplier<U> resourceSupplier,
<del> Function<? super U, ? extends Single<? extends T>> singleFunction, Consumer<? super U> disposer) {
<add> Function<? super U, ? extends SingleConsumable<? extends T>> singleFunction, Consumer<? super U> disposer) {
<ide> return using(resourceSupplier, singleFunction, disposer, true);
<ide> }
<ide>
<ide> public static <T, U> Single<T> using(
<ide> final Supplier<U> resourceSupplier,
<del> final Function<? super U, ? extends Single<? extends T>> singleFunction,
<add> final Function<? super U, ? extends SingleConsumable<? extends T>> singleFunction,
<ide> final Consumer<? super U> disposer,
<ide> final boolean eager) {
<ide> Objects.requireNonNull(resourceSupplier, "resourceSupplier is null");
<ide> public void subscribe(final SingleSubscriber<? super T> s) {
<ide> return;
<ide> }
<ide>
<del> Single<? extends T> s1;
<add> SingleConsumable<? extends T> s1;
<ide>
<ide> try {
<ide> s1 = singleFunction.apply(resource);
<ide> public void onError(Throwable e) {
<ide> });
<ide> }
<ide>
<del> @SuppressWarnings("unchecked")
<del> public static <T, R> Single<R> zip(final Iterable<? extends Single<? extends T>> sources, Function<? super Object[], ? extends R> zipper) {
<add> public static <T, R> Single<R> zip(final Iterable<? extends SingleConsumable<? extends T>> sources, Function<? super Object[], ? extends R> zipper) {
<ide> Objects.requireNonNull(sources, "sources is null");
<ide>
<ide> Iterable<? extends Flowable<T>> it = new Iterable<Flowable<T>>() {
<ide> @Override
<ide> public Iterator<Flowable<T>> iterator() {
<del> final Iterator<? extends Single<? extends T>> sit = sources.iterator();
<add> final Iterator<? extends SingleConsumable<? extends T>> sit = sources.iterator();
<ide> return new Iterator<Flowable<T>>() {
<ide>
<ide> @Override
<ide> public boolean hasNext() {
<ide>
<ide> @Override
<ide> public Flowable<T> next() {
<del> return ((Flowable<T>)sit.next().toFlowable());
<add> return new SingleToFlowable<T>(sit.next());
<ide> }
<ide>
<ide> @Override
<ide> public void remove() {
<ide>
<ide> @SuppressWarnings("unchecked")
<ide> public static <T1, T2, R> Single<R> zip(
<del> Single<? extends T1> s1, Single<? extends T2> s2,
<add> SingleConsumable<? extends T1> s1, SingleConsumable<? extends T2> s2,
<ide> BiFunction<? super T1, ? super T2, ? extends R> zipper
<ide> ) {
<ide> Objects.requireNonNull(s1, "s1 is null");
<ide> public static <T1, T2, R> Single<R> zip(
<ide>
<ide> @SuppressWarnings("unchecked")
<ide> public static <T1, T2, T3, R> Single<R> zip(
<del> Single<? extends T1> s1, Single<? extends T2> s2,
<del> Single<? extends T3> s3,
<add> SingleConsumable<? extends T1> s1, SingleConsumable<? extends T2> s2,
<add> SingleConsumable<? extends T3> s3,
<ide> Function3<? super T1, ? super T2, ? super T3, ? extends R> zipper
<ide> ) {
<ide> Objects.requireNonNull(s1, "s1 is null");
<ide> public static <T1, T2, T3, R> Single<R> zip(
<ide>
<ide> @SuppressWarnings("unchecked")
<ide> public static <T1, T2, T3, T4, R> Single<R> zip(
<del> Single<? extends T1> s1, Single<? extends T2> s2,
<del> Single<? extends T3> s3, Single<? extends T4> s4,
<add> SingleConsumable<? extends T1> s1, SingleConsumable<? extends T2> s2,
<add> SingleConsumable<? extends T3> s3, SingleConsumable<? extends T4> s4,
<ide> Function4<? super T1, ? super T2, ? super T3, ? super T4, ? extends R> zipper
<ide> ) {
<ide> Objects.requireNonNull(s1, "s1 is null");
<ide> public static <T1, T2, T3, T4, R> Single<R> zip(
<ide>
<ide> @SuppressWarnings("unchecked")
<ide> public static <T1, T2, T3, T4, T5, R> Single<R> zip(
<del> Single<? extends T1> s1, Single<? extends T2> s2,
<del> Single<? extends T3> s3, Single<? extends T4> s4,
<del> Single<? extends T5> s5,
<add> SingleConsumable<? extends T1> s1, SingleConsumable<? extends T2> s2,
<add> SingleConsumable<? extends T3> s3, SingleConsumable<? extends T4> s4,
<add> SingleConsumable<? extends T5> s5,
<ide> Function5<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? extends R> zipper
<ide> ) {
<ide> Objects.requireNonNull(s1, "s1 is null");
<ide> public static <T1, T2, T3, T4, T5, R> Single<R> zip(
<ide>
<ide> @SuppressWarnings("unchecked")
<ide> public static <T1, T2, T3, T4, T5, T6, R> Single<R> zip(
<del> Single<? extends T1> s1, Single<? extends T2> s2,
<del> Single<? extends T3> s3, Single<? extends T4> s4,
<del> Single<? extends T5> s5, Single<? extends T6> s6,
<add> SingleConsumable<? extends T1> s1, SingleConsumable<? extends T2> s2,
<add> SingleConsumable<? extends T3> s3, SingleConsumable<? extends T4> s4,
<add> SingleConsumable<? extends T5> s5, SingleConsumable<? extends T6> s6,
<ide> Function6<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? extends R> zipper
<ide> ) {
<ide> Objects.requireNonNull(s1, "s1 is null");
<ide> public static <T1, T2, T3, T4, T5, T6, R> Single<R> zip(
<ide>
<ide> @SuppressWarnings("unchecked")
<ide> public static <T1, T2, T3, T4, T5, T6, T7, R> Single<R> zip(
<del> Single<? extends T1> s1, Single<? extends T2> s2,
<del> Single<? extends T3> s3, Single<? extends T4> s4,
<del> Single<? extends T5> s5, Single<? extends T6> s6,
<del> Single<? extends T7> s7,
<add> SingleConsumable<? extends T1> s1, SingleConsumable<? extends T2> s2,
<add> SingleConsumable<? extends T3> s3, SingleConsumable<? extends T4> s4,
<add> SingleConsumable<? extends T5> s5, SingleConsumable<? extends T6> s6,
<add> SingleConsumable<? extends T7> s7,
<ide> Function7<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? extends R> zipper
<ide> ) {
<ide> Objects.requireNonNull(s1, "s1 is null");
<ide> public static <T1, T2, T3, T4, T5, T6, T7, R> Single<R> zip(
<ide>
<ide> @SuppressWarnings("unchecked")
<ide> public static <T1, T2, T3, T4, T5, T6, T7, T8, R> Single<R> zip(
<del> Single<? extends T1> s1, Single<? extends T2> s2,
<del> Single<? extends T3> s3, Single<? extends T4> s4,
<del> Single<? extends T5> s5, Single<? extends T6> s6,
<del> Single<? extends T7> s7, Single<? extends T8> s8,
<add> SingleConsumable<? extends T1> s1, SingleConsumable<? extends T2> s2,
<add> SingleConsumable<? extends T3> s3, SingleConsumable<? extends T4> s4,
<add> SingleConsumable<? extends T5> s5, SingleConsumable<? extends T6> s6,
<add> SingleConsumable<? extends T7> s7, SingleConsumable<? extends T8> s8,
<ide> Function8<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? extends R> zipper
<ide> ) {
<ide> Objects.requireNonNull(s1, "s1 is null");
<ide> public static <T1, T2, T3, T4, T5, T6, T7, T8, R> Single<R> zip(
<ide>
<ide> @SuppressWarnings("unchecked")
<ide> public static <T1, T2, T3, T4, T5, T6, T7, T8, T9, R> Single<R> zip(
<del> Single<? extends T1> s1, Single<? extends T2> s2,
<del> Single<? extends T3> s3, Single<? extends T4> s4,
<del> Single<? extends T5> s5, Single<? extends T6> s6,
<del> Single<? extends T7> s7, Single<? extends T8> s8,
<del> Single<? extends T9> s9,
<add> SingleConsumable<? extends T1> s1, SingleConsumable<? extends T2> s2,
<add> SingleConsumable<? extends T3> s3, SingleConsumable<? extends T4> s4,
<add> SingleConsumable<? extends T5> s5, SingleConsumable<? extends T6> s6,
<add> SingleConsumable<? extends T7> s7, SingleConsumable<? extends T8> s8,
<add> SingleConsumable<? extends T9> s9,
<ide> Function9<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? extends R> zipper
<ide> ) {
<ide> Objects.requireNonNull(s1, "s1 is null");
<ide> public static <T1, T2, T3, T4, T5, T6, T7, T8, T9, R> Single<R> zip(
<ide> }
<ide>
<ide> @SuppressWarnings({"rawtypes", "unchecked"})
<del> public static <T, R> Single<R> zipArray(Function<? super Object[], ? extends R> zipper, Single<? extends T>... sources) {
<add> public static <T, R> Single<R> zipArray(Function<? super Object[], ? extends R> zipper, SingleConsumable<? extends T>... sources) {
<add> Objects.requireNonNull(sources, "sources is null");
<ide> Publisher[] sourcePublishers = new Publisher[sources.length];
<ide> int i = 0;
<del> for (Single<? extends T> s : sources) {
<del> sourcePublishers[i] = s.toFlowable();
<add> for (SingleConsumable<? extends T> s : sources) {
<add> Objects.requireNonNull(s, "The " + i + "th source is null");
<add> sourcePublishers[i] = new SingleToFlowable<T>(s);
<ide> i++;
<ide> }
<ide> return Flowable.zipArray(zipper, false, 1, sourcePublishers).toSingle();
<ide> }
<ide>
<ide> @SuppressWarnings("unchecked")
<del> public final Single<T> ambWith(Single<? extends T> other) {
<add> public final Single<T> ambWith(SingleConsumable<? extends T> other) {
<ide> Objects.requireNonNull(other, "other is null");
<ide> return amb(this, other);
<ide> }
<ide> public void subscribe(SingleSubscriber<? super T> s) {
<ide> });
<ide> }
<ide>
<del> public final <R> Single<R> compose(Function<? super Single<T>, ? extends Single<R>> convert) {
<del> return to(convert);
<add> public final <R> Single<R> compose(Function<? super Single<T>, ? extends SingleConsumable<R>> convert) {
<add> return wrap(to(convert));
<ide> }
<ide>
<ide> public final Single<T> cache() {
<ide> public void onError(Throwable e) {
<ide> });
<ide> }
<ide>
<del> public final Flowable<T> concatWith(Single<? extends T> other) {
<add> public final Flowable<T> concatWith(SingleConsumable<? extends T> other) {
<ide> return concat(this, other);
<ide> }
<ide>
<ide> public void onError(Throwable e) {
<ide> });
<ide> }
<ide>
<del> public final <R> Single<R> flatMap(Function<? super T, ? extends Single<? extends R>> mapper) {
<add> public final <R> Single<R> flatMap(Function<? super T, ? extends SingleConsumable<? extends R>> mapper) {
<ide> Objects.requireNonNull(mapper, "mapper is null");
<del> return lift(new SingleOperatorFlatMap<T, R>(mapper));
<add> return new SingleOperatorFlatMap<T, R>(this, mapper);
<ide> }
<ide>
<ide> public final <R> Flowable<R> flatMapPublisher(Function<? super T, ? extends Publisher<? extends R>> mapper) {
<ide> public void onError(Throwable e) {
<ide> });
<ide> }
<ide>
<del> public final Flowable<T> mergeWith(Single<? extends T> other) {
<add> public final Flowable<T> mergeWith(SingleConsumable<? extends T> other) {
<ide> return merge(this, other);
<ide> }
<ide>
<ide> public T get() {
<ide> }
<ide>
<ide> public final Single<T> onErrorResumeNext(
<del> final Function<? super Throwable, ? extends Single<? extends T>> nextFunction) {
<add> final Function<? super Throwable, ? extends SingleConsumable<? extends T>> nextFunction) {
<ide> Objects.requireNonNull(nextFunction, "nextFunction is null");
<ide> return create(new SingleConsumable<T>() {
<ide> @Override
<ide> public void onSuccess(T value) {
<ide>
<ide> @Override
<ide> public void onError(Throwable e) {
<del> Single<? extends T> next;
<add> SingleConsumable<? extends T> next;
<ide>
<ide> try {
<ide> next = nextFunction.apply(e);
<ide> public final Single<T> timeout(long timeout, TimeUnit unit, Scheduler scheduler)
<ide> return timeout0(timeout, unit, scheduler, null);
<ide> }
<ide>
<del> public final Single<T> timeout(long timeout, TimeUnit unit, Scheduler scheduler, Single<? extends T> other) {
<add> public final Single<T> timeout(long timeout, TimeUnit unit, Scheduler scheduler, SingleConsumable<? extends T> other) {
<ide> Objects.requireNonNull(other, "other is null");
<ide> return timeout0(timeout, unit, scheduler, other);
<ide> }
<ide>
<del> public final Single<T> timeout(long timeout, TimeUnit unit, Single<? extends T> other) {
<add> public final Single<T> timeout(long timeout, TimeUnit unit, SingleConsumable<? extends T> other) {
<ide> Objects.requireNonNull(other, "other is null");
<ide> return timeout0(timeout, unit, Schedulers.computation(), other);
<ide> }
<ide>
<del> private Single<T> timeout0(final long timeout, final TimeUnit unit, final Scheduler scheduler, final Single<? extends T> other) {
<add> private Single<T> timeout0(final long timeout, final TimeUnit unit, final Scheduler scheduler, final SingleConsumable<? extends T> other) {
<ide> Objects.requireNonNull(unit, "unit is null");
<ide> Objects.requireNonNull(scheduler, "scheduler is null");
<ide> return create(new SingleConsumable<T>() {
<ide> public final <R> R to(Function<? super Single<T>, R> convert) {
<ide> }
<ide>
<ide> public final Flowable<T> toFlowable() {
<del> return Flowable.create(new Publisher<T>() {
<del> @Override
<del> public void subscribe(final Subscriber<? super T> s) {
<del> final ScalarAsyncSubscription<T> sas = new ScalarAsyncSubscription<T>(s);
<del> final AsyncSubscription as = new AsyncSubscription();
<del> as.setSubscription(sas);
<del> s.onSubscribe(as);
<del>
<del> Single.this.subscribe(new SingleSubscriber<T>() {
<del> @Override
<del> public void onError(Throwable e) {
<del> s.onError(e);
<del> }
<del>
<del> @Override
<del> public void onSubscribe(Disposable d) {
<del> as.setResource(d);
<del> }
<del>
<del> @Override
<del> public void onSuccess(T value) {
<del> sas.setValue(value);
<del> }
<del>
<del> });
<del> }
<del> });
<add> return new SingleToFlowable<T>(this);
<ide> }
<ide>
<ide> public final void unsafeSubscribe(Subscriber<? super T> s) {
<ide> toFlowable().unsafeSubscribe(s);
<ide> }
<ide>
<del> public final <U, R> Single<R> zipWith(Single<U> other, BiFunction<? super T, ? super U, ? extends R> zipper) {
<add> public final <U, R> Single<R> zipWith(SingleConsumable<U> other, BiFunction<? super T, ? super U, ? extends R> zipper) {
<ide> return zip(this, other, zipper);
<ide> }
<ide> }
<ide>\ No newline at end of file
<ide><path>src/main/java/io/reactivex/internal/operators/completable/CompletableOnSubscribeConcat.java
<ide> import io.reactivex.plugins.RxJavaPlugins;
<ide>
<ide> public final class CompletableOnSubscribeConcat implements CompletableConsumable {
<del> final Flowable<? extends Completable> sources;
<add> final Publisher<? extends CompletableConsumable> sources;
<ide> final int prefetch;
<ide>
<del> public CompletableOnSubscribeConcat(Flowable<? extends Completable> sources, int prefetch) {
<add> public CompletableOnSubscribeConcat(Publisher<? extends CompletableConsumable> sources, int prefetch) {
<ide> this.sources = sources;
<ide> this.prefetch = prefetch;
<ide> }
<ide> public void subscribe(CompletableSubscriber s) {
<ide>
<ide> static final class CompletableConcatSubscriber
<ide> extends AtomicInteger
<del> implements Subscriber<Completable>, Disposable {
<add> implements Subscriber<CompletableConsumable>, Disposable {
<ide> /** */
<ide> private static final long serialVersionUID = 7412667182931235013L;
<ide> final CompletableSubscriber actual;
<ide> final int prefetch;
<ide> final SerialResource<Disposable> sr;
<ide>
<del> final SpscArrayQueue<Completable> queue;
<add> final SpscArrayQueue<CompletableConsumable> queue;
<ide>
<ide> Subscription s;
<ide>
<ide> public void subscribe(CompletableSubscriber s) {
<ide> public CompletableConcatSubscriber(CompletableSubscriber actual, int prefetch) {
<ide> this.actual = actual;
<ide> this.prefetch = prefetch;
<del> this.queue = new SpscArrayQueue<Completable>(prefetch);
<add> this.queue = new SpscArrayQueue<CompletableConsumable>(prefetch);
<ide> this.sr = new SerialResource<Disposable>(Disposables.consumeAndDispose());
<ide> this.inner = new ConcatInnerSubscriber();
<ide> }
<ide> public void onSubscribe(Subscription s) {
<ide> }
<ide>
<ide> @Override
<del> public void onNext(Completable t) {
<add> public void onNext(CompletableConsumable t) {
<ide> if (!queue.offer(t)) {
<ide> onError(new MissingBackpressureException());
<ide> return;
<ide> public void dispose() {
<ide>
<ide> void next() {
<ide> boolean d = done;
<del> Completable c = queue.poll();
<add> CompletableConsumable c = queue.poll();
<ide> if (c == null) {
<ide> if (d) {
<ide> if (once.compareAndSet(false, true)) {
<ide><path>src/main/java/io/reactivex/internal/operators/completable/CompletableOnSubscribeConcatArray.java
<ide> import io.reactivex.disposables.*;
<ide>
<ide> public final class CompletableOnSubscribeConcatArray implements CompletableConsumable {
<del> final Completable[] sources;
<add> final CompletableConsumable[] sources;
<ide>
<del> public CompletableOnSubscribeConcatArray(Completable[] sources) {
<add> public CompletableOnSubscribeConcatArray(CompletableConsumable[] sources) {
<ide> this.sources = sources;
<ide> }
<ide>
<ide> static final class ConcatInnerSubscriber extends AtomicInteger implements Comple
<ide> private static final long serialVersionUID = -7965400327305809232L;
<ide>
<ide> final CompletableSubscriber actual;
<del> final Completable[] sources;
<add> final CompletableConsumable[] sources;
<ide>
<ide> int index;
<ide>
<ide> final SerialDisposable sd;
<ide>
<del> public ConcatInnerSubscriber(CompletableSubscriber actual, Completable[] sources) {
<add> public ConcatInnerSubscriber(CompletableSubscriber actual, CompletableConsumable[] sources) {
<ide> this.actual = actual;
<ide> this.sources = sources;
<ide> this.sd = new SerialDisposable();
<ide> void next() {
<ide> return;
<ide> }
<ide>
<del> Completable[] a = sources;
<add> CompletableConsumable[] a = sources;
<ide> do {
<ide> if (sd.isDisposed()) {
<ide> return;
<ide><path>src/main/java/io/reactivex/internal/operators/completable/CompletableOnSubscribeConcatIterable.java
<ide> import io.reactivex.internal.disposables.EmptyDisposable;
<ide>
<ide> public final class CompletableOnSubscribeConcatIterable implements CompletableConsumable {
<del> final Iterable<? extends Completable> sources;
<add> final Iterable<? extends CompletableConsumable> sources;
<ide>
<del> public CompletableOnSubscribeConcatIterable(Iterable<? extends Completable> sources) {
<add> public CompletableOnSubscribeConcatIterable(Iterable<? extends CompletableConsumable> sources) {
<ide> this.sources = sources;
<ide> }
<ide>
<ide> @Override
<ide> public void subscribe(CompletableSubscriber s) {
<ide>
<del> Iterator<? extends Completable> it;
<add> Iterator<? extends CompletableConsumable> it;
<ide>
<ide> try {
<ide> it = sources.iterator();
<ide> static final class ConcatInnerSubscriber extends AtomicInteger implements Comple
<ide> private static final long serialVersionUID = -7965400327305809232L;
<ide>
<ide> final CompletableSubscriber actual;
<del> final Iterator<? extends Completable> sources;
<add> final Iterator<? extends CompletableConsumable> sources;
<ide>
<ide> int index;
<ide>
<ide> final SerialDisposable sd;
<ide>
<del> public ConcatInnerSubscriber(CompletableSubscriber actual, Iterator<? extends Completable> sources) {
<add> public ConcatInnerSubscriber(CompletableSubscriber actual, Iterator<? extends CompletableConsumable> sources) {
<ide> this.actual = actual;
<ide> this.sources = sources;
<ide> this.sd = new SerialDisposable();
<ide> void next() {
<ide> return;
<ide> }
<ide>
<del> Iterator<? extends Completable> a = sources;
<add> Iterator<? extends CompletableConsumable> a = sources;
<ide> do {
<ide> if (sd.isDisposed()) {
<ide> return;
<ide> void next() {
<ide> return;
<ide> }
<ide>
<del> Completable c;
<add> CompletableConsumable c;
<ide>
<ide> try {
<ide> c = a.next();
<ide><path>src/main/java/io/reactivex/internal/operators/completable/CompletableOnSubscribeMerge.java
<ide> import io.reactivex.plugins.RxJavaPlugins;
<ide>
<ide> public final class CompletableOnSubscribeMerge implements CompletableConsumable {
<del> final Flowable<? extends Completable> source;
<add> final Publisher<? extends CompletableConsumable> source;
<ide> final int maxConcurrency;
<ide> final boolean delayErrors;
<ide>
<del> public CompletableOnSubscribeMerge(Flowable<? extends Completable> source, int maxConcurrency, boolean delayErrors) {
<add> public CompletableOnSubscribeMerge(Publisher<? extends CompletableConsumable> source, int maxConcurrency, boolean delayErrors) {
<ide> this.source = source;
<ide> this.maxConcurrency = maxConcurrency;
<ide> this.delayErrors = delayErrors;
<ide> public void subscribe(CompletableSubscriber s) {
<ide>
<ide> static final class CompletableMergeSubscriber
<ide> extends AtomicInteger
<del> implements Subscriber<Completable>, Disposable {
<add> implements Subscriber<CompletableConsumable>, Disposable {
<ide> /** */
<ide> private static final long serialVersionUID = -2108443387387077490L;
<ide>
<ide> Queue<Throwable> getOrCreateErrors() {
<ide> }
<ide>
<ide> @Override
<del> public void onNext(Completable t) {
<add> public void onNext(CompletableConsumable t) {
<ide> if (done) {
<ide> return;
<ide> }
<ide><path>src/main/java/io/reactivex/internal/operators/completable/CompletableOnSubscribeMergeArray.java
<ide> import io.reactivex.plugins.RxJavaPlugins;
<ide>
<ide> public final class CompletableOnSubscribeMergeArray implements CompletableConsumable {
<del> final Completable[] sources;
<add> final CompletableConsumable[] sources;
<ide>
<del> public CompletableOnSubscribeMergeArray(Completable[] sources) {
<add> public CompletableOnSubscribeMergeArray(CompletableConsumable[] sources) {
<ide> this.sources = sources;
<ide> }
<ide>
<ide> public void subscribe(final CompletableSubscriber s) {
<ide>
<ide> s.onSubscribe(set);
<ide>
<del> for (Completable c : sources) {
<add> for (CompletableConsumable c : sources) {
<ide> if (set.isDisposed()) {
<ide> return;
<ide> }
<ide><path>src/main/java/io/reactivex/internal/operators/completable/CompletableOnSubscribeMergeDelayErrorArray.java
<ide> import io.reactivex.disposables.*;
<ide>
<ide> public final class CompletableOnSubscribeMergeDelayErrorArray implements CompletableConsumable {
<del> final Completable[] sources;
<add> final CompletableConsumable[] sources;
<ide>
<del> public CompletableOnSubscribeMergeDelayErrorArray(Completable[] sources) {
<add> public CompletableOnSubscribeMergeDelayErrorArray(CompletableConsumable[] sources) {
<ide> this.sources = sources;
<ide> }
<ide>
<ide> public void subscribe(final CompletableSubscriber s) {
<ide>
<ide> s.onSubscribe(set);
<ide>
<del> for (Completable c : sources) {
<add> for (CompletableConsumable c : sources) {
<ide> if (set.isDisposed()) {
<ide> return;
<ide> }
<ide><path>src/main/java/io/reactivex/internal/operators/completable/CompletableOnSubscribeMergeDelayErrorIterable.java
<ide> import io.reactivex.internal.queue.MpscLinkedQueue;
<ide>
<ide> public final class CompletableOnSubscribeMergeDelayErrorIterable implements CompletableConsumable {
<del> final Iterable<? extends Completable> sources;
<add> final Iterable<? extends CompletableConsumable> sources;
<ide>
<del> public CompletableOnSubscribeMergeDelayErrorIterable(Iterable<? extends Completable> sources) {
<add> public CompletableOnSubscribeMergeDelayErrorIterable(Iterable<? extends CompletableConsumable> sources) {
<ide> this.sources = sources;
<ide> }
<ide>
<ide> public void subscribe(final CompletableSubscriber s) {
<ide>
<ide> s.onSubscribe(set);
<ide>
<del> Iterator<? extends Completable> iterator;
<add> Iterator<? extends CompletableConsumable> iterator;
<ide>
<ide> try {
<ide> iterator = sources.iterator();
<ide> public void subscribe(final CompletableSubscriber s) {
<ide> return;
<ide> }
<ide>
<del> Completable c;
<add> CompletableConsumable c;
<ide>
<ide> try {
<ide> c = iterator.next();
<ide><path>src/main/java/io/reactivex/internal/operators/completable/CompletableOnSubscribeMergeIterable.java
<ide> import io.reactivex.plugins.RxJavaPlugins;
<ide>
<ide> public final class CompletableOnSubscribeMergeIterable implements CompletableConsumable {
<del> final Iterable<? extends Completable> sources;
<add> final Iterable<? extends CompletableConsumable> sources;
<ide>
<del> public CompletableOnSubscribeMergeIterable(Iterable<? extends Completable> sources) {
<add> public CompletableOnSubscribeMergeIterable(Iterable<? extends CompletableConsumable> sources) {
<ide> this.sources = sources;
<ide> }
<ide>
<ide> public void subscribe(final CompletableSubscriber s) {
<ide>
<ide> s.onSubscribe(set);
<ide>
<del> Iterator<? extends Completable> iterator;
<add> Iterator<? extends CompletableConsumable> iterator;
<ide>
<ide> try {
<ide> iterator = sources.iterator();
<ide> public void subscribe(final CompletableSubscriber s) {
<ide> return;
<ide> }
<ide>
<del> Completable c;
<add> CompletableConsumable c;
<ide>
<ide> try {
<ide> c = iterator.next();
<ide><path>src/main/java/io/reactivex/internal/operators/completable/CompletableOnSubscribeTimeout.java
<ide>
<ide> public final class CompletableOnSubscribeTimeout implements CompletableConsumable {
<ide>
<del> final Completable source;
<add> final CompletableConsumable source;
<ide> final long timeout;
<ide> final TimeUnit unit;
<ide> final Scheduler scheduler;
<del> final Completable other;
<add> final CompletableConsumable other;
<ide>
<del> public CompletableOnSubscribeTimeout(Completable source, long timeout,
<del> TimeUnit unit, Scheduler scheduler, Completable other) {
<add> public CompletableOnSubscribeTimeout(CompletableConsumable source, long timeout,
<add> TimeUnit unit, Scheduler scheduler, CompletableConsumable other) {
<ide> this.source = source;
<ide> this.timeout = timeout;
<ide> this.unit = unit;
<ide><path>src/main/java/io/reactivex/internal/operators/single/SingleOperatorFlatMap.java
<ide> package io.reactivex.internal.operators.single;
<ide>
<ide> import io.reactivex.*;
<del>import io.reactivex.Single.*;
<ide> import io.reactivex.disposables.*;
<ide> import io.reactivex.functions.Function;
<ide>
<del>public final class SingleOperatorFlatMap<T, R> implements SingleOperator<R, T> {
<del> final Function<? super T, ? extends Single<? extends R>> mapper;
<add>public final class SingleOperatorFlatMap<T, R> extends Single<R> {
<add> final SingleConsumable<? extends T> source;
<add>
<add> final Function<? super T, ? extends SingleConsumable<? extends R>> mapper;
<ide>
<del> public SingleOperatorFlatMap(Function<? super T, ? extends Single<? extends R>> mapper) {
<add> public SingleOperatorFlatMap(SingleConsumable<? extends T> source, Function<? super T, ? extends SingleConsumable<? extends R>> mapper) {
<ide> this.mapper = mapper;
<add> this.source = source;
<ide> }
<ide>
<ide> @Override
<del> public SingleSubscriber<? super T> apply(SingleSubscriber<? super R> t) {
<del> return new SingleFlatMapCallback<T, R>(t, mapper);
<add> protected void subscribeActual(SingleSubscriber<? super R> subscriber) {
<add> source.subscribe(new SingleFlatMapCallback<T, R>(subscriber, mapper));
<ide> }
<ide>
<ide> static final class SingleFlatMapCallback<T, R> implements SingleSubscriber<T> {
<ide> final SingleSubscriber<? super R> actual;
<del> final Function<? super T, ? extends Single<? extends R>> mapper;
<add> final Function<? super T, ? extends SingleConsumable<? extends R>> mapper;
<ide>
<ide> final MultipleAssignmentDisposable mad;
<ide>
<ide> public SingleFlatMapCallback(SingleSubscriber<? super R> actual,
<del> Function<? super T, ? extends Single<? extends R>> mapper) {
<add> Function<? super T, ? extends SingleConsumable<? extends R>> mapper) {
<ide> this.actual = actual;
<ide> this.mapper = mapper;
<ide> this.mad = new MultipleAssignmentDisposable();
<ide> public void onSubscribe(Disposable d) {
<ide>
<ide> @Override
<ide> public void onSuccess(T value) {
<del> Single<? extends R> o;
<add> SingleConsumable<? extends R> o;
<ide>
<ide> try {
<ide> o = mapper.apply(value);
<ide><path>src/main/java/io/reactivex/internal/operators/single/SingleToFlowable.java
<add>/**
<add> * Copyright 2016 Netflix, Inc.
<add> *
<add> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
<add> * compliance with the License. You may obtain a copy of the License at
<add> *
<add> * http://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing, software distributed under the License is
<add> * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
<add> * the License for the specific language governing permissions and limitations under the License.
<add> */
<add>package io.reactivex.internal.operators.single;
<add>
<add>import org.reactivestreams.Subscriber;
<add>
<add>import io.reactivex.*;
<add>import io.reactivex.disposables.Disposable;
<add>import io.reactivex.internal.subscriptions.*;
<add>
<add>/**
<add> * Wraps a Single and exposes it as a Flowable.
<add> *
<add> * @param <T> the value type
<add> */
<add>public final class SingleToFlowable<T> extends Flowable<T> {
<add>
<add> final SingleConsumable<? extends T> source;
<add>
<add> public SingleToFlowable(SingleConsumable<? extends T> source) {
<add> this.source = source;
<add> }
<add>
<add> @Override
<add> public void subscribeActual(final Subscriber<? super T> s) {
<add> final ScalarAsyncSubscription<T> sas = new ScalarAsyncSubscription<T>(s);
<add> final AsyncSubscription as = new AsyncSubscription();
<add> as.setSubscription(sas);
<add> s.onSubscribe(as);
<add>
<add> source.subscribe(new SingleSubscriber<T>() {
<add> @Override
<add> public void onError(Throwable e) {
<add> s.onError(e);
<add> }
<add>
<add> @Override
<add> public void onSubscribe(Disposable d) {
<add> as.setResource(d);
<add> }
<add>
<add> @Override
<add> public void onSuccess(T value) {
<add> sas.setValue(value);
<add> }
<add>
<add> });
<add> }
<add>}
<ide><path>src/test/java/io/reactivex/single/SingleNullTests.java
<ide> public void concatNull() throws Exception {
<ide> for (int argCount = 2; argCount < 10; argCount++) {
<ide> for (int argNull = 1; argNull <= argCount; argNull++) {
<ide> Class<?>[] params = new Class[argCount];
<del> Arrays.fill(params, Single.class);
<add> Arrays.fill(params, SingleConsumable.class);
<ide>
<ide> Object[] values = new Object[argCount];
<ide> Arrays.fill(values, just1);
<ide> public void mergeNull() throws Exception {
<ide> for (int argCount = 2; argCount < 10; argCount++) {
<ide> for (int argNull = 1; argNull <= argCount; argNull++) {
<ide> Class<?>[] params = new Class[argCount];
<del> Arrays.fill(params, Single.class);
<add> Arrays.fill(params, SingleConsumable.class);
<ide>
<ide> Object[] values = new Object[argCount];
<ide> Arrays.fill(values, just1);
<ide> public void zipNull() throws Exception {
<ide> for (int argCount = 3; argCount < 10; argCount++) {
<ide> for (int argNull = 1; argNull <= argCount; argNull++) {
<ide> Class<?>[] params = new Class[argCount + 1];
<del> Arrays.fill(params, Single.class);
<add> Arrays.fill(params, SingleConsumable.class);
<ide> Class<?> fniClass = Class.forName("io.reactivex.functions.Function" + argCount);
<ide> params[argCount] = fniClass;
<ide>
<ide> public Object invoke(Object o, Method m1, Object[] a) throws Throwable {
<ide> }
<ide>
<ide> Class<?>[] params = new Class[argCount + 1];
<del> Arrays.fill(params, Single.class);
<add> Arrays.fill(params, SingleConsumable.class);
<ide> Class<?> fniClass = Class.forName("io.reactivex.functions.Function" + argCount);
<ide> params[argCount] = fniClass;
<ide> | 14 |
Mixed | Python | upgrade pending deprecations to deprecations | 1f996128458570a909d13f15c3d739fb12111984 | <ide><path>docs/topics/3.1-announcement.md
<ide> Thanks go to the latest member of our maintenance team, [José Padilla](https://
<ide>
<ide> ---
<ide>
<add>## Deprecations
<add>
<add>The `request.DATA`, `request.FILES` and `request.QUERY_PARAMS` attributes move from pending deprecation, to deprecated. Use `request.data` and `request.query_params` instead, as discussed in the 3.0 release notes.
<add>
<add>The ModelSerializer Meta options for `write_only_fields`, `view_name` and `lookup_field` are also moved from pending deprecation, to deprecated. Use `extra_kwargs` instead, as discussed in the 3.0 release notes.
<add>
<add>All these attributes and options will still work in 3.1, but their usage will raise a warning. They will be fully removed in 3.2.
<add>
<add>---
<add>
<ide> ## What's next?
<ide>
<ide> The next focus will be on HTML renderings of API output and will include:
<ide><path>rest_framework/request.py
<ide> def QUERY_PARAMS(self):
<ide> Synonym for `.query_params`, for backwards compatibility.
<ide> """
<ide> warnings.warn(
<del> "`request.QUERY_PARAMS` is pending deprecation. Use `request.query_params` instead.",
<del> PendingDeprecationWarning,
<add> "`request.QUERY_PARAMS` is deprecated. Use `request.query_params` instead.",
<add> DeprecationWarning,
<ide> stacklevel=1
<ide> )
<ide> return self._request.GET
<ide> def DATA(self):
<ide> arbitrary parsers, and also works on methods other than POST (eg PUT).
<ide> """
<ide> warnings.warn(
<del> "`request.DATA` is pending deprecation. Use `request.data` instead.",
<del> PendingDeprecationWarning,
<add> "`request.DATA` is deprecated. Use `request.data` instead.",
<add> DeprecationWarning,
<ide> stacklevel=1
<ide> )
<ide> if not _hasattr(self, '_data'):
<ide> def FILES(self):
<ide> arbitrary parsers, and also works on methods other than POST (eg PUT).
<ide> """
<ide> warnings.warn(
<del> "`request.FILES` is pending deprecation. Use `request.data` instead.",
<del> PendingDeprecationWarning,
<add> "`request.FILES` is deprecated. Use `request.data` instead.",
<add> DeprecationWarning,
<ide> stacklevel=1
<ide> )
<ide> if not _hasattr(self, '_files'):
<ide><path>rest_framework/serializers.py
<ide> def get_extra_kwargs(self):
<ide> write_only_fields = getattr(self.Meta, 'write_only_fields', None)
<ide> if write_only_fields is not None:
<ide> warnings.warn(
<del> "The `Meta.write_only_fields` option is pending deprecation. "
<add> "The `Meta.write_only_fields` option is deprecated. "
<ide> "Use `Meta.extra_kwargs={<field_name>: {'write_only': True}}` instead.",
<del> PendingDeprecationWarning,
<add> DeprecationWarning,
<ide> stacklevel=3
<ide> )
<ide> for field_name in write_only_fields:
<ide> def get_extra_kwargs(self):
<ide> view_name = getattr(self.Meta, 'view_name', None)
<ide> if view_name is not None:
<ide> warnings.warn(
<del> "The `Meta.view_name` option is pending deprecation. "
<add> "The `Meta.view_name` option is deprecated. "
<ide> "Use `Meta.extra_kwargs={'url': {'view_name': ...}}` instead.",
<del> PendingDeprecationWarning,
<add> DeprecationWarning,
<ide> stacklevel=3
<ide> )
<ide> kwargs = extra_kwargs.get(api_settings.URL_FIELD_NAME, {})
<ide> def get_extra_kwargs(self):
<ide> lookup_field = getattr(self.Meta, 'lookup_field', None)
<ide> if lookup_field is not None:
<ide> warnings.warn(
<del> "The `Meta.lookup_field` option is pending deprecation. "
<add> "The `Meta.lookup_field` option is deprecated. "
<ide> "Use `Meta.extra_kwargs={'url': {'lookup_field': ...}}` instead.",
<del> PendingDeprecationWarning,
<add> DeprecationWarning,
<ide> stacklevel=3
<ide> )
<ide> kwargs = extra_kwargs.get(api_settings.URL_FIELD_NAME, {})
<ide><path>rest_framework/views.py
<ide> def handle_exception(self, exc):
<ide> warnings.warn(
<ide> 'The `exception_handler(exc)` call signature is deprecated. '
<ide> 'Use `exception_handler(exc, context) instead.',
<del> PendingDeprecationWarning
<add> DeprecationWarning
<ide> )
<ide> response = exception_handler(exc)
<ide> else: | 4 |
PHP | PHP | return a new instance from sortby and sortbydesc | e57cb0d968841bd67bf2b8eb589d4479de924ee1 | <ide><path>src/Illuminate/Support/Collection.php
<ide> public function sort(callable $callback)
<ide> * @param callable|string $callback
<ide> * @param int $options
<ide> * @param bool $descending
<del> * @return $this
<add> * @return static
<ide> */
<ide> public function sortBy($callback, $options = SORT_REGULAR, $descending = false)
<ide> {
<ide> public function sortBy($callback, $options = SORT_REGULAR, $descending = false)
<ide> $results[$key] = $this->items[$key];
<ide> }
<ide>
<del> $this->items = $results;
<del>
<del> return $this;
<add> return new static($results);
<ide> }
<ide>
<ide> /**
<ide> * Sort the collection in descending order using the given callback.
<ide> *
<ide> * @param callable|string $callback
<ide> * @param int $options
<del> * @return $this
<add> * @return static
<ide> */
<ide> public function sortByDesc($callback, $options = SORT_REGULAR)
<ide> {
<ide><path>tests/Support/SupportCollectionTest.php
<ide> public function testSortBy()
<ide> $this->assertEquals(array('dayle', 'taylor'), array_values($data->all()));
<ide>
<ide> $data = new Collection(array('dayle', 'taylor'));
<del> $data->sortByDesc(function($x) { return $x; });
<add> $data = $data->sortByDesc(function($x) { return $x; });
<ide>
<ide> $this->assertEquals(array('taylor', 'dayle'), array_values($data->all()));
<ide> } | 2 |
Javascript | Javascript | update deprecation wording to be less aggressive | 2be0583ed37ffc22fefe61292a12daee083e6ed9 | <ide><path>src/isomorphic/React.js
<ide> if (__DEV__) {
<ide> warning(
<ide> warnedForCreateMixin,
<ide> 'React.createMixin is deprecated and should not be used. You ' +
<del> 'can use your mixin directly instead.'
<add> 'can use this mixin directly instead.'
<ide> );
<ide> warnedForCreateMixin = true;
<ide> return mixin; | 1 |
Ruby | Ruby | add some documentation for am.mailer_name | 58533dc9bd8736c28b5ad8286e57b02f45939621 | <ide><path>actionmailer/lib/action_mailer/base.rb
<ide> def register_interceptor(interceptor)
<ide> Mail.register_interceptor(delivery_interceptor)
<ide> end
<ide>
<add> # Returns the name of current mailer. This method also being used as a path for a view lookup.
<add> # If this is an anonymous mailer, this method will return +anonymous+ instead.
<ide> def mailer_name
<ide> @mailer_name ||= anonymous? ? "anonymous" : name.underscore
<ide> end | 1 |
PHP | PHP | specify middleware directly | 9ecfa9cf8ead645a762f7b42bed8cb035db5f2be | <ide><path>app/Http/Controllers/Auth/AuthController.php
<ide> class AuthController extends Controller
<ide> *
<ide> * @var string
<ide> */
<del> protected $redirectTo = '/';
<add> protected $redirectTo = '/home';
<ide>
<ide> /**
<ide> * Create a new authentication controller instance.
<ide> class AuthController extends Controller
<ide> */
<ide> public function __construct()
<ide> {
<del> $this->middleware($this->guestMiddleware(), ['except' => 'logout']);
<add> $this->middleware('guest', ['except' => 'logout']);
<ide> }
<ide>
<ide> /** | 1 |
Javascript | Javascript | collectionview cleans up emptyview - fixes | 8c6064902d1221cc83b5fe5e1f391cf81d46fa48 | <ide><path>packages/ember-views/lib/views/collection_view.js
<ide> Ember.CollectionView = Ember.ContainerView.extend(
<ide> if (content) { content.removeArrayObserver(this); }
<ide>
<ide> this._super();
<add>
<add> if (this._createdEmptyView) {
<add> this._createdEmptyView.destroy();
<add> }
<ide> },
<ide>
<ide> arrayWillChange: function(content, start, removedCount) {
<ide> Ember.CollectionView = Ember.ContainerView.extend(
<ide> var emptyView = get(this, 'emptyView');
<ide> if (!emptyView) { return; }
<ide>
<add> var isClass = Ember.CoreView.detect(emptyView);
<add>
<ide> emptyView = this.createChildView(emptyView);
<ide> addedViews.push(emptyView);
<ide> set(this, 'emptyView', emptyView);
<add>
<add> if (isClass) { this._createdEmptyView = emptyView; }
<ide> }
<ide> this.replace(start, 0, addedViews);
<ide> }, | 1 |
Javascript | Javascript | fix simple typo, intial -> initial | f22ead10f09b89b33c996adc3cd848cfce7a635e | <ide><path>src/js/player.js
<ide> class Player extends Component {
<ide> }
<ide> }
<ide>
<del> // update the source to the intial source right away
<add> // update the source to the initial source right away
<ide> // in some cases this will be empty string
<ide> updateSourceCaches(eventSrc);
<ide>
<ide> class Player extends Component {
<ide> return;
<ide> }
<ide>
<del> // intial sources
<add> // initial sources
<ide> this.changingSrc_ = true;
<ide>
<ide> this.cache_.sources = sources;
<ide><path>src/js/tech/html5.js
<ide> Html5.resetMediaElement = function(el) {
<ide>
<ide> /**
<ide> * Set the value of `defaultMuted` on the media element. `defaultMuted` indicates that the current
<del> * audio level should be silent, but will only effect the muted level on intial playback..
<add> * audio level should be silent, but will only effect the muted level on initial playback..
<ide> *
<ide> * @method Html5.prototype.setDefaultMuted
<ide> * @param {boolean} defaultMuted
<ide><path>test/unit/sourceset.test.js
<ide> QUnit[qunitFn]('sourceset', function(hooks) {
<ide> this.mediaEl = this.player.tech_.el();
<ide> });
<ide>
<del> // intial sourceset should happen on player.ready
<add> // initial sourceset should happen on player.ready
<ide> this.player.one('sourceset', (e) => {
<ide> validateSource(this.player, [testSrc], e);
<ide> done(); | 3 |
Python | Python | use unicode literals in train_ud | 22189e60dbf92fe22ad49e296c4e41fc84b71f9d | <ide><path>bin/parser/train_ud.py
<add>from __future__ import unicode_literals
<ide> import plac
<ide> import json
<ide> from os import path
<ide> import shutil
<ide> import os
<ide> import random
<ide> import io
<add>import pathlib
<ide>
<ide> from spacy.tokens import Doc
<ide> from spacy.syntax.nonproj import PseudoProjectivity
<ide> from spacy.syntax.arc_eager import ArcEager
<ide> from spacy.scorer import Scorer
<ide> import spacy.attrs
<add>import io
<ide>
<del>try:
<del> from codecs import open
<del>except ImportError:
<del> pass
<ide>
<ide>
<ide> def read_conllx(loc):
<del> with open(loc, 'r', 'utf8') as file_:
<add> with io.open(loc, 'r', encoding='utf8') as file_:
<ide> text = file_.read()
<ide> for sent in text.strip().split('\n\n'):
<ide> lines = sent.strip().split('\n')
<ide> def score_model(vocab, tagger, parser, gold_docs, verbose=False):
<ide> doc = Doc(vocab, words=words)
<ide> tagger(doc)
<ide> parser(doc)
<add> PseudoProjectivity.deprojectivize(doc)
<ide> gold = GoldParse(doc, tags=tags, heads=heads, deps=deps)
<ide> scorer.score(doc, gold, verbose=verbose)
<ide> return scorer
<ide> def main(train_loc, dev_loc, model_dir, tag_map_loc):
<ide> tag_map = json.loads(file_.read())
<ide> train_sents = list(read_conllx(train_loc))
<ide> train_sents = PseudoProjectivity.preprocess_training_data(train_sents)
<add>
<ide> actions = ArcEager.get_actions(gold_parses=train_sents)
<ide> features = get_templates('basic')
<add>
<add> model_dir = pathlib.Path(model_dir)
<add> with (model_dir / 'deps' / 'config.json').open('wb') as file_:
<add> json.dump({'pseudoprojective': True, 'labels': actions, 'features': features}, file_)
<ide>
<ide> vocab = Vocab(lex_attr_getters=Language.Defaults.lex_attr_getters, tag_map=tag_map)
<ide> # Populate vocab
<ide> for _, doc_sents in train_sents:
<ide> for (ids, words, tags, heads, deps, ner), _ in doc_sents:
<ide> for word in words:
<ide> _ = vocab[word]
<add> for dep in deps:
<add> _ = vocab[dep]
<add> for tag in tags:
<add> _ = vocab[tag]
<ide> for tag in tags:
<ide> assert tag in tag_map, repr(tag)
<del> print(tags)
<ide> tagger = Tagger(vocab, tag_map=tag_map)
<ide> parser = DependencyParser(vocab, actions=actions, features=features)
<ide> | 1 |
Javascript | Javascript | fix documentation typo in reactcomponent | 419038f84602d75394f9983a899fd2ddf5ab06c5 | <ide><path>src/modern/class/ReactComponent.js
<ide> ReactComponent.prototype.setState = function(partialState, callback) {
<ide> * You may want to call this when you know that some deeper aspect of the
<ide> * component's state has changed but `setState` was not called.
<ide> *
<del> * This will not invoke `shouldUpdateComponent`, but it will invoke
<add> * This will not invoke `shouldComponentUpdate`, but it will invoke
<ide> * `componentWillUpdate` and `componentDidUpdate`.
<ide> *
<ide> * @param {?function} callback Called after update is complete. | 1 |
Text | Text | add @mhdawson back to the ctc | 997a3be3af1f202dc8058ee1072cbd4b4b77895c | <ide><path>README.md
<ide> information about the governance of the Node.js project, see
<ide> * [fishrock123](https://github.com/fishrock123) - **Jeremiah Senkpiel** <[email protected]>
<ide> * [indutny](https://github.com/indutny) - **Fedor Indutny** <[email protected]>
<ide> * [jasnell](https://github.com/jasnell) - **James M Snell** <[email protected]>
<add>* [mhdawson](https://github.com/mhdawson) - **Michael Dawson** <[email protected]>
<ide> * [misterdjules](https://github.com/misterdjules) - **Julien Gilli** <[email protected]>
<ide> * [mscdex](https://github.com/mscdex) - **Brian White** <[email protected]>
<ide> * [ofrobots](https://github.com/ofrobots) - **Ali Ijaz Sheikh** <[email protected]>
<ide> information about the governance of the Node.js project, see
<ide> * [lxe](https://github.com/lxe) - **Aleksey Smolenchuk** <[email protected]>
<ide> * [matthewloring](https://github.com/matthewloring) - **Matthew Loring** <[email protected]>
<ide> * [mcollina](https://github.com/mcollina) - **Matteo Collina** <[email protected]>
<del>* [mhdawson](https://github.com/mhdawson) - **Michael Dawson** <[email protected]>
<ide> * [micnic](https://github.com/micnic) - **Nicu Micleușanu** <[email protected]>
<ide> * [mikeal](https://github.com/mikeal) - **Mikeal Rogers** <[email protected]>
<ide> * [monsanto](https://github.com/monsanto) - **Christopher Monsanto** <[email protected]> | 1 |
Text | Text | suggest open index.html in browser regularly | a9d0135f8474b98924eaa4726766c7a674377f66 | <ide><path>docs/docs/tutorial.md
<ide> For this tutorial, we'll use prebuilt JavaScript files on a CDN. Open up your fa
<ide> </html>
<ide> ```
<ide>
<del>For the remainder of this tutorial, we'll be writing our JavaScript code in this script tag.
<add>For the remainder of this tutorial, we'll be writing our JavaScript code in this script tag. Follow your progress by opening your index.html file in your browser after each addition.
<ide>
<ide> > Note:
<ide> > | 1 |
Javascript | Javascript | remove unnecessary reference | ea7e746d13f7c9173d2b3f3a4b66d1228aeb60c2 | <ide><path>packages/ember-runtime/lib/core.js
<ide> var errorProps = ['description', 'fileName', 'lineNumber', 'message', 'name', 'n
<ide> @constructor
<ide> */
<ide> Ember.Error = function() {
<del> var tmp = Error.prototype.constructor.apply(this, arguments);
<add> var tmp = Error.apply(this, arguments);
<ide>
<ide> // Unfortunately errors are not enumerable in Chrome (at least), so `for prop in tmp` doesn't work.
<ide> for (var idx = 0; idx < errorProps.length; idx++) { | 1 |
Python | Python | preserve call/errbacks of replaced tasks | 5d72aeedb6329b609469c63998e9335e017bd204 | <ide><path>celery/app/task.py
<ide> from kombu.exceptions import OperationalError
<ide> from kombu.utils.uuid import uuid
<ide>
<del>from celery import current_app, group, states
<add>from celery import current_app, states
<ide> from celery._state import _task_stack
<del>from celery.canvas import _chain, signature
<add>from celery.canvas import _chain, group, signature
<ide> from celery.exceptions import (Ignore, ImproperlyConfigured,
<ide> MaxRetriesExceededError, Reject, Retry)
<ide> from celery.local import class_property
<ide> def replace(self, sig):
<ide> raise ImproperlyConfigured(
<ide> "A signature replacing a task must not be part of a chord"
<ide> )
<add> if isinstance(sig, _chain) and not getattr(sig, "tasks", True):
<add> raise ImproperlyConfigured("Cannot replace with an empty chain")
<ide>
<add> # Ensure callbacks or errbacks from the replaced signature are retained
<ide> if isinstance(sig, group):
<del> sig |= self.app.tasks['celery.accumulate'].s(index=0).set(
<del> link=self.request.callbacks,
<del> link_error=self.request.errbacks,
<del> )
<del> elif isinstance(sig, _chain):
<del> if not sig.tasks:
<del> raise ImproperlyConfigured(
<del> "Cannot replace with an empty chain"
<del> )
<del>
<del> if self.request.chain:
<del> # We need to freeze the new signature with the current task's ID to
<del> # ensure that we don't disassociate the new chain from the existing
<del> # task IDs which would break previously constructed results
<del> # objects.
<del> sig.freeze(self.request.id)
<del> if "link" in sig.options:
<del> final_task_links = sig.tasks[-1].options.setdefault("link", [])
<del> final_task_links.extend(maybe_list(sig.options["link"]))
<del> # Construct the new remainder of the task by chaining the signature
<del> # we're being replaced by with signatures constructed from the
<del> # chain elements in the current request.
<del> for t in reversed(self.request.chain):
<del> sig |= signature(t, app=self.app)
<del>
<add> # Groups get uplifted to a chord so that we can link onto the body
<add> sig |= self.app.tasks['celery.accumulate'].s(index=0)
<add> for callback in maybe_list(self.request.callbacks) or []:
<add> sig.link(callback)
<add> for errback in maybe_list(self.request.errbacks) or []:
<add> sig.link_error(errback)
<add> # If the replacement signature is a chain, we need to push callbacks
<add> # down to the final task so they run at the right time even if we
<add> # proceed to link further tasks from the original request below
<add> if isinstance(sig, _chain) and "link" in sig.options:
<add> final_task_links = sig.tasks[-1].options.setdefault("link", [])
<add> final_task_links.extend(maybe_list(sig.options["link"]))
<add> # We need to freeze the replacement signature with the current task's
<add> # ID to ensure that we don't disassociate it from the existing task IDs
<add> # which would break previously constructed results objects.
<add> sig.freeze(self.request.id)
<add> # Ensure the important options from the original signature are retained
<ide> sig.set(
<ide> chord=chord,
<ide> group_id=self.request.group,
<ide> group_index=self.request.group_index,
<ide> root_id=self.request.root_id,
<ide> )
<del> sig.freeze(self.request.id)
<del>
<add> # If the task being replaced is part of a chain, we need to re-create
<add> # it with the replacement signature - these subsequent tasks will
<add> # retain their original task IDs as well
<add> for t in reversed(self.request.chain or []):
<add> sig |= signature(t, app=self.app)
<add> # Finally, either apply or delay the new signature!
<ide> if self.request.is_eager:
<ide> return sig.apply().get()
<ide> else:
<ide><path>celery/canvas.py
<ide> def apply_async(self, args=None, kwargs=None, **options):
<ide>
<ide> def run(self, args=None, kwargs=None, group_id=None, chord=None,
<ide> task_id=None, link=None, link_error=None, publisher=None,
<del> producer=None, root_id=None, parent_id=None, app=None, **options):
<add> producer=None, root_id=None, parent_id=None, app=None,
<add> group_index=None, **options):
<ide> # pylint: disable=redefined-outer-name
<ide> # XXX chord is also a class in outer scope.
<ide> args = args if args else ()
<ide> def run(self, args=None, kwargs=None, group_id=None, chord=None,
<ide>
<ide> tasks, results_from_prepare = self.prepare_steps(
<ide> args, kwargs, self.tasks, root_id, parent_id, link_error, app,
<del> task_id, group_id, chord,
<add> task_id, group_id, chord, group_index=group_index,
<ide> )
<ide>
<ide> if results_from_prepare:
<ide><path>t/integration/tasks.py
<ide> def retry_once_priority(self, *args, expires=60.0, max_retries=1,
<ide>
<ide>
<ide> @shared_task
<del>def redis_echo(message):
<add>def redis_echo(message, redis_key="redis-echo"):
<ide> """Task that appends the message to a redis list."""
<ide> redis_connection = get_redis_connection()
<del> redis_connection.rpush('redis-echo', message)
<add> redis_connection.rpush(redis_key, message)
<ide>
<ide>
<ide> @shared_task
<del>def redis_count():
<del> """Task that increments a well-known redis key."""
<add>def redis_count(redis_key="redis-count"):
<add> """Task that increments a specified or well-known redis key."""
<ide> redis_connection = get_redis_connection()
<del> redis_connection.incr('redis-count')
<add> redis_connection.incr(redis_key)
<ide>
<ide>
<ide> @shared_task(bind=True)
<ide> def fail(*args):
<ide> raise ExpectedException(*args)
<ide>
<ide>
<add>@shared_task(bind=True)
<add>def fail_replaced(self, *args):
<add> """Replace this task with one which raises ExpectedException."""
<add> raise self.replace(fail.si(*args))
<add>
<add>
<ide> @shared_task
<ide> def chord_error(*args):
<ide> return args
<ide><path>t/integration/test_canvas.py
<add>import collections
<ide> import re
<ide> import tempfile
<ide> import uuid
<ide> from .tasks import (ExpectedException, add, add_chord_to_chord, add_replaced,
<ide> add_to_all, add_to_all_to_chord, build_chain_inside_task,
<ide> chord_error, collect_ids, delayed_sum,
<del> delayed_sum_with_soft_guard, fail, identity, ids,
<del> print_unicode, raise_error, redis_count, redis_echo,
<del> replace_with_chain, replace_with_chain_which_raises,
<del> replace_with_empty_chain, retry_once, return_exception,
<del> return_priority, second_order_replace1, tsum,
<del> write_to_file_and_return_int)
<add> delayed_sum_with_soft_guard, fail, fail_replaced,
<add> identity, ids, print_unicode, raise_error, redis_count,
<add> redis_echo, replace_with_chain,
<add> replace_with_chain_which_raises, replace_with_empty_chain,
<add> retry_once, return_exception, return_priority,
<add> second_order_replace1, tsum, write_to_file_and_return_int)
<ide>
<ide> RETRYABLE_EXCEPTIONS = (OSError, ConnectionError, TimeoutError)
<ide>
<ide> def flaky(fn):
<ide> return _timeout(_flaky(fn))
<ide>
<ide>
<add>def await_redis_echo(expected_msgs, redis_key="redis-echo", timeout=TIMEOUT):
<add> """
<add> Helper to wait for a specified or well-known redis key to contain a string.
<add> """
<add> redis_connection = get_redis_connection()
<add>
<add> if isinstance(expected_msgs, (str, bytes, bytearray)):
<add> expected_msgs = (expected_msgs, )
<add> expected_msgs = collections.Counter(
<add> e if not isinstance(e, str) else e.encode("utf-8")
<add> for e in expected_msgs
<add> )
<add>
<add> # This can technically wait for `len(expected_msg_or_msgs) * timeout` :/
<add> while +expected_msgs:
<add> maybe_key_msg = redis_connection.blpop(redis_key, timeout)
<add> if maybe_key_msg is None:
<add> raise TimeoutError(
<add> "Fetching from {!r} timed out - still awaiting {!r}"
<add> .format(redis_key, dict(+expected_msgs))
<add> )
<add> retrieved_key, msg = maybe_key_msg
<add> assert retrieved_key.decode("utf-8") == redis_key
<add> expected_msgs[msg] -= 1 # silently accepts unexpected messages
<add>
<add> # There should be no more elements - block momentarily
<add> assert redis_connection.blpop(redis_key, min(1, timeout)) is None
<add>
<add>
<add>def await_redis_count(expected_count, redis_key="redis-count", timeout=TIMEOUT):
<add> """
<add> Helper to wait for a specified or well-known redis key to count to a value.
<add> """
<add> redis_connection = get_redis_connection()
<add>
<add> check_interval = 0.1
<add> check_max = int(timeout / check_interval)
<add> for i in range(check_max + 1):
<add> maybe_count = redis_connection.get(redis_key)
<add> # It's either `None` or a base-10 integer
<add> if maybe_count is not None:
<add> count = int(maybe_count)
<add> if count == expected_count:
<add> break
<add> elif i >= check_max:
<add> assert count == expected_count
<add> # try again later
<add> sleep(check_interval)
<add> else:
<add> raise TimeoutError("{!r} was never incremented".format(redis_key))
<add>
<add> # There should be no more increments - block momentarily
<add> sleep(min(1, timeout))
<add> assert int(redis_connection.get(redis_key)) == expected_count
<add>
<add>
<ide> class test_link_error:
<ide> @flaky
<ide> def test_link_error_eager(self):
<ide> def test_chain_replaced_with_a_chain_and_a_callback(self, manager):
<ide> res = c.delay()
<ide>
<ide> assert res.get(timeout=TIMEOUT) == 'Hello world'
<del>
<del> expected_msgs = {link_msg, }
<del> while expected_msgs:
<del> maybe_key_msg = redis_connection.blpop('redis-echo', TIMEOUT)
<del> if maybe_key_msg is None:
<del> raise TimeoutError('redis-echo')
<del> _, msg = maybe_key_msg
<del> msg = msg.decode()
<del> expected_msgs.remove(msg) # KeyError if `msg` is not in here
<del>
<del> # There should be no more elements - block momentarily
<del> assert redis_connection.blpop('redis-echo', min(1, TIMEOUT)) is None
<del> redis_connection.delete('redis-echo')
<add> await_redis_echo({link_msg, })
<ide>
<ide> def test_chain_replaced_with_a_chain_and_an_error_callback(self, manager):
<ide> if not manager.app.conf.result_backend.startswith('redis'):
<ide> def test_chain_replaced_with_a_chain_and_an_error_callback(self, manager):
<ide>
<ide> with pytest.raises(ValueError):
<ide> res.get(timeout=TIMEOUT)
<del>
<del> expected_msgs = {link_msg, }
<del> while expected_msgs:
<del> maybe_key_msg = redis_connection.blpop('redis-echo', TIMEOUT)
<del> if maybe_key_msg is None:
<del> raise TimeoutError('redis-echo')
<del> _, msg = maybe_key_msg
<del> msg = msg.decode()
<del> expected_msgs.remove(msg) # KeyError if `msg` is not in here
<del>
<del> # There should be no more elements - block momentarily
<del> assert redis_connection.blpop('redis-echo', min(1, TIMEOUT)) is None
<del> redis_connection.delete('redis-echo')
<add> await_redis_echo({link_msg, })
<ide>
<ide> def test_chain_with_cb_replaced_with_chain_with_cb(self, manager):
<ide> if not manager.app.conf.result_backend.startswith('redis'):
<ide> def test_chain_with_cb_replaced_with_chain_with_cb(self, manager):
<ide> res = c.delay()
<ide>
<ide> assert res.get(timeout=TIMEOUT) == 'Hello world'
<add> await_redis_echo({link_msg, 'Hello world'})
<ide>
<del> expected_msgs = {link_msg, 'Hello world'}
<del> while expected_msgs:
<del> maybe_key_msg = redis_connection.blpop('redis-echo', TIMEOUT)
<del> if maybe_key_msg is None:
<del> raise TimeoutError('redis-echo')
<del> _, msg = maybe_key_msg
<del> msg = msg.decode()
<del> expected_msgs.remove(msg) # KeyError if `msg` is not in here
<del>
<del> # There should be no more elements - block momentarily
<del> assert redis_connection.blpop('redis-echo', min(1, TIMEOUT)) is None
<del> redis_connection.delete('redis-echo')
<del>
<del> @pytest.mark.xfail(reason="#6441")
<del> def test_chain_with_eb_replaced_with_chain_with_eb(self, manager):
<add> def test_chain_with_eb_replaced_with_chain_with_eb(
<add> self, manager, subtests
<add> ):
<ide> if not manager.app.conf.result_backend.startswith('redis'):
<ide> raise pytest.skip('Requires redis result backend.')
<ide>
<ide> def test_chain_with_eb_replaced_with_chain_with_eb(self, manager):
<ide> outer_link_msg = 'External chain errback'
<ide> c = chain(
<ide> identity.s('Hello '),
<del> # The replacement chain will pass its args though
<add> # The replacement chain will die and break the encapsulating chain
<ide> replace_with_chain_which_raises.s(link_msg=inner_link_msg),
<ide> add.s('world'),
<ide> )
<del> c.link_error(redis_echo.s(outer_link_msg))
<add> c.link_error(redis_echo.si(outer_link_msg))
<ide> res = c.delay()
<ide>
<del> with pytest.raises(ValueError):
<del> res.get(timeout=TIMEOUT)
<del>
<del> expected_msgs = {inner_link_msg, outer_link_msg}
<del> while expected_msgs:
<del> # Shorter timeout here because we expect failure
<del> timeout = min(5, TIMEOUT)
<del> maybe_key_msg = redis_connection.blpop('redis-echo', timeout)
<del> if maybe_key_msg is None:
<del> raise TimeoutError('redis-echo')
<del> _, msg = maybe_key_msg
<del> msg = msg.decode()
<del> expected_msgs.remove(msg) # KeyError if `msg` is not in here
<del>
<del> # There should be no more elements - block momentarily
<del> assert redis_connection.blpop('redis-echo', min(1, TIMEOUT)) is None
<del> redis_connection.delete('redis-echo')
<add> with subtests.test(msg="Chain fails due to a child task dying"):
<add> with pytest.raises(ValueError):
<add> res.get(timeout=TIMEOUT)
<add> with subtests.test(msg="Chain and child task callbacks are called"):
<add> await_redis_echo({inner_link_msg, outer_link_msg})
<ide>
<ide> def test_replace_chain_with_empty_chain(self, manager):
<ide> r = chain(identity.s(1), replace_with_empty_chain.s()).delay()
<ide> def test_replace_chain_with_empty_chain(self, manager):
<ide> match="Cannot replace with an empty chain"):
<ide> r.get(timeout=TIMEOUT)
<ide>
<add> def test_chain_children_with_callbacks(self, manager, subtests):
<add> if not manager.app.conf.result_backend.startswith("redis"):
<add> raise pytest.skip("Requires redis result backend.")
<add> redis_connection = get_redis_connection()
<add>
<add> redis_key = str(uuid.uuid4())
<add> callback = redis_count.si(redis_key=redis_key)
<add>
<add> child_task_count = 42
<add> child_sig = identity.si(1337)
<add> child_sig.link(callback)
<add> chain_sig = chain(child_sig for _ in range(child_task_count))
<add>
<add> redis_connection.delete(redis_key)
<add> with subtests.test(msg="Chain executes as expected"):
<add> res_obj = chain_sig()
<add> assert res_obj.get(timeout=TIMEOUT) == 1337
<add> with subtests.test(msg="Chain child task callbacks are called"):
<add> await_redis_count(child_task_count, redis_key=redis_key)
<add> redis_connection.delete(redis_key)
<add>
<add> def test_chain_children_with_errbacks(self, manager, subtests):
<add> if not manager.app.conf.result_backend.startswith("redis"):
<add> raise pytest.skip("Requires redis result backend.")
<add> redis_connection = get_redis_connection()
<add>
<add> redis_key = str(uuid.uuid4())
<add> errback = redis_count.si(redis_key=redis_key)
<add>
<add> child_task_count = 42
<add> child_sig = fail.si()
<add> child_sig.link_error(errback)
<add> chain_sig = chain(child_sig for _ in range(child_task_count))
<add>
<add> redis_connection.delete(redis_key)
<add> with subtests.test(msg="Chain fails due to a child task dying"):
<add> res_obj = chain_sig()
<add> with pytest.raises(ExpectedException):
<add> res_obj.get(timeout=TIMEOUT)
<add> with subtests.test(msg="Chain child task errbacks are called"):
<add> # Only the first child task gets a change to run and fail
<add> await_redis_count(1, redis_key=redis_key)
<add> redis_connection.delete(redis_key)
<add>
<add> def test_chain_with_callback_child_replaced(self, manager, subtests):
<add> if not manager.app.conf.result_backend.startswith("redis"):
<add> raise pytest.skip("Requires redis result backend.")
<add> redis_connection = get_redis_connection()
<add>
<add> redis_key = str(uuid.uuid4())
<add> callback = redis_count.si(redis_key=redis_key)
<add>
<add> chain_sig = chain(add_replaced.si(42, 1337), identity.s())
<add> chain_sig.link(callback)
<add>
<add> redis_connection.delete(redis_key)
<add> with subtests.test(msg="Chain executes as expected"):
<add> res_obj = chain_sig()
<add> assert res_obj.get(timeout=TIMEOUT) == 42 + 1337
<add> with subtests.test(msg="Callback is called after chain finishes"):
<add> await_redis_count(1, redis_key=redis_key)
<add> redis_connection.delete(redis_key)
<add>
<add> def test_chain_with_errback_child_replaced(self, manager, subtests):
<add> if not manager.app.conf.result_backend.startswith("redis"):
<add> raise pytest.skip("Requires redis result backend.")
<add> redis_connection = get_redis_connection()
<add>
<add> redis_key = str(uuid.uuid4())
<add> errback = redis_count.si(redis_key=redis_key)
<add>
<add> chain_sig = chain(add_replaced.si(42, 1337), fail.s())
<add> chain_sig.link_error(errback)
<add>
<add> redis_connection.delete(redis_key)
<add> with subtests.test(msg="Chain executes as expected"):
<add> res_obj = chain_sig()
<add> with pytest.raises(ExpectedException):
<add> res_obj.get(timeout=TIMEOUT)
<add> with subtests.test(msg="Errback is called after chain finishes"):
<add> await_redis_count(1, redis_key=redis_key)
<add> redis_connection.delete(redis_key)
<add>
<add> def test_chain_child_with_callback_replaced(self, manager, subtests):
<add> if not manager.app.conf.result_backend.startswith("redis"):
<add> raise pytest.skip("Requires redis result backend.")
<add> redis_connection = get_redis_connection()
<add>
<add> redis_key = str(uuid.uuid4())
<add> callback = redis_count.si(redis_key=redis_key)
<add>
<add> child_sig = add_replaced.si(42, 1337)
<add> child_sig.link(callback)
<add> chain_sig = chain(child_sig, identity.s())
<add>
<add> redis_connection.delete(redis_key)
<add> with subtests.test(msg="Chain executes as expected"):
<add> res_obj = chain_sig()
<add> assert res_obj.get(timeout=TIMEOUT) == 42 + 1337
<add> with subtests.test(msg="Callback is called after chain finishes"):
<add> await_redis_count(1, redis_key=redis_key)
<add> redis_connection.delete(redis_key)
<add>
<add> def test_chain_child_with_errback_replaced(self, manager, subtests):
<add> if not manager.app.conf.result_backend.startswith("redis"):
<add> raise pytest.skip("Requires redis result backend.")
<add> redis_connection = get_redis_connection()
<add>
<add> redis_key = str(uuid.uuid4())
<add> errback = redis_count.si(redis_key=redis_key)
<add>
<add> child_sig = fail_replaced.si()
<add> child_sig.link_error(errback)
<add> chain_sig = chain(child_sig, identity.si(42))
<add>
<add> redis_connection.delete(redis_key)
<add> with subtests.test(msg="Chain executes as expected"):
<add> res_obj = chain_sig()
<add> with pytest.raises(ExpectedException):
<add> res_obj.get(timeout=TIMEOUT)
<add> with subtests.test(msg="Errback is called after chain finishes"):
<add> await_redis_count(1, redis_key=redis_key)
<add> redis_connection.delete(redis_key)
<add>
<add> def test_task_replaced_with_chain(self):
<add> orig_sig = replace_with_chain.si(42)
<add> res_obj = orig_sig.delay()
<add> assert res_obj.get(timeout=TIMEOUT) == 42
<add>
<add> def test_chain_child_replaced_with_chain_first(self):
<add> orig_sig = chain(replace_with_chain.si(42), identity.s())
<add> res_obj = orig_sig.delay()
<add> assert res_obj.get(timeout=TIMEOUT) == 42
<add>
<add> def test_chain_child_replaced_with_chain_middle(self):
<add> orig_sig = chain(
<add> identity.s(42), replace_with_chain.s(), identity.s()
<add> )
<add> res_obj = orig_sig.delay()
<add> assert res_obj.get(timeout=TIMEOUT) == 42
<add>
<add> def test_chain_child_replaced_with_chain_last(self):
<add> orig_sig = chain(identity.s(42), replace_with_chain.s())
<add> res_obj = orig_sig.delay()
<add> assert res_obj.get(timeout=TIMEOUT) == 42
<add>
<ide>
<ide> class test_result_set:
<ide>
<ide> def test_callback_called_by_group(self, manager, subtests):
<ide> redis_connection = get_redis_connection()
<ide>
<ide> callback_msg = str(uuid.uuid4()).encode()
<del> callback = redis_echo.si(callback_msg)
<add> redis_key = str(uuid.uuid4())
<add> callback = redis_echo.si(callback_msg, redis_key=redis_key)
<ide>
<ide> group_sig = group(identity.si(42), identity.si(1337))
<ide> group_sig.link(callback)
<del> redis_connection.delete("redis-echo")
<add> redis_connection.delete(redis_key)
<ide> with subtests.test(msg="Group result is returned"):
<ide> res = group_sig.delay()
<ide> assert res.get(timeout=TIMEOUT) == [42, 1337]
<ide> with subtests.test(msg="Callback is called after group is completed"):
<del> maybe_key_msg = redis_connection.blpop("redis-echo", TIMEOUT)
<del> if maybe_key_msg is None:
<del> raise TimeoutError("Callback was not called in time")
<del> _, msg = maybe_key_msg
<del> assert msg == callback_msg
<add> await_redis_echo({callback_msg, }, redis_key=redis_key)
<add> redis_connection.delete(redis_key)
<ide>
<ide> def test_errback_called_by_group_fail_first(self, manager, subtests):
<ide> if not manager.app.conf.result_backend.startswith("redis"):
<ide> raise pytest.skip("Requires redis result backend.")
<ide> redis_connection = get_redis_connection()
<ide>
<ide> errback_msg = str(uuid.uuid4()).encode()
<del> errback = redis_echo.si(errback_msg)
<add> redis_key = str(uuid.uuid4())
<add> errback = redis_echo.si(errback_msg, redis_key=redis_key)
<ide>
<ide> group_sig = group(fail.s(), identity.si(42))
<ide> group_sig.link_error(errback)
<del> redis_connection.delete("redis-echo")
<add> redis_connection.delete(redis_key)
<ide> with subtests.test(msg="Error propagates from group"):
<ide> res = group_sig.delay()
<ide> with pytest.raises(ExpectedException):
<ide> res.get(timeout=TIMEOUT)
<ide> with subtests.test(msg="Errback is called after group task fails"):
<del> maybe_key_msg = redis_connection.blpop("redis-echo", TIMEOUT)
<del> if maybe_key_msg is None:
<del> raise TimeoutError("Errback was not called in time")
<del> _, msg = maybe_key_msg
<del> assert msg == errback_msg
<add> await_redis_echo({errback_msg, }, redis_key=redis_key)
<add> redis_connection.delete(redis_key)
<ide>
<ide> def test_errback_called_by_group_fail_last(self, manager, subtests):
<ide> if not manager.app.conf.result_backend.startswith("redis"):
<ide> raise pytest.skip("Requires redis result backend.")
<ide> redis_connection = get_redis_connection()
<ide>
<ide> errback_msg = str(uuid.uuid4()).encode()
<del> errback = redis_echo.si(errback_msg)
<add> redis_key = str(uuid.uuid4())
<add> errback = redis_echo.si(errback_msg, redis_key=redis_key)
<ide>
<ide> group_sig = group(identity.si(42), fail.s())
<ide> group_sig.link_error(errback)
<del> redis_connection.delete("redis-echo")
<add> redis_connection.delete(redis_key)
<ide> with subtests.test(msg="Error propagates from group"):
<ide> res = group_sig.delay()
<ide> with pytest.raises(ExpectedException):
<ide> res.get(timeout=TIMEOUT)
<ide> with subtests.test(msg="Errback is called after group task fails"):
<del> maybe_key_msg = redis_connection.blpop("redis-echo", TIMEOUT)
<del> if maybe_key_msg is None:
<del> raise TimeoutError("Errback was not called in time")
<del> _, msg = maybe_key_msg
<del> assert msg == errback_msg
<add> await_redis_echo({errback_msg, }, redis_key=redis_key)
<add> redis_connection.delete(redis_key)
<ide>
<ide> def test_errback_called_by_group_fail_multiple(self, manager, subtests):
<ide> if not manager.app.conf.result_backend.startswith("redis"):
<ide> raise pytest.skip("Requires redis result backend.")
<ide> redis_connection = get_redis_connection()
<ide>
<ide> expected_errback_count = 42
<del> errback = redis_count.si()
<add> redis_key = str(uuid.uuid4())
<add> errback = redis_count.si(redis_key=redis_key)
<ide>
<ide> # Include a mix of passing and failing tasks
<ide> group_sig = group(
<ide> *(identity.si(42) for _ in range(24)), # arbitrary task count
<ide> *(fail.s() for _ in range(expected_errback_count)),
<ide> )
<ide> group_sig.link_error(errback)
<del> redis_connection.delete("redis-count")
<add>
<add> redis_connection.delete(redis_key)
<ide> with subtests.test(msg="Error propagates from group"):
<ide> res = group_sig.delay()
<ide> with pytest.raises(ExpectedException):
<ide> res.get(timeout=TIMEOUT)
<ide> with subtests.test(msg="Errback is called after group task fails"):
<del> check_interval = 0.1
<del> check_max = int(TIMEOUT * check_interval)
<del> for i in range(check_max + 1):
<del> maybe_count = redis_connection.get("redis-count")
<del> # It's either `None` or a base-10 integer
<del> count = int(maybe_count or b"0")
<del> if count == expected_errback_count:
<del> # escape and pass
<del> break
<del> elif i < check_max:
<del> # try again later
<del> sleep(check_interval)
<del> else:
<del> # fail
<del> assert count == expected_errback_count
<del> else:
<del> raise TimeoutError("Errbacks were not called in time")
<add> await_redis_count(expected_errback_count, redis_key=redis_key)
<add> redis_connection.delete(redis_key)
<add>
<add> def test_group_children_with_callbacks(self, manager, subtests):
<add> if not manager.app.conf.result_backend.startswith("redis"):
<add> raise pytest.skip("Requires redis result backend.")
<add> redis_connection = get_redis_connection()
<add>
<add> redis_key = str(uuid.uuid4())
<add> callback = redis_count.si(redis_key=redis_key)
<add>
<add> child_task_count = 42
<add> child_sig = identity.si(1337)
<add> child_sig.link(callback)
<add> group_sig = group(child_sig for _ in range(child_task_count))
<add>
<add> redis_connection.delete(redis_key)
<add> with subtests.test(msg="Chain executes as expected"):
<add> res_obj = group_sig()
<add> assert res_obj.get(timeout=TIMEOUT) == [1337] * child_task_count
<add> with subtests.test(msg="Chain child task callbacks are called"):
<add> await_redis_count(child_task_count, redis_key=redis_key)
<add> redis_connection.delete(redis_key)
<add>
<add> def test_group_children_with_errbacks(self, manager, subtests):
<add> if not manager.app.conf.result_backend.startswith("redis"):
<add> raise pytest.skip("Requires redis result backend.")
<add> redis_connection = get_redis_connection()
<add>
<add> redis_key = str(uuid.uuid4())
<add> errback = redis_count.si(redis_key=redis_key)
<add>
<add> child_task_count = 42
<add> child_sig = fail.si()
<add> child_sig.link_error(errback)
<add> group_sig = group(child_sig for _ in range(child_task_count))
<add>
<add> redis_connection.delete(redis_key)
<add> with subtests.test(msg="Chain fails due to a child task dying"):
<add> res_obj = group_sig()
<add> with pytest.raises(ExpectedException):
<add> res_obj.get(timeout=TIMEOUT)
<add> with subtests.test(msg="Chain child task errbacks are called"):
<add> await_redis_count(child_task_count, redis_key=redis_key)
<add> redis_connection.delete(redis_key)
<add>
<add> def test_group_with_callback_child_replaced(self, manager, subtests):
<add> if not manager.app.conf.result_backend.startswith("redis"):
<add> raise pytest.skip("Requires redis result backend.")
<add> redis_connection = get_redis_connection()
<add>
<add> redis_key = str(uuid.uuid4())
<add> callback = redis_count.si(redis_key=redis_key)
<add>
<add> group_sig = group(add_replaced.si(42, 1337), identity.si(31337))
<add> group_sig.link(callback)
<add>
<add> redis_connection.delete(redis_key)
<add> with subtests.test(msg="Chain executes as expected"):
<add> res_obj = group_sig()
<add> assert res_obj.get(timeout=TIMEOUT) == [42 + 1337, 31337]
<add> with subtests.test(msg="Callback is called after group finishes"):
<add> await_redis_count(1, redis_key=redis_key)
<add> redis_connection.delete(redis_key)
<add>
<add> def test_group_with_errback_child_replaced(self, manager, subtests):
<add> if not manager.app.conf.result_backend.startswith("redis"):
<add> raise pytest.skip("Requires redis result backend.")
<add> redis_connection = get_redis_connection()
<add>
<add> redis_key = str(uuid.uuid4())
<add> errback = redis_count.si(redis_key=redis_key)
<add>
<add> group_sig = group(add_replaced.si(42, 1337), fail.s())
<add> group_sig.link_error(errback)
<add>
<add> redis_connection.delete(redis_key)
<add> with subtests.test(msg="Chain executes as expected"):
<add> res_obj = group_sig()
<add> with pytest.raises(ExpectedException):
<add> res_obj.get(timeout=TIMEOUT)
<add> with subtests.test(msg="Errback is called after group finishes"):
<add> await_redis_count(1, redis_key=redis_key)
<add> redis_connection.delete(redis_key)
<add>
<add> def test_group_child_with_callback_replaced(self, manager, subtests):
<add> if not manager.app.conf.result_backend.startswith("redis"):
<add> raise pytest.skip("Requires redis result backend.")
<add> redis_connection = get_redis_connection()
<add>
<add> redis_key = str(uuid.uuid4())
<add> callback = redis_count.si(redis_key=redis_key)
<add>
<add> child_sig = add_replaced.si(42, 1337)
<add> child_sig.link(callback)
<add> group_sig = group(child_sig, identity.si(31337))
<add>
<add> redis_connection.delete(redis_key)
<add> with subtests.test(msg="Chain executes as expected"):
<add> res_obj = group_sig()
<add> assert res_obj.get(timeout=TIMEOUT) == [42 + 1337, 31337]
<add> with subtests.test(msg="Callback is called after group finishes"):
<add> await_redis_count(1, redis_key=redis_key)
<add> redis_connection.delete(redis_key)
<add>
<add> def test_group_child_with_errback_replaced(self, manager, subtests):
<add> if not manager.app.conf.result_backend.startswith("redis"):
<add> raise pytest.skip("Requires redis result backend.")
<add> redis_connection = get_redis_connection()
<add>
<add> redis_key = str(uuid.uuid4())
<add> errback = redis_count.si(redis_key=redis_key)
<add>
<add> child_sig = fail_replaced.si()
<add> child_sig.link_error(errback)
<add> group_sig = group(child_sig, identity.si(42))
<add>
<add> redis_connection.delete(redis_key)
<add> with subtests.test(msg="Chain executes as expected"):
<add> res_obj = group_sig()
<add> with pytest.raises(ExpectedException):
<add> res_obj.get(timeout=TIMEOUT)
<add> with subtests.test(msg="Errback is called after group finishes"):
<add> await_redis_count(1, redis_key=redis_key)
<add> redis_connection.delete(redis_key)
<add>
<add> def test_group_child_replaced_with_chain_first(self):
<add> orig_sig = group(replace_with_chain.si(42), identity.s(1337))
<add> res_obj = orig_sig.delay()
<add> assert res_obj.get(timeout=TIMEOUT) == [42, 1337]
<add>
<add> def test_group_child_replaced_with_chain_middle(self):
<add> orig_sig = group(
<add> identity.s(42), replace_with_chain.s(1337), identity.s(31337)
<add> )
<add> res_obj = orig_sig.delay()
<add> assert res_obj.get(timeout=TIMEOUT) == [42, 1337, 31337]
<add>
<add> def test_group_child_replaced_with_chain_last(self):
<add> orig_sig = group(identity.s(42), replace_with_chain.s(1337))
<add> res_obj = orig_sig.delay()
<add> assert res_obj.get(timeout=TIMEOUT) == [42, 1337]
<ide>
<ide>
<ide> def assert_ids(r, expected_value, expected_root_id, expected_parent_id):
<ide> def test_errback_called_by_chord_from_simple(self, manager, subtests):
<ide> redis_connection = get_redis_connection()
<ide>
<ide> errback_msg = str(uuid.uuid4()).encode()
<del> errback = redis_echo.si(errback_msg)
<add> redis_key = str(uuid.uuid4())
<add> errback = redis_echo.si(errback_msg, redis_key=redis_key)
<ide> child_sig = fail.s()
<ide>
<ide> chord_sig = chord((child_sig, ), identity.s())
<ide> chord_sig.link_error(errback)
<add> redis_connection.delete(redis_key)
<ide> with subtests.test(msg="Error propagates from simple header task"):
<del> redis_connection.delete("redis-echo")
<ide> res = chord_sig.delay()
<ide> with pytest.raises(ExpectedException):
<ide> res.get(timeout=TIMEOUT)
<ide> with subtests.test(
<ide> msg="Errback is called after simple header task fails"
<ide> ):
<del> maybe_key_msg = redis_connection.blpop("redis-echo", TIMEOUT)
<del> if maybe_key_msg is None:
<del> raise TimeoutError("Errback was not called in time")
<del> _, msg = maybe_key_msg
<del> assert msg == errback_msg
<add> await_redis_echo({errback_msg, }, redis_key=redis_key)
<ide>
<ide> chord_sig = chord((identity.si(42), ), child_sig)
<ide> chord_sig.link_error(errback)
<add> redis_connection.delete(redis_key)
<ide> with subtests.test(msg="Error propagates from simple body task"):
<del> redis_connection.delete("redis-echo")
<ide> res = chord_sig.delay()
<ide> with pytest.raises(ExpectedException):
<ide> res.get(timeout=TIMEOUT)
<ide> with subtests.test(
<ide> msg="Errback is called after simple body task fails"
<ide> ):
<del> maybe_key_msg = redis_connection.blpop("redis-echo", TIMEOUT)
<del> if maybe_key_msg is None:
<del> raise TimeoutError("Errback was not called in time")
<del> _, msg = maybe_key_msg
<del> assert msg == errback_msg
<add> await_redis_echo({errback_msg, }, redis_key=redis_key)
<add> redis_connection.delete(redis_key)
<ide>
<ide> def test_error_propagates_to_chord_from_chain(self, manager, subtests):
<ide> try:
<ide> def test_errback_called_by_chord_from_chain(self, manager, subtests):
<ide> redis_connection = get_redis_connection()
<ide>
<ide> errback_msg = str(uuid.uuid4()).encode()
<del> errback = redis_echo.si(errback_msg)
<add> redis_key = str(uuid.uuid4())
<add> errback = redis_echo.si(errback_msg, redis_key=redis_key)
<ide> child_sig = chain(identity.si(42), fail.s(), identity.si(42))
<ide>
<ide> chord_sig = chord((child_sig, ), identity.s())
<ide> chord_sig.link_error(errback)
<add> redis_connection.delete(redis_key)
<ide> with subtests.test(
<ide> msg="Error propagates from header chain which fails before the end"
<ide> ):
<del> redis_connection.delete("redis-echo")
<ide> res = chord_sig.delay()
<ide> with pytest.raises(ExpectedException):
<ide> res.get(timeout=TIMEOUT)
<ide> with subtests.test(
<ide> msg="Errback is called after header chain which fails before the end"
<ide> ):
<del> maybe_key_msg = redis_connection.blpop("redis-echo", TIMEOUT)
<del> if maybe_key_msg is None:
<del> raise TimeoutError("Errback was not called in time")
<del> _, msg = maybe_key_msg
<del> assert msg == errback_msg
<add> await_redis_echo({errback_msg, }, redis_key=redis_key)
<ide>
<ide> chord_sig = chord((identity.si(42), ), child_sig)
<ide> chord_sig.link_error(errback)
<add> redis_connection.delete(redis_key)
<ide> with subtests.test(
<ide> msg="Error propagates from body chain which fails before the end"
<ide> ):
<del> redis_connection.delete("redis-echo")
<ide> res = chord_sig.delay()
<ide> with pytest.raises(ExpectedException):
<ide> res.get(timeout=TIMEOUT)
<ide> with subtests.test(
<ide> msg="Errback is called after body chain which fails before the end"
<ide> ):
<del> maybe_key_msg = redis_connection.blpop("redis-echo", TIMEOUT)
<del> if maybe_key_msg is None:
<del> raise TimeoutError("Errback was not called in time")
<del> _, msg = maybe_key_msg
<del> assert msg == errback_msg
<add> await_redis_echo({errback_msg, }, redis_key=redis_key)
<add> redis_connection.delete(redis_key)
<ide>
<ide> def test_error_propagates_to_chord_from_chain_tail(self, manager, subtests):
<ide> try:
<ide> def test_errback_called_by_chord_from_chain_tail(self, manager, subtests):
<ide> redis_connection = get_redis_connection()
<ide>
<ide> errback_msg = str(uuid.uuid4()).encode()
<del> errback = redis_echo.si(errback_msg)
<add> redis_key = str(uuid.uuid4())
<add> errback = redis_echo.si(errback_msg, redis_key=redis_key)
<ide> child_sig = chain(identity.si(42), fail.s())
<ide>
<ide> chord_sig = chord((child_sig, ), identity.s())
<ide> chord_sig.link_error(errback)
<add> redis_connection.delete(redis_key)
<ide> with subtests.test(
<ide> msg="Error propagates from header chain which fails at the end"
<ide> ):
<del> redis_connection.delete("redis-echo")
<ide> res = chord_sig.delay()
<ide> with pytest.raises(ExpectedException):
<ide> res.get(timeout=TIMEOUT)
<ide> with subtests.test(
<ide> msg="Errback is called after header chain which fails at the end"
<ide> ):
<del> maybe_key_msg = redis_connection.blpop("redis-echo", TIMEOUT)
<del> if maybe_key_msg is None:
<del> raise TimeoutError("Errback was not called in time")
<del> _, msg = maybe_key_msg
<del> assert msg == errback_msg
<add> await_redis_echo({errback_msg, }, redis_key=redis_key)
<ide>
<ide> chord_sig = chord((identity.si(42), ), child_sig)
<ide> chord_sig.link_error(errback)
<add> redis_connection.delete(redis_key)
<ide> with subtests.test(
<ide> msg="Error propagates from body chain which fails at the end"
<ide> ):
<del> redis_connection.delete("redis-echo")
<ide> res = chord_sig.delay()
<ide> with pytest.raises(ExpectedException):
<ide> res.get(timeout=TIMEOUT)
<ide> with subtests.test(
<ide> msg="Errback is called after body chain which fails at the end"
<ide> ):
<del> maybe_key_msg = redis_connection.blpop("redis-echo", TIMEOUT)
<del> if maybe_key_msg is None:
<del> raise TimeoutError("Errback was not called in time")
<del> _, msg = maybe_key_msg
<del> assert msg == errback_msg
<add> await_redis_echo({errback_msg, }, redis_key=redis_key)
<add> redis_connection.delete(redis_key)
<ide>
<ide> def test_error_propagates_to_chord_from_group(self, manager, subtests):
<ide> try:
<ide> def test_errback_called_by_chord_from_group(self, manager, subtests):
<ide> redis_connection = get_redis_connection()
<ide>
<ide> errback_msg = str(uuid.uuid4()).encode()
<del> errback = redis_echo.si(errback_msg)
<add> redis_key = str(uuid.uuid4())
<add> errback = redis_echo.si(errback_msg, redis_key=redis_key)
<ide> child_sig = group(identity.si(42), fail.s())
<ide>
<ide> chord_sig = chord((child_sig, ), identity.s())
<ide> chord_sig.link_error(errback)
<add> redis_connection.delete(redis_key)
<ide> with subtests.test(msg="Error propagates from header group"):
<del> redis_connection.delete("redis-echo")
<ide> res = chord_sig.delay()
<ide> with pytest.raises(ExpectedException):
<ide> res.get(timeout=TIMEOUT)
<ide> with subtests.test(msg="Errback is called after header group fails"):
<del> maybe_key_msg = redis_connection.blpop("redis-echo", TIMEOUT)
<del> if maybe_key_msg is None:
<del> raise TimeoutError("Errback was not called in time")
<del> _, msg = maybe_key_msg
<del> assert msg == errback_msg
<add> await_redis_echo({errback_msg, }, redis_key=redis_key)
<ide>
<ide> chord_sig = chord((identity.si(42), ), child_sig)
<ide> chord_sig.link_error(errback)
<add> redis_connection.delete(redis_key)
<ide> with subtests.test(msg="Error propagates from body group"):
<del> redis_connection.delete("redis-echo")
<ide> res = chord_sig.delay()
<ide> with pytest.raises(ExpectedException):
<ide> res.get(timeout=TIMEOUT)
<ide> with subtests.test(msg="Errback is called after body group fails"):
<del> maybe_key_msg = redis_connection.blpop("redis-echo", TIMEOUT)
<del> if maybe_key_msg is None:
<del> raise TimeoutError("Errback was not called in time")
<del> _, msg = maybe_key_msg
<del> assert msg == errback_msg
<add> await_redis_echo({errback_msg, }, redis_key=redis_key)
<add> redis_connection.delete(redis_key)
<ide>
<ide> def test_errback_called_by_chord_from_group_fail_multiple(
<ide> self, manager, subtests
<ide> def test_errback_called_by_chord_from_group_fail_multiple(
<ide> redis_connection = get_redis_connection()
<ide>
<ide> fail_task_count = 42
<del> errback = redis_count.si()
<add> redis_key = str(uuid.uuid4())
<add> errback = redis_count.si(redis_key=redis_key)
<ide> # Include a mix of passing and failing tasks
<ide> child_sig = group(
<ide> *(identity.si(42) for _ in range(24)), # arbitrary task count
<ide> def test_errback_called_by_chord_from_group_fail_multiple(
<ide>
<ide> chord_sig = chord((child_sig, ), identity.s())
<ide> chord_sig.link_error(errback)
<add> redis_connection.delete(redis_key)
<ide> with subtests.test(msg="Error propagates from header group"):
<del> redis_connection.delete("redis-count")
<add> redis_connection.delete(redis_key)
<ide> res = chord_sig.delay()
<ide> with pytest.raises(ExpectedException):
<ide> res.get(timeout=TIMEOUT)
<ide> with subtests.test(msg="Errback is called after header group fails"):
<ide> # NOTE: Here we only expect the errback to be called once since it
<ide> # is attached to the chord body which is a single task!
<del> expected_errback_count = 1
<del> check_interval = 0.1
<del> check_max = int(TIMEOUT * check_interval)
<del> for i in range(check_max + 1):
<del> maybe_count = redis_connection.get("redis-count")
<del> # It's either `None` or a base-10 integer
<del> count = int(maybe_count or b"0")
<del> if count == expected_errback_count:
<del> # escape and pass
<del> break
<del> elif i < check_max:
<del> # try again later
<del> sleep(check_interval)
<del> else:
<del> # fail
<del> assert count == expected_errback_count
<del> else:
<del> raise TimeoutError("Errbacks were not called in time")
<add> await_redis_count(1, redis_key=redis_key)
<ide>
<ide> chord_sig = chord((identity.si(42), ), child_sig)
<ide> chord_sig.link_error(errback)
<add> redis_connection.delete(redis_key)
<ide> with subtests.test(msg="Error propagates from body group"):
<del> redis_connection.delete("redis-count")
<ide> res = chord_sig.delay()
<ide> with pytest.raises(ExpectedException):
<ide> res.get(timeout=TIMEOUT)
<ide> with subtests.test(msg="Errback is called after body group fails"):
<ide> # NOTE: Here we expect the errback to be called once per failing
<ide> # task in the chord body since it is a group
<del> expected_errback_count = fail_task_count
<del> check_interval = 0.1
<del> check_max = int(TIMEOUT * check_interval)
<del> for i in range(check_max + 1):
<del> maybe_count = redis_connection.get("redis-count")
<del> # It's either `None` or a base-10 integer
<del> count = int(maybe_count or b"0")
<del> if count == expected_errback_count:
<del> # escape and pass
<del> break
<del> elif i < check_max:
<del> # try again later
<del> sleep(check_interval)
<del> else:
<del> # fail
<del> assert count == expected_errback_count
<del> else:
<del> raise TimeoutError("Errbacks were not called in time")
<add> await_redis_count(fail_task_count, redis_key=redis_key)
<add> redis_connection.delete(redis_key)
<add>
<add> def test_chord_header_task_replaced_with_chain(self, manager):
<add> try:
<add> manager.app.backend.ensure_chords_allowed()
<add> except NotImplementedError as e:
<add> raise pytest.skip(e.args[0])
<add>
<add> orig_sig = chord(
<add> replace_with_chain.si(42),
<add> identity.s(),
<add> )
<add> res_obj = orig_sig.delay()
<add> assert res_obj.get(timeout=TIMEOUT) == [42]
<add>
<add> def test_chord_header_child_replaced_with_chain_first(self, manager):
<add> try:
<add> manager.app.backend.ensure_chords_allowed()
<add> except NotImplementedError as e:
<add> raise pytest.skip(e.args[0])
<add>
<add> orig_sig = chord(
<add> (replace_with_chain.si(42), identity.s(1337), ),
<add> identity.s(),
<add> )
<add> res_obj = orig_sig.delay()
<add> assert res_obj.get(timeout=TIMEOUT) == [42, 1337]
<add>
<add> def test_chord_header_child_replaced_with_chain_middle(self, manager):
<add> try:
<add> manager.app.backend.ensure_chords_allowed()
<add> except NotImplementedError as e:
<add> raise pytest.skip(e.args[0])
<add>
<add> orig_sig = chord(
<add> (identity.s(42), replace_with_chain.s(1337), identity.s(31337), ),
<add> identity.s(),
<add> )
<add> res_obj = orig_sig.delay()
<add> assert res_obj.get(timeout=TIMEOUT) == [42, 1337, 31337]
<add>
<add> def test_chord_header_child_replaced_with_chain_last(self, manager):
<add> try:
<add> manager.app.backend.ensure_chords_allowed()
<add> except NotImplementedError as e:
<add> raise pytest.skip(e.args[0])
<add>
<add> orig_sig = chord(
<add> (identity.s(42), replace_with_chain.s(1337), ),
<add> identity.s(),
<add> )
<add> res_obj = orig_sig.delay()
<add> assert res_obj.get(timeout=TIMEOUT) == [42, 1337]
<add>
<add> def test_chord_body_task_replaced_with_chain(self, manager):
<add> try:
<add> manager.app.backend.ensure_chords_allowed()
<add> except NotImplementedError as e:
<add> raise pytest.skip(e.args[0])
<add>
<add> orig_sig = chord(
<add> identity.s(42),
<add> replace_with_chain.s(),
<add> )
<add> res_obj = orig_sig.delay()
<add> assert res_obj.get(timeout=TIMEOUT) == [42]
<add>
<add> def test_chord_body_chain_child_replaced_with_chain_first(self, manager):
<add> try:
<add> manager.app.backend.ensure_chords_allowed()
<add> except NotImplementedError as e:
<add> raise pytest.skip(e.args[0])
<add>
<add> orig_sig = chord(
<add> identity.s(42),
<add> chain(replace_with_chain.s(), identity.s(), ),
<add> )
<add> res_obj = orig_sig.delay()
<add> assert res_obj.get(timeout=TIMEOUT) == [42]
<add>
<add> def test_chord_body_chain_child_replaced_with_chain_middle(self, manager):
<add> try:
<add> manager.app.backend.ensure_chords_allowed()
<add> except NotImplementedError as e:
<add> raise pytest.skip(e.args[0])
<add>
<add> orig_sig = chord(
<add> identity.s(42),
<add> chain(identity.s(), replace_with_chain.s(), identity.s(), ),
<add> )
<add> res_obj = orig_sig.delay()
<add> assert res_obj.get(timeout=TIMEOUT) == [42]
<add>
<add> def test_chord_body_chain_child_replaced_with_chain_last(self, manager):
<add> try:
<add> manager.app.backend.ensure_chords_allowed()
<add> except NotImplementedError as e:
<add> raise pytest.skip(e.args[0])
<add>
<add> orig_sig = chord(
<add> identity.s(42),
<add> chain(identity.s(), replace_with_chain.s(), ),
<add> )
<add> res_obj = orig_sig.delay()
<add> assert res_obj.get(timeout=TIMEOUT) == [42]
<ide>
<ide>
<ide> class test_signature_serialization:
<ide><path>t/unit/tasks/test_canvas.py
<ide> def test_apply_contains_chords_containing_empty_chain(self):
<ide> # This is an invalid setup because we can't complete a chord header if
<ide> # there are no actual tasks which will run in it. However, the current
<ide> # behaviour of an `IndexError` isn't particularly helpful to a user.
<del> res_obj = group_sig.apply_async()
<add> group_sig.apply_async()
<ide>
<ide> def test_apply_contains_chords_containing_chain_with_empty_tail(self):
<ide> ggchild_count = 42
<ide><path>t/unit/tasks/test_tasks.py
<ide> import socket
<ide> import tempfile
<ide> from datetime import datetime, timedelta
<del>from unittest.mock import ANY, MagicMock, Mock, patch
<add>from unittest.mock import ANY, MagicMock, Mock, call, patch, sentinel
<ide>
<ide> import pytest
<ide> from case import ContextMock
<ide> def test_send_event(self):
<ide> retry=True, retry_policy=self.app.conf.task_publish_retry_policy)
<ide>
<ide> def test_replace(self):
<del> sig1 = Mock(name='sig1')
<add> sig1 = MagicMock(name='sig1')
<ide> sig1.options = {}
<add> self.mytask.request.id = sentinel.request_id
<ide> with pytest.raises(Ignore):
<ide> self.mytask.replace(sig1)
<add> sig1.freeze.assert_called_once_with(self.mytask.request.id)
<ide>
<ide> def test_replace_with_chord(self):
<ide> sig1 = Mock(name='sig1')
<ide> sig1.options = {'chord': None}
<ide> with pytest.raises(ImproperlyConfigured):
<ide> self.mytask.replace(sig1)
<ide>
<del> @pytest.mark.usefixtures('depends_on_current_app')
<ide> def test_replace_callback(self):
<ide> c = group([self.mytask.s()], app=self.app)
<ide> c.freeze = Mock(name='freeze')
<ide> c.delay = Mock(name='delay')
<ide> self.mytask.request.id = 'id'
<ide> self.mytask.request.group = 'group'
<ide> self.mytask.request.root_id = 'root_id'
<del> self.mytask.request.callbacks = 'callbacks'
<del> self.mytask.request.errbacks = 'errbacks'
<del>
<del> class JsonMagicMock(MagicMock):
<del> parent = None
<del>
<del> def __json__(self):
<del> return 'whatever'
<del>
<del> def reprcall(self, *args, **kwargs):
<del> return 'whatever2'
<del>
<del> mocked_signature = JsonMagicMock(name='s')
<del> accumulate_mock = JsonMagicMock(name='accumulate', s=mocked_signature)
<del> self.mytask.app.tasks['celery.accumulate'] = accumulate_mock
<del>
<del> try:
<del> self.mytask.replace(c)
<del> except Ignore:
<del> mocked_signature.return_value.set.assert_called_with(
<del> link='callbacks',
<del> link_error='errbacks',
<del> )
<add> self.mytask.request.callbacks = callbacks = 'callbacks'
<add> self.mytask.request.errbacks = errbacks = 'errbacks'
<add>
<add> # Replacement groups get uplifted to chords so that we can accumulate
<add> # the results and link call/errbacks - patch the appropriate `chord`
<add> # methods so we can validate this behaviour
<add> with patch(
<add> "celery.canvas.chord.link"
<add> ) as mock_chord_link, patch(
<add> "celery.canvas.chord.link_error"
<add> ) as mock_chord_link_error:
<add> with pytest.raises(Ignore):
<add> self.mytask.replace(c)
<add> # Confirm that the call/errbacks on the original signature are linked
<add> # to the replacement signature as expected
<add> mock_chord_link.assert_called_once_with(callbacks)
<add> mock_chord_link_error.assert_called_once_with(errbacks)
<ide>
<ide> def test_replace_group(self):
<ide> c = group([self.mytask.s()], app=self.app) | 6 |
Javascript | Javascript | remove test repetition | 88d7904c0ba66bd2ced87f674d3f6ee098db970c | <ide><path>test/parallel/test-buffer.js
<ide> assert.equal(writeTest.toString(), 'nodejs');
<ide>
<ide> var asciiString = 'hello world';
<ide> var offset = 100;
<del>for (var j = 0; j < 500; j++) {
<ide>
<del> for (var i = 0; i < asciiString.length; i++) {
<del> b[i] = asciiString.charCodeAt(i);
<del> }
<del> var asciiSlice = b.toString('ascii', 0, asciiString.length);
<del> assert.equal(asciiString, asciiSlice);
<del>
<del> var written = b.write(asciiString, offset, 'ascii');
<del> assert.equal(asciiString.length, written);
<del> var asciiSlice = b.toString('ascii', offset, offset + asciiString.length);
<del> assert.equal(asciiString, asciiSlice);
<del>
<del> var sliceA = b.slice(offset, offset + asciiString.length);
<del> var sliceB = b.slice(offset, offset + asciiString.length);
<del> for (var i = 0; i < asciiString.length; i++) {
<del> assert.equal(sliceA[i], sliceB[i]);
<del> }
<del>
<del> // TODO utf8 slice tests
<add>for (var i = 0; i < asciiString.length; i++) {
<add> b[i] = asciiString.charCodeAt(i);
<add>}
<add>var asciiSlice = b.toString('ascii', 0, asciiString.length);
<add>assert.equal(asciiString, asciiSlice);
<add>
<add>var written = b.write(asciiString, offset, 'ascii');
<add>assert.equal(asciiString.length, written);
<add>var asciiSlice = b.toString('ascii', offset, offset + asciiString.length);
<add>assert.equal(asciiString, asciiSlice);
<add>
<add>var sliceA = b.slice(offset, offset + asciiString.length);
<add>var sliceB = b.slice(offset, offset + asciiString.length);
<add>for (var i = 0; i < asciiString.length; i++) {
<add> assert.equal(sliceA[i], sliceB[i]);
<ide> }
<ide>
<add>// TODO utf8 slice tests
<ide>
<del>for (var j = 0; j < 100; j++) {
<del> var slice = b.slice(100, 150);
<del> assert.equal(50, slice.length);
<del> for (var i = 0; i < 50; i++) {
<del> assert.equal(b[100 + i], slice[i]);
<del> }
<add>
<add>var slice = b.slice(100, 150);
<add>assert.equal(50, slice.length);
<add>for (var i = 0; i < 50; i++) {
<add> assert.equal(b[100 + i], slice[i]);
<ide> }
<ide>
<ide> | 1 |
Go | Go | fix image pipe with builder copy | 4390a3182f9ea4fc1e215917c2c6494d6fc453ac | <ide><path>builder.go
<ide> func (builder *Builder) Build(dockerfile io.Reader, stdout io.Writer) (*Image, e
<ide> return nil, err
<ide> }
<ide> fmt.Fprintf(stdout, "===> %s\n", base.ShortId())
<add>
<add> image = base
<add>
<ide> break
<ide> default:
<ide> fmt.Fprintf(stdout, "Skipping unknown op %s\n", tmp[0]) | 1 |
Text | Text | add redirect.action_dispatch to action dispatch | dc6ddbd2b0a95740e32586397acd6b63bbd7ee0f | <ide><path>guides/source/active_support_instrumentation.md
<ide> INFO. Additional keys may be added by the caller.
<ide> | ------------- | ---------------------- |
<ide> | `:middleware` | Name of the middleware |
<ide>
<add>#### redirect.action_dispatch
<add>
<add>| Key | Value |
<add>| ----------- | ------------------- |
<add>| `:status` | HTTP response code |
<add>| `:location` | URL to redirect to |
<add>
<ide> ### Action View
<ide>
<ide> #### render_template.action_view | 1 |
Java | Java | add clientrequest attributes | 74b4c028819c67b90d4d06d352d3083d91d8a3f8 | <ide><path>spring-test/src/main/java/org/springframework/test/web/reactive/server/DefaultWebTestClient.java
<ide> import org.springframework.web.reactive.function.client.WebClient;
<ide> import org.springframework.web.util.UriBuilder;
<ide>
<del>import static java.nio.charset.StandardCharsets.*;
<del>import static org.springframework.test.util.AssertionErrors.*;
<del>import static org.springframework.web.reactive.function.BodyExtractors.*;
<add>import static java.nio.charset.StandardCharsets.UTF_8;
<add>import static org.springframework.test.util.AssertionErrors.assertEquals;
<add>import static org.springframework.test.util.AssertionErrors.assertTrue;
<add>import static org.springframework.web.reactive.function.BodyExtractors.toFlux;
<add>import static org.springframework.web.reactive.function.BodyExtractors.toMono;
<ide>
<ide> /**
<ide> * Default implementation of {@link WebTestClient}.
<ide> public RequestBodySpec headers(Consumer<HttpHeaders> headersConsumer) {
<ide> return this;
<ide> }
<ide>
<add> @Override
<add> public RequestBodySpec attribute(String name, Object value) {
<add> this.bodySpec.attribute(name, value);
<add> return this;
<add> }
<add>
<add> @Override
<add> public RequestBodySpec attributes(
<add> Consumer<Map<String, Object>> attributesConsumer) {
<add> this.bodySpec.attributes(attributesConsumer);
<add> return this;
<add> }
<add>
<ide> @Override
<ide> public RequestBodySpec accept(MediaType... acceptableMediaTypes) {
<ide> this.bodySpec.accept(acceptableMediaTypes);
<ide><path>spring-test/src/main/java/org/springframework/test/web/reactive/server/WebTestClient.java
<ide> interface Builder {
<ide> */
<ide> S headers(Consumer<HttpHeaders> headersConsumer);
<ide>
<add> /**
<add> * Set the attribute with the given name to the given value.
<add> * @param name the name of the attribute to add
<add> * @param value the value of the attribute to add
<add> * @return this builder
<add> */
<add> S attribute(String name, Object value);
<add>
<add> /**
<add> * Manipulate the request attributes with the given consumer. The attributes provided to
<add> * the consumer are "live", so that the consumer can be used to inspect attributes,
<add> * remove attributes, or use any of the other map-provided methods.
<add> * @param attributesConsumer a function that consumes the attributes
<add> * @return this builder
<add> */
<add> S attributes(Consumer<Map<String, Object>> attributesConsumer);
<add>
<ide> /**
<ide> * Perform the exchange without a request body.
<ide> * @return spec for decoding the response
<ide><path>spring-webflux/src/main/java/org/springframework/web/reactive/function/client/ClientRequest.java
<ide> package org.springframework.web.reactive.function.client;
<ide>
<ide> import java.net.URI;
<add>import java.util.Map;
<ide> import java.util.function.Consumer;
<ide>
<ide> import org.reactivestreams.Publisher;
<ide> public interface ClientRequest {
<ide> */
<ide> BodyInserter<?, ? super ClientHttpRequest> body();
<ide>
<add> /**
<add> * Return the attributes of this request.
<add> */
<add> Map<String, Object> attributes();
<add>
<ide> /**
<ide> * Writes this request to the given {@link ClientHttpRequest}.
<ide> *
<ide> static Builder from(ClientRequest other) {
<ide> return new DefaultClientRequestBuilder(other.method(), other.url())
<ide> .headers(headers -> headers.addAll(other.headers()))
<ide> .cookies(cookies -> cookies.addAll(other.cookies()))
<add> .attributes(attributes -> attributes.putAll(other.attributes()))
<ide> .body(other.body());
<ide> }
<ide>
<ide> interface Builder {
<ide> */
<ide> <S, P extends Publisher<S>> Builder body(P publisher, Class<S> elementClass);
<ide>
<add> /**
<add> * Set the attribute with the given name to the given value.
<add> * @param name the name of the attribute to add
<add> * @param value the value of the attribute to add
<add> * @return this builder
<add> */
<add> Builder attribute(String name, Object value);
<add>
<add> /**
<add> * Manipulate the request attributes with the given consumer. The attributes provided to
<add> * the consumer are "live", so that the consumer can be used to inspect attributes,
<add> * remove attributes, or use any of the other map-provided methods.
<add> * @param attributesConsumer a function that consumes the attributes
<add> * @return this builder
<add> */
<add> Builder attributes(Consumer<Map<String, Object>> attributesConsumer);
<add>
<ide> /**
<ide> * Builds the request entity with no body.
<ide> * @return the request entity
<ide><path>spring-webflux/src/main/java/org/springframework/web/reactive/function/client/DefaultClientRequestBuilder.java
<ide>
<ide> import java.net.URI;
<ide> import java.util.Collections;
<add>import java.util.LinkedHashMap;
<ide> import java.util.List;
<ide> import java.util.Map;
<ide> import java.util.Optional;
<ide> class DefaultClientRequestBuilder implements ClientRequest.Builder {
<ide>
<ide> private final MultiValueMap<String, String> cookies = new LinkedMultiValueMap<>();
<ide>
<add> private final Map<String, Object> attributes = new LinkedHashMap<>();
<add>
<ide> private BodyInserter<?, ? super ClientHttpRequest> inserter = BodyInserters.empty();
<ide>
<ide>
<ide> public <S, P extends Publisher<S>> ClientRequest.Builder body(P publisher, Class
<ide> return this;
<ide> }
<ide>
<add> @Override
<add> public ClientRequest.Builder attribute(String name, Object value) {
<add> this.attributes.put(name, value);
<add> return this;
<add> }
<add>
<add> @Override
<add> public ClientRequest.Builder attributes(Consumer<Map<String, Object>> attributesConsumer) {
<add> Assert.notNull(attributesConsumer, "'attributesConsumer' must not be null");
<add> attributesConsumer.accept(this.attributes);
<add> return this;
<add> }
<add>
<ide> @Override
<ide> public ClientRequest.Builder body(BodyInserter<?, ? super ClientHttpRequest> inserter) {
<ide> this.inserter = inserter;
<ide> public ClientRequest.Builder body(BodyInserter<?, ? super ClientHttpRequest> ins
<ide> @Override
<ide> public ClientRequest build() {
<ide> return new BodyInserterRequest(this.method, this.url, this.headers, this.cookies,
<del> this.inserter);
<add> this.inserter, this.attributes);
<ide> }
<ide>
<ide>
<ide> private static class BodyInserterRequest implements ClientRequest {
<ide>
<ide> private final BodyInserter<?, ? super ClientHttpRequest> inserter;
<ide>
<add> private final Map<String, Object> attributes;
<add>
<ide> public BodyInserterRequest(HttpMethod method, URI url, HttpHeaders headers,
<del> MultiValueMap<String, String> cookies, BodyInserter<?, ? super ClientHttpRequest> inserter) {
<add> MultiValueMap<String, String> cookies,
<add> BodyInserter<?, ? super ClientHttpRequest> inserter,
<add> Map<String, Object> attributes) {
<ide>
<ide> this.method = method;
<ide> this.url = url;
<ide> this.headers = HttpHeaders.readOnlyHttpHeaders(headers);
<ide> this.cookies = CollectionUtils.unmodifiableMultiValueMap(cookies);
<ide> this.inserter = inserter;
<add> this.attributes = Collections.unmodifiableMap(attributes);
<ide> }
<ide>
<ide> @Override
<ide> public MultiValueMap<String, String> cookies() {
<ide> return this.inserter;
<ide> }
<ide>
<add> @Override
<add> public Map<String, Object> attributes() {
<add> return this.attributes;
<add> }
<add>
<ide> @Override
<ide> public Mono<Void> writeTo(ClientHttpRequest request, ExchangeStrategies strategies) {
<ide> HttpHeaders requestHeaders = request.getHeaders();
<ide><path>spring-webflux/src/main/java/org/springframework/web/reactive/function/client/DefaultWebClient.java
<ide> import java.time.ZonedDateTime;
<ide> import java.time.format.DateTimeFormatter;
<ide> import java.util.Arrays;
<add>import java.util.LinkedHashMap;
<ide> import java.util.List;
<ide> import java.util.Map;
<ide> import java.util.function.Consumer;
<ide> private class DefaultRequestBodySpec implements RequestBodySpec {
<ide> @Nullable
<ide> private BodyInserter<?, ? super ClientHttpRequest> inserter;
<ide>
<add> @Nullable
<add> private Map<String, Object> attributes;
<add>
<ide> DefaultRequestBodySpec(HttpMethod httpMethod, URI uri) {
<ide> this.httpMethod = httpMethod;
<ide> this.uri = uri;
<ide> private MultiValueMap<String, String> getCookies() {
<ide> return this.cookies;
<ide> }
<ide>
<add> private Map<String, Object> getAttributes() {
<add> if (this.attributes == null) {
<add> this.attributes = new LinkedHashMap<>(4);
<add> }
<add> return this.attributes;
<add> }
<add>
<ide> @Override
<ide> public DefaultRequestBodySpec header(String headerName, String... headerValues) {
<ide> for (String headerValue : headerValues) {
<ide> public DefaultRequestBodySpec headers(Consumer<HttpHeaders> headersConsumer) {
<ide> return this;
<ide> }
<ide>
<add> @Override
<add> public RequestBodySpec attribute(String name, Object value) {
<add> getAttributes().put(name, value);
<add> return this;
<add> }
<add>
<add> @Override
<add> public RequestBodySpec attributes(Consumer<Map<String, Object>> attributesConsumer) {
<add> Assert.notNull(attributesConsumer, "'attributesConsumer' must not be null");
<add> attributesConsumer.accept(getAttributes());
<add> return this;
<add> }
<add>
<ide> @Override
<ide> public DefaultRequestBodySpec accept(MediaType... acceptableMediaTypes) {
<ide> getHeaders().setAccept(Arrays.asList(acceptableMediaTypes));
<ide> public Mono<ClientResponse> exchange() {
<ide> private ClientRequest.Builder initRequestBuilder() {
<ide> return ClientRequest.method(this.httpMethod, this.uri)
<ide> .headers(headers -> headers.addAll(initHeaders()))
<del> .cookies(cookies -> cookies.addAll(initCookies()));
<add> .cookies(cookies -> cookies.addAll(initCookies()))
<add> .attributes(attributes -> attributes.putAll(getAttributes()));
<ide> }
<ide>
<ide> private HttpHeaders initHeaders() {
<ide><path>spring-webflux/src/main/java/org/springframework/web/reactive/function/client/ExchangeFilterFunctions.java
<ide>
<ide> import java.nio.charset.StandardCharsets;
<ide> import java.util.Base64;
<add>import java.util.Map;
<add>import java.util.function.Function;
<ide>
<ide> import reactor.core.publisher.Mono;
<ide>
<ide> public abstract class ExchangeFilterFunctions {
<ide>
<ide> /**
<del> * Return a filter that adds an Authorization header for HTTP Basic Authentication.
<add> * Name of the {@link ClientRequest} attribute that contains the username, as used by
<add> * {@link #basicAuthentication()}
<add> */
<add> public static final String USERNAME_ATTRIBUTE = ExchangeFilterFunctions.class.getName() + ".username";
<add>
<add> /**
<add> * Name of the {@link ClientRequest} attribute that contains the password, as used by
<add> * {@link #basicAuthentication()}
<add> */
<add> public static final String PASSWORD_ATTRIBUTE = ExchangeFilterFunctions.class.getName() + ".password";
<add>
<add>
<add> /**
<add> * Return a filter that adds an Authorization header for HTTP Basic Authentication, based on
<add> * the given username and password.
<ide> * @param username the username to use
<ide> * @param password the password to use
<ide> * @return the {@link ExchangeFilterFunction} that adds the Authorization header
<ide> public static ExchangeFilterFunction basicAuthentication(String username, String
<ide> Assert.notNull(username, "'username' must not be null");
<ide> Assert.notNull(password, "'password' must not be null");
<ide>
<add> return basicAuthentication(r -> username, r -> password);
<add> }
<add>
<add> /**
<add> * Return a filter that adds an Authorization header for HTTP Basic Authentication, based on
<add> * the username and password provided in the
<add> * {@linkplain ClientRequest#attributes() request attributes}.
<add> * @return the {@link ExchangeFilterFunction} that adds the Authorization header
<add> * @see #USERNAME_ATTRIBUTE
<add> * @see #PASSWORD_ATTRIBUTE
<add> */
<add> public static ExchangeFilterFunction basicAuthentication() {
<add> return basicAuthentication(
<add> request -> getRequiredAttribute(request, USERNAME_ATTRIBUTE),
<add> request -> getRequiredAttribute(request, PASSWORD_ATTRIBUTE)
<add> );
<add> }
<add>
<add> private static String getRequiredAttribute(ClientRequest request, String key) {
<add> Map<String, Object> attributes = request.attributes();
<add> if (attributes.containsKey(key)) {
<add> return (String) attributes.get(key);
<add> } else {
<add> throw new IllegalStateException(
<add> "Could not find request attribute with key \"" + key + "\"");
<add> }
<add> }
<add>
<add> private static ExchangeFilterFunction basicAuthentication(Function<ClientRequest, String> usernameFunction,
<add> Function<ClientRequest, String> passwordFunction) {
<add>
<ide> return ExchangeFilterFunction.ofRequestProcessor(
<ide> clientRequest -> {
<del> String authorization = authorization(username, password);
<add> String authorization = authorization(usernameFunction.apply(clientRequest),
<add> passwordFunction.apply(clientRequest));
<ide> ClientRequest authorizedRequest = ClientRequest.from(clientRequest)
<ide> .headers(headers -> {
<ide> headers.set(HttpHeaders.AUTHORIZATION, authorization);
<ide><path>spring-webflux/src/main/java/org/springframework/web/reactive/function/client/WebClient.java
<ide> interface Builder {
<ide> */
<ide> S headers(Consumer<HttpHeaders> headersConsumer);
<ide>
<add> /**
<add> * Set the attribute with the given name to the given value.
<add> * @param name the name of the attribute to add
<add> * @param value the value of the attribute to add
<add> * @return this builder
<add> */
<add> S attribute(String name, Object value);
<add>
<add> /**
<add> * Manipulate the request attributes with the given consumer. The attributes provided to
<add> * the consumer are "live", so that the consumer can be used to inspect attributes,
<add> * remove attributes, or use any of the other map-provided methods.
<add> * @param attributesConsumer a function that consumes the attributes
<add> * @return this builder
<add> */
<add> S attributes(Consumer<Map<String, Object>> attributesConsumer);
<add>
<ide> /**
<ide> * Exchange the request for a {@code ClientResponse} with full access
<ide> * to the response status and headers before extracting the body.
<ide><path>spring-webflux/src/test/java/org/springframework/web/reactive/function/client/DefaultWebClientTests.java
<ide> public void mutateDoesCopy() throws Exception {
<ide> client2.mutate().defaultCookies(cookies -> assertEquals(2, cookies.size()));
<ide> }
<ide>
<add> @Test
<add> public void attributes() {
<add> ExchangeFilterFunction filter = (request, next) -> {
<add> assertEquals("bar", request.attributes().get("foo"));
<add> return next.exchange(request);
<add> };
<add>
<add> WebClient client = builder().filter(filter).build();
<add>
<add> client.get().uri("/path").attribute("foo", "bar").exchange();
<add> }
<add>
<ide>
<ide>
<ide> private WebClient.Builder builder() {
<ide><path>spring-webflux/src/test/java/org/springframework/web/reactive/function/client/ExchangeFilterFunctionsTests.java
<ide>
<ide> import static org.junit.Assert.*;
<ide> import static org.mockito.Mockito.*;
<del>import static org.springframework.http.HttpMethod.*;
<add>import static org.springframework.http.HttpMethod.GET;
<ide>
<ide> /**
<ide> * @author Arjen Poutsma
<ide> public void basicAuthentication() throws Exception {
<ide> assertEquals(response, result);
<ide> }
<ide>
<add> @Test
<add> public void basicAuthenticationAttributes() throws Exception {
<add> ClientRequest request = ClientRequest.method(GET, URI.create("http://example.com"))
<add> .attribute(ExchangeFilterFunctions.USERNAME_ATTRIBUTE, "foo")
<add> .attribute(ExchangeFilterFunctions.PASSWORD_ATTRIBUTE, "bar").build();
<add> ClientResponse response = mock(ClientResponse.class);
<add>
<add> ExchangeFunction exchange = r -> {
<add> assertTrue(r.headers().containsKey(HttpHeaders.AUTHORIZATION));
<add> assertTrue(r.headers().getFirst(HttpHeaders.AUTHORIZATION).startsWith("Basic "));
<add> return Mono.just(response);
<add> };
<add>
<add> ExchangeFilterFunction auth = ExchangeFilterFunctions.basicAuthentication();
<add> assertFalse(request.headers().containsKey(HttpHeaders.AUTHORIZATION));
<add> ClientResponse result = auth.filter(request, exchange).block();
<add> assertEquals(response, result);
<add> }
<add>
<ide> } | 9 |
Javascript | Javascript | watch missing items as files not as directories | 31f2e6d1f7e12fb8cadeb098f6aa9b8859e81c3a | <ide><path>lib/node/NodeWatchFileSystem.js
<ide> class NodeWatchFileSystem {
<ide> );
<ide> });
<ide>
<del> this.watcher.watch(
<del> cachedFiles.concat(missing),
<del> cachedDirs.concat(missing),
<del> startTime
<del> );
<add> this.watcher.watch(cachedFiles.concat(missing), cachedDirs, startTime);
<ide>
<ide> if (oldWatcher) {
<ide> oldWatcher.close(); | 1 |
Python | Python | convert floatx to ascii | 92f66a279a3564420c9b7003f297c2fc12d40a7e | <ide><path>keras/backend/common.py
<ide> def set_floatx(floatx):
<ide> global _FLOATX
<ide> if floatx not in {'float32', 'float64'}:
<ide> raise Exception('Unknown floatx type: ' + str(floatx))
<add> if isinstance(floatx, unicode):
<add> floatx = floatx.encode('ascii')
<ide> _FLOATX = floatx
<ide>
<ide> | 1 |
Javascript | Javascript | add ie 11 to the text/html data uri shim | 74ee2958129fee1d26129d5b8a00c08148c7e86b | <ide><path>web/compatibility.js
<ide> if (typeof PDFJS === 'undefined') {
<ide> };
<ide> })();
<ide>
<del>// IE9/10 text/html data URI
<add>// IE9-11 text/html data URI
<ide> (function checkDataURICompatibility() {
<del> if (!('documentMode' in document) ||
<del> document.documentMode !== 9 && document.documentMode !== 10)
<add> if (!('documentMode' in document) || document.documentMode > 11)
<ide> return;
<ide> // overriding the src property
<ide> var originalSrcDescriptor = Object.getOwnPropertyDescriptor( | 1 |
Text | Text | remove incorrect entry from changelog | 60402b924b4b38196a658a023fad945421710457 | <ide><path>CHANGELOG.md
<ide> https://iojs.org/api/tls.html
<ide>
<ide> https://iojs.org/api/url.html
<ide>
<del>- Added support for `path` option in `url.format`, which encompasses `pathname`, `query`, and `search`.
<ide> - Improved escaping of certain characters.
<ide> - Improved parsing speed.
<ide> | 1 |
Ruby | Ruby | fix failing template tests | 8e49fa607f539f9cf698ef5296e7d760b7a3f2c7 | <ide><path>actionpack/test/template/template_test.rb
<ide> def my_buffer
<ide> end
<ide> end
<ide>
<del> def new_template(body = "<%= hello %>", details = {format: html})
<add> def new_template(body = "<%= hello %>", details = { format: :html })
<ide> ActionView::Template.new(body, "hello template", details.fetch(:handler) { ERBHandler }, {:virtual_path => "hello"}.merge!(details))
<ide> end
<ide>
<ide> def test_basic_template_does_html_escape
<ide> end
<ide>
<ide> def test_text_template_does_not_html_escape
<del> @template = new_template("<%= apostrophe %>", format: text)
<add> @template = new_template("<%= apostrophe %>", format: :text)
<ide> assert_equal "l'apostrophe", render
<ide> end
<ide> | 1 |
Python | Python | fix reference to thinc copy_array util | 490b38e6bbfb9d73402a14fba1d77ecac4c55daf | <ide><path>spacy/compat.py
<ide> import sys
<ide> import ujson
<ide>
<del>import thinc.neural.util
<add>from thinc.neural.util import copy_array
<ide>
<ide> try:
<ide> import cPickle as pickle
<ide> CudaStream = CudaStream
<ide> cupy = cupy
<ide> fix_text = ftfy.fix_text
<del>copy_array = thinc.neural.util.copy_array
<add>copy_array = copy_array
<ide>
<ide> is_python2 = six.PY2
<ide> is_python3 = six.PY3 | 1 |
Mixed | Javascript | move convertnpnprotocols to end-of-life | 4d00cd4ce7388893405a833531b56542f206c9e4 | <ide><path>benchmark/tls/convertprotocols.js
<ide> function main({ n }) {
<ide> var m = {};
<ide> // First call dominates results
<ide> if (n > 1) {
<del> tls.convertNPNProtocols(input, m);
<add> tls.convertALPNProtocols(input, m);
<ide> m = {};
<ide> }
<ide> bench.start();
<ide> for (var i = 0; i < n; i++)
<del> tls.convertNPNProtocols(input, m);
<add> tls.convertALPNProtocols(input, m);
<ide> bench.end(n);
<ide> }
<ide><path>doc/api/deprecations.md
<ide> objects respectively.
<ide> <a id="DEP0107"></a>
<ide> ### DEP0107: tls.convertNPNProtocols()
<ide>
<del>Type: Runtime
<add>Type: End-of-Life
<ide>
<ide> This was an undocumented helper function not intended for use outside Node.js
<ide> core and obsoleted by the removal of NPN (Next Protocol Negotiation) support.
<ide><path>lib/tls.js
<ide> function convertProtocols(protocols) {
<ide> return buff;
<ide> }
<ide>
<del>exports.convertNPNProtocols = internalUtil.deprecate(function(protocols, out) {
<del> // If protocols is Array - translate it into buffer
<del> if (Array.isArray(protocols)) {
<del> out.NPNProtocols = convertProtocols(protocols);
<del> } else if (isUint8Array(protocols)) {
<del> // Copy new buffer not to be modified by user.
<del> out.NPNProtocols = Buffer.from(protocols);
<del> }
<del>}, 'tls.convertNPNProtocols() is deprecated.', 'DEP0107');
<del>
<ide> exports.convertALPNProtocols = function(protocols, out) {
<ide> // If protocols is Array - translate it into buffer
<ide> if (Array.isArray(protocols)) {
<ide><path>test/parallel/test-tls-basic-validations.js
<ide> common.expectsError(
<ide> assert(out.ALPNProtocols.equals(Buffer.from('efgh')));
<ide> }
<ide>
<del>{
<del> const buffer = Buffer.from('abcd');
<del> const out = {};
<del> tls.convertNPNProtocols(buffer, out);
<del> out.NPNProtocols.write('efgh');
<del> assert(buffer.equals(Buffer.from('abcd')));
<del> assert(out.NPNProtocols.equals(Buffer.from('efgh')));
<del>}
<del>
<ide> {
<ide> const buffer = new Uint8Array(Buffer.from('abcd'));
<ide> const out = {};
<ide> tls.convertALPNProtocols(buffer, out);
<ide> assert(out.ALPNProtocols.equals(Buffer.from('abcd')));
<ide> }
<del>
<del>{
<del> const buffer = new Uint8Array(Buffer.from('abcd'));
<del> const out = {};
<del> tls.convertNPNProtocols(buffer, out);
<del> assert(out.NPNProtocols.equals(Buffer.from('abcd')));
<del>} | 4 |
PHP | PHP | fix docblock e.g | 8891f6d64e47a9d25070a982f3eb48598a1d3ec9 | <ide><path>src/Controller/Controller.php
<ide> public function components(?ComponentRegistry $components = null): ComponentRegi
<ide> * $this->loadComponent('Authentication.Authentication');
<ide> * ```
<ide> *
<del> * Will result in a `Toolbar` property being set.
<add> * Will result in a `Authentication` property being set.
<ide> *
<ide> * @param string $name The name of the component to load.
<ide> * @param array $config The config for the component. | 1 |
Python | Python | ignore cython warnings in init | 15b7ecdacdb37b6334e9c0312115ad0fabefb676 | <ide><path>numpy/__init__.py
<ide> def pkgload(*packages, **options):
<ide> __all__.extend(lib.__all__)
<ide> __all__.extend(['linalg', 'fft', 'random', 'ctypeslib', 'ma'])
<ide>
<add> # Filter out Cython harmless warnings
<add> warnings.filterwarnings("ignore", message="numpy.dtype size changed")
<add> warnings.filterwarnings("ignore", message="numpy.ufunc size changed")
<add> warnings.filterwarnings("ignore", message="numpy.ndarray size changed")
<add>
<ide> # oldnumeric and numarray were removed in 1.9. In case some packages import
<ide> # but do not use them, we define them here for backward compatibility.
<ide> oldnumeric = 'removed' | 1 |
Ruby | Ruby | use reject to remove empty hash values | 2852d9f0de7e49a138357b35f1f5c25e0607c15b | <ide><path>Library/Homebrew/github_packages.rb
<ide> def upload_bottle(user, token, skopeo, formula_full_name, bottle_hash, dry_run:)
<ide> "org.opencontainers.image.url" => bottle_hash["formula"]["homepage"],
<ide> "org.opencontainers.image.vendor" => org,
<ide> "org.opencontainers.image.version" => version,
<del> }
<del> delete_blank_hash_values(formula_annotations_hash)
<add> }.reject { |_, v| v.blank? }
<ide>
<ide> manifests = bottle_hash["bottle"]["tags"].map do |bottle_tag, tag_hash|
<ide> local_file = tag_hash["local_filename"]
<ide> def upload_bottle(user, token, skopeo, formula_full_name, bottle_hash, dry_run:)
<ide> architecture: architecture,
<ide> os: os,
<ide> "os.version" => os_version,
<del> }
<del> delete_blank_hash_values(platform_hash)
<add> }.reject { |_, v| v.blank? }
<ide>
<ide> tar_sha256 = Digest::SHA256.hexdigest(
<ide> Utils.safe_popen_read("gunzip", "--stdout", "--decompress", local_file),
<ide> def upload_bottle(user, token, skopeo, formula_full_name, bottle_hash, dry_run:)
<ide> "sh.brew.bottle.digest" => tar_gz_sha256,
<ide> "sh.brew.bottle.glibc.version" => glibc_version,
<ide> "sh.brew.tab" => tab.to_json,
<del> }
<del> delete_blank_hash_values(descriptor_annotations_hash)
<add> }.reject { |_, v| v.blank? }
<ide>
<ide> annotations_hash = formula_annotations_hash.merge(descriptor_annotations_hash).merge(
<ide> {
<ide> "org.opencontainers.image.created" => created_date,
<ide> "org.opencontainers.image.documentation" => documentation,
<ide> "org.opencontainers.image.title" => "#{formula_full_name} #{tag}",
<ide> },
<del> ).sort.to_h
<del> delete_blank_hash_values(annotations_hash)
<add> ).reject { |_, v| v.blank? }.sort.to_h
<ide>
<ide> image_manifest = {
<ide> schemaVersion: 2,
<ide> def write_hash(directory, hash, filename = nil)
<ide>
<ide> [sha256, json.size]
<ide> end
<del>
<del> def delete_blank_hash_values(hash)
<del> hash.each do |key, value|
<del> hash.delete(key) if value.blank?
<del> end
<del> end
<ide> end | 1 |
PHP | PHP | add array dependency to download function | ab5f4b1dcc2cf273339c0f1434edd810c6d8a2d1 | <ide><path>src/Illuminate/Support/Facades/Response.php
<ide> public static function stream($callback, $status = 200, array $headers = array()
<ide> * @param array $headers
<ide> * @return \Symfony\Component\HttpFoundation\BinaryFileResponse
<ide> */
<del> public static function download($file, $name = null, $headers = array())
<add> public static function download($file, $name = null, array $headers = array())
<ide> {
<ide> $response = new BinaryFileResponse($file, 200, $headers, true, 'attachment');
<ide> | 1 |
Python | Python | add parametric softplus | 36a9a3947368bc6763ba2c733d501c89ad1b2f67 | <ide><path>keras/layers/advanced_activations.py
<ide> from ..layers.core import Layer
<del>from ..utils.theano_utils import shared_zeros
<add>from ..utils.theano_utils import shared_zeros, shared_ones
<ide>
<ide> class LeakyReLU(Layer):
<ide> def __init__(self, alpha=0.3):
<ide> def get_output(self, train):
<ide> def get_config(self):
<ide> return {"name":self.__class__.__name__,
<ide> "input_shape":self.input_shape}
<add>
<add>
<add>class Psoftplus(Layer):
<add> '''
<add> Parametric softplus of the form: alpha * (1 + exp(beta * X))
<add>
<add> Reference:
<add> Inferring Nonlinear Neuronal Computation Based on Physiologically Plausible Inputs
<add> http://journals.plos.org/ploscompbiol/article?id=10.1371/journal.pcbi.1003143
<add> '''
<add> def __init__(self, input_shape):
<add> super(Psoftplus,self).__init__()
<add> self.alphas = shared_ones(input_shape)
<add> self.betas = shared_ones(input_shape)
<add> self.params = [self.alphas, self.betas]
<add> self.input_shape = input_shape
<add>
<add> def get_output(self, train):
<add> X = self.get_input(train)
<add> return T.nnet.softplus(self.betas * X) * self.alphas
<add>
<add> def get_config(self):
<add> return {"name":self.__class__.__name__,
<add> "input_shape":self.input_shape}
<ide>\ No newline at end of file | 1 |
Javascript | Javascript | remove createreactclass from layouteventstest | 89850583f4c8a19199c9a44ea1b7392c37b915b4 | <ide><path>IntegrationTests/LayoutEventsTest.js
<ide> 'use strict';
<ide>
<ide> const React = require('react');
<del>const createReactClass = require('create-react-class');
<ide> const ReactNative = require('react-native');
<ide> const {Image, LayoutAnimation, StyleSheet, Text, View} = ReactNative;
<ide> const {TestModule} = ReactNative.NativeModules;
<ide> function debug(...args) {
<ide>
<ide> import type {Layout, LayoutEvent} from 'CoreEventTypes';
<ide>
<add>type Props = $ReadOnly<{||}>;
<add>
<ide> type State = {
<ide> didAnimation: boolean,
<ide> extraText?: string,
<ide> type State = {
<ide> containerStyle?: ViewStyleProp,
<ide> };
<ide>
<del>const LayoutEventsTest = createReactClass({
<del> displayName: 'LayoutEventsTest',
<del> getInitialState(): State {
<del> return {
<del> didAnimation: false,
<del> };
<del> },
<del> animateViewLayout: function() {
<add>class LayoutEventsTest extends React.Component<Props, State> {
<add> _view: ?React.ElementRef<typeof View>;
<add> _img: ?React.ElementRef<typeof Image>;
<add> _txt: ?React.ElementRef<typeof Text>;
<add>
<add> state: State = {
<add> didAnimation: false,
<add> };
<add>
<add> animateViewLayout() {
<ide> debug('animateViewLayout invoked');
<ide> LayoutAnimation.configureNext(LayoutAnimation.Presets.spring, () => {
<ide> debug('animateViewLayout done');
<ide> this.checkLayout(this.addWrapText);
<ide> });
<ide> this.setState({viewStyle: {margin: 60}});
<del> },
<del> addWrapText: function() {
<add> }
<add>
<add> addWrapText = () => {
<ide> debug('addWrapText invoked');
<ide> this.setState(
<ide> {extraText: ' And a bunch more text to wrap around a few lines.'},
<ide> () => this.checkLayout(this.changeContainer),
<ide> );
<del> },
<del> changeContainer: function() {
<add> };
<add>
<add> changeContainer = () => {
<ide> debug('changeContainer invoked');
<ide> this.setState({containerStyle: {width: 280}}, () =>
<ide> this.checkLayout(TestModule.markTestCompleted),
<ide> );
<del> },
<del> checkLayout: function(next?: ?Function) {
<del> if (!this.isMounted()) {
<add> };
<add>
<add> checkLayout = (next?: ?() => void) => {
<add> const view = this._view;
<add> const txt = this._txt;
<add> const img = this._img;
<add>
<add> if (view == null || txt == null || img == null) {
<ide> return;
<ide> }
<del> this.refs.view.measure((x, y, width, height) => {
<del> this.compare('view', {x, y, width, height}, this.state.viewLayout);
<add>
<add> view.measure((x, y, width, height) => {
<add> this.compare(
<add> 'view',
<add> {x, y, width, height},
<add> this.state.viewLayout || null,
<add> );
<ide> if (typeof next === 'function') {
<ide> next();
<ide> } else if (!this.state.didAnimation) {
<ide> const LayoutEventsTest = createReactClass({
<ide> this.state.didAnimation = true;
<ide> }
<ide> });
<del> this.refs.txt.measure((x, y, width, height) => {
<add>
<add> txt.measure((x, y, width, height) => {
<ide> this.compare('txt', {x, y, width, height}, this.state.textLayout);
<ide> });
<del> this.refs.img.measure((x, y, width, height) => {
<add>
<add> img.measure((x, y, width, height) => {
<ide> this.compare('img', {x, y, width, height}, this.state.imageLayout);
<ide> });
<del> },
<del> compare: function(node: string, measured: any, onLayout: any): void {
<add> };
<add>
<add> compare(node: string, measured: Layout, onLayout?: ?Layout): void {
<ide> if (deepDiffer(measured, onLayout)) {
<ide> const data = {measured, onLayout};
<ide> throw new Error(
<ide> const LayoutEventsTest = createReactClass({
<ide> JSON.stringify(data, null, ' '),
<ide> );
<ide> }
<del> },
<del> onViewLayout: function(e: LayoutEvent) {
<add> }
<add>
<add> onViewLayout = (e: LayoutEvent) => {
<ide> debug('received view layout event\n', e.nativeEvent);
<ide> this.setState({viewLayout: e.nativeEvent.layout}, this.checkLayout);
<del> },
<del> onTextLayout: function(e: LayoutEvent) {
<add> };
<add>
<add> onTextLayout = (e: LayoutEvent) => {
<ide> debug('received text layout event\n', e.nativeEvent);
<ide> this.setState({textLayout: e.nativeEvent.layout}, this.checkLayout);
<del> },
<del> onImageLayout: function(e: LayoutEvent) {
<add> };
<add>
<add> onImageLayout = (e: LayoutEvent) => {
<ide> debug('received image layout event\n', e.nativeEvent);
<ide> this.setState({imageLayout: e.nativeEvent.layout}, this.checkLayout);
<del> },
<del> render: function() {
<add> };
<add>
<add> render() {
<ide> const viewStyle = [styles.view, this.state.viewStyle];
<ide> const textLayout = this.state.textLayout || {width: '?', height: '?'};
<ide> const imageLayout = this.state.imageLayout || {x: '?', y: '?'};
<ide> debug('viewLayout', this.state.viewLayout);
<ide> return (
<ide> <View style={[styles.container, this.state.containerStyle]}>
<del> <View ref="view" onLayout={this.onViewLayout} style={viewStyle}>
<add> <View
<add> ref={ref => {
<add> this._view = ref;
<add> }}
<add> onLayout={this.onViewLayout}
<add> style={viewStyle}>
<ide> <Image
<del> ref="img"
<add> ref={ref => {
<add> this._img = ref;
<add> }}
<ide> onLayout={this.onImageLayout}
<ide> style={styles.image}
<ide> source={{uri: 'uie_thumb_big.png'}}
<ide> />
<del> <Text ref="txt" onLayout={this.onTextLayout} style={styles.text}>
<add> <Text
<add> ref={ref => {
<add> this._txt = ref;
<add> }}
<add> onLayout={this.onTextLayout}
<add> style={styles.text}>
<ide> A simple piece of text.{this.state.extraText}
<ide> </Text>
<ide> <Text>
<ide> const LayoutEventsTest = createReactClass({
<ide> </View>
<ide> </View>
<ide> );
<del> },
<del>});
<add> }
<add>}
<ide>
<ide> const styles = StyleSheet.create({
<ide> container: {
<ide> const styles = StyleSheet.create({
<ide> },
<ide> });
<ide>
<del>LayoutEventsTest.displayName = 'LayoutEventsTest';
<del>
<ide> module.exports = LayoutEventsTest; | 1 |
Ruby | Ruby | use tt in doc for railties [skip ci] | 38b13929f09b87cae85fb1d9ae6dec413f175304 | <ide><path>activesupport/lib/active_support/dependencies.rb
<ide> def self.unload_interlock
<ide> # handles the new constants.
<ide> #
<ide> # If child.rb is being autoloaded, its constants will be added to
<del> # autoloaded_constants. If it was being `require`d, they will be discarded.
<add> # autoloaded_constants. If it was being required, they will be discarded.
<ide> #
<ide> # This is handled by walking back up the watch stack and adding the constants
<ide> # found by child.rb to the list of original constants in parent.rb.
<ide><path>railties/lib/rails/application/configuration.rb
<ide> def paths
<ide> end
<ide>
<ide> # Loads and returns the entire raw configuration of database from
<del> # values stored in `config/database.yml`.
<add> # values stored in <tt>config/database.yml</tt>.
<ide> def database_configuration
<ide> path = paths["config/database"].existent.first
<ide> yaml = Pathname.new(path) if path
<ide><path>railties/lib/rails/command/actions.rb
<ide> module Rails
<ide> module Command
<ide> module Actions
<del> # Change to the application's path if there is no config.ru file in current directory.
<del> # This allows us to run `rails server` from other directories, but still get
<del> # the main config.ru and properly set the tmp directory.
<add> # Change to the application's path if there is no <tt>config.ru</tt> file in current directory.
<add> # This allows us to run <tt>rails server</tt> from other directories, but still get
<add> # the main <tt>config.ru</tt> and properly set the <tt>tmp</tt> directory.
<ide> def set_application_directory!
<ide> Dir.chdir(File.expand_path("../..", APP_PATH)) unless File.exist?(File.expand_path("config.ru"))
<ide> end
<ide><path>railties/lib/rails/command/base.rb
<ide> def usage_path
<ide> # Default file root to place extra files a command might need, placed
<ide> # one folder above the command file.
<ide> #
<del> # For a `Rails::Command::TestCommand` placed in `rails/command/test_command.rb`
<del> # would return `rails/test`.
<add> # For a Rails::Command::TestCommand placed in <tt>rails/command/test_command.rb</tt>
<add> # would return <tt>rails/test</tt>.
<ide> def default_command_root
<ide> path = File.expand_path(File.join("../commands", command_root_namespace), __dir__)
<ide> path if File.exist?(path) | 4 |
Javascript | Javascript | use correct connect event for tls socket | f3686f2a4dc017d998a057f7fa6107e36a721641 | <ide><path>lib/internal/http2/core.js
<ide> class Http2Session extends EventEmitter {
<ide> const setupFn = setupHandle(this, socket, type, options);
<ide> if (socket.connecting) {
<ide> this[kState].connecting = true;
<del> socket.once('connect', setupFn);
<add> const connectEvent =
<add> socket instanceof tls.TLSSocket ? 'secureConnect' : 'connect';
<add> socket.once(connectEvent, setupFn);
<ide> } else {
<ide> setupFn();
<ide> } | 1 |
Python | Python | remove flask superclass | af4cb0ff2bb77c0c2f03227b17cc6ef67f28e63b | <ide><path>tests/__init__.py
<ide> """
<ide>
<ide> from __future__ import print_function
<del>import pytest
<ide>
<ide> import os
<ide> import sys
<del>import flask
<ide> import warnings
<ide> from functools import update_wrapper
<ide> from contextlib import contextmanager
<ide> def new_f(self, *args, **kwargs):
<ide> for entry in log:
<ide> assert 'Modules are deprecated' in str(entry['message'])
<ide> return update_wrapper(new_f, f)
<del>
<del>
<del>class TestFlask(object):
<del> """Baseclass for all the tests that Flask uses. Use these methods
<del> for testing instead of the camelcased ones in the baseclass for
<del> consistency.
<del> """
<del>
<del> def setup_method(self, method):
<del> self.setup()
<del>
<del> def teardown_method(self, method):
<del> self.teardown()
<del>
<del> def setup(self):
<del> pass
<del>
<del> def teardown(self):
<del> pass
<ide><path>tests/test_appctx.py
<ide>
<ide> import flask
<ide> import unittest
<del>from tests import TestFlask
<add>
<ide>
<ide>
<ide> def test_basic_url_generation():
<ide><path>tests/test_basic.py
<ide> import unittest
<ide> from datetime import datetime
<ide> from threading import Thread
<del>from tests import TestFlask, emits_module_deprecation_warning
<add>from tests import emits_module_deprecation_warning
<ide> from flask._compat import text_type
<ide> from werkzeug.exceptions import BadRequest, NotFound, Forbidden
<ide> from werkzeug.http import parse_date
<ide><path>tests/test_blueprints.py
<ide>
<ide> import flask
<ide> import unittest
<del>from tests import TestFlask
<add>
<ide> from flask._compat import text_type
<ide> from werkzeug.http import parse_cache_control_header
<ide> from jinja2 import TemplateNotFound
<ide><path>tests/test_config.py
<ide> import unittest
<ide> import textwrap
<ide> from contextlib import contextmanager
<del>from tests import TestFlask
<add>
<ide> from flask._compat import PY2
<ide>
<ide>
<ide><path>tests/test_deprecations.py
<ide>
<ide> import flask
<ide> import unittest
<del>from tests import TestFlask, catch_warnings
<add>from tests import catch_warnings
<ide><path>tests/test_ext.py
<ide> from imp import reload as reload_module
<ide> except ImportError:
<ide> reload_module = reload
<del>from tests import TestFlask
<add>
<ide> from flask._compat import PY2
<ide>
<ide>
<ide><path>tests/test_helpers.py
<ide> import flask
<ide> import unittest
<ide> from logging import StreamHandler
<del>from tests import TestFlask, catch_warnings, catch_stderr
<add>from tests import catch_warnings, catch_stderr
<ide> from werkzeug.http import parse_cache_control_header, parse_options_header
<ide> from flask._compat import StringIO, text_type
<ide>
<ide><path>tests/test_regression.py
<ide> import flask
<ide> import threading
<ide> from werkzeug.exceptions import NotFound
<del>from tests import TestFlask
<add>
<ide>
<ide>
<ide> _gc_lock = threading.Lock()
<ide><path>tests/test_reqctx.py
<ide> from greenlet import greenlet
<ide> except ImportError:
<ide> greenlet = None
<del>from tests import TestFlask
<add>
<ide>
<ide>
<ide> def test_teardown_on_pop():
<ide><path>tests/test_signals.py
<ide> blinker = None
<ide>
<ide> import flask
<del>from tests import TestFlask
<add>
<ide>
<ide>
<ide> pytestmark = pytest.mark.skipif(
<ide><path>tests/test_subclassing.py
<ide> import flask
<ide> import unittest
<ide> from logging import StreamHandler
<del>from tests import TestFlask
<add>
<ide> from flask._compat import StringIO
<ide>
<ide>
<ide><path>tests/test_templating.py
<ide> import logging
<ide> from jinja2 import TemplateNotFound
<ide>
<del>from tests import TestFlask
<add>
<ide>
<ide>
<ide> def test_context_processing():
<ide><path>tests/test_testing.py
<ide>
<ide> import flask
<ide> import unittest
<del>from tests import TestFlask
<add>
<ide> from flask._compat import text_type
<ide>
<ide>
<ide><path>tests/test_views.py
<ide> import flask
<ide> import flask.views
<ide> import unittest
<del>from tests import TestFlask
<add>
<ide> from werkzeug.http import parse_set_header
<ide>
<ide> def common_test(app): | 15 |
Java | Java | remove unused imports from rctviewmanager | 98604485c49b64c5c459735adecd2d621f8a1012 | <ide><path>ReactAndroid/src/main/java/com/facebook/react/flat/RCTViewManager.java
<ide> import com.facebook.react.common.MapBuilder;
<ide> import com.facebook.react.uimanager.PixelUtil;
<ide> import com.facebook.react.uimanager.ReactProp;
<del>import com.facebook.react.uimanager.ReactPropGroup;
<del>import com.facebook.react.uimanager.ThemedReactContext;
<del>import com.facebook.react.uimanager.ViewProps;
<ide> import com.facebook.react.views.view.ReactDrawableHelper;
<ide>
<ide> /** | 1 |
Python | Python | add install/uninstall api to databricks hook | b0272231320a4975cc39968dec8f0abf7a5cca11 | <ide><path>airflow/providers/databricks/hooks/databricks.py
<ide> CANCEL_RUN_ENDPOINT = ('POST', 'api/2.0/jobs/runs/cancel')
<ide> USER_AGENT_HEADER = {'user-agent': f'airflow-{__version__}'}
<ide>
<add>INSTALL_LIBS_ENDPOINT = ('POST', 'api/2.0/libraries/install')
<add>UNINSTALL_LIBS_ENDPOINT = ('POST', 'api/2.0/libraries/uninstall')
<add>
<ide>
<ide> class RunState:
<ide> """Utility class for the run state concept of Databricks runs."""
<ide> def terminate_cluster(self, json: dict) -> None:
<ide> """
<ide> self._do_api_call(TERMINATE_CLUSTER_ENDPOINT, json)
<ide>
<add> def install(self, json: dict) -> None:
<add> """
<add> Install libraries on the cluster.
<add>
<add> Utility function to call the ``2.0/libraries/install`` endpoint.
<add>
<add> :param json: json dictionary containing cluster_id and an array of library
<add> :type json: dict
<add> """
<add> self._do_api_call(INSTALL_LIBS_ENDPOINT, json)
<add>
<add> def uninstall(self, json: dict) -> None:
<add> """
<add> Uninstall libraries on the cluster.
<add>
<add> Utility function to call the ``2.0/libraries/uninstall`` endpoint.
<add>
<add> :param json: json dictionary containing cluster_id and an array of library
<add> :type json: dict
<add> """
<add> self._do_api_call(UNINSTALL_LIBS_ENDPOINT, json)
<add>
<ide>
<ide> def _retryable_error(exception) -> bool:
<ide> return (
<ide><path>tests/providers/databricks/hooks/test_databricks.py
<ide> NOTEBOOK_PARAMS = {"dry-run": "true", "oldest-time-to-consider": "1457570074236"}
<ide> JAR_PARAMS = ["param1", "param2"]
<ide> RESULT_STATE = None # type: None
<add>LIBRARIES = [
<add> {"jar": "dbfs:/mnt/libraries/library.jar"},
<add> {"maven": {"coordinates": "org.jsoup:jsoup:1.7.2", "exclusions": ["slf4j:slf4j"]}},
<add>]
<ide>
<ide>
<ide> def run_now_endpoint(host):
<ide> def terminate_cluster_endpoint(host):
<ide> return f'https://{host}/api/2.0/clusters/delete'
<ide>
<ide>
<add>def install_endpoint(host):
<add> """
<add> Utility function to generate the install endpoint given the host.
<add> """
<add> return f'https://{host}/api/2.0/libraries/install'
<add>
<add>
<add>def uninstall_endpoint(host):
<add> """
<add> Utility function to generate the uninstall endpoint given the host.
<add> """
<add> return f'https://{host}/api/2.0/libraries/uninstall'
<add>
<add>
<ide> def create_valid_response_mock(content):
<ide> response = mock.MagicMock()
<ide> response.json.return_value = content
<ide> def test_terminate_cluster(self, mock_requests):
<ide> timeout=self.hook.timeout_seconds,
<ide> )
<ide>
<add> @mock.patch('airflow.providers.databricks.hooks.databricks.requests')
<add> def test_install_libs_on_cluster(self, mock_requests):
<add> mock_requests.codes.ok = 200
<add> mock_requests.post.return_value.json.return_value = {}
<add> status_code_mock = mock.PropertyMock(return_value=200)
<add> type(mock_requests.post.return_value).status_code = status_code_mock
<add>
<add> data = {'cluster_id': CLUSTER_ID, 'libraries': LIBRARIES}
<add> self.hook.install(data)
<add>
<add> mock_requests.post.assert_called_once_with(
<add> install_endpoint(HOST),
<add> json={'cluster_id': CLUSTER_ID, 'libraries': LIBRARIES},
<add> params=None,
<add> auth=(LOGIN, PASSWORD),
<add> headers=USER_AGENT_HEADER,
<add> timeout=self.hook.timeout_seconds,
<add> )
<add>
<add> @mock.patch('airflow.providers.databricks.hooks.databricks.requests')
<add> def test_uninstall_libs_on_cluster(self, mock_requests):
<add> mock_requests.codes.ok = 200
<add> mock_requests.post.return_value.json.return_value = {}
<add> status_code_mock = mock.PropertyMock(return_value=200)
<add> type(mock_requests.post.return_value).status_code = status_code_mock
<add>
<add> data = {'cluster_id': CLUSTER_ID, 'libraries': LIBRARIES}
<add> self.hook.uninstall(data)
<add>
<add> mock_requests.post.assert_called_once_with(
<add> uninstall_endpoint(HOST),
<add> json={'cluster_id': CLUSTER_ID, 'libraries': LIBRARIES},
<add> params=None,
<add> auth=(LOGIN, PASSWORD),
<add> headers=USER_AGENT_HEADER,
<add> timeout=self.hook.timeout_seconds,
<add> )
<add>
<ide>
<ide> class TestDatabricksHookToken(unittest.TestCase):
<ide> """ | 2 |
Python | Python | add a function to generate a .def file from a dll | e9d5604aebc5e04a299bbb93dd9d6865a383974f | <ide><path>numpy/distutils/mingw32ccompiler.py
<ide> def dump_table(dll):
<ide> st = subprocess.Popen(["objdump.exe", "-p", dll], stdout=subprocess.PIPE)
<ide> return st.stdout.readlines()
<ide>
<add>def generate_def(dll, dfile):
<add> """Given a dll file location, get all its exported symbols and dump them
<add> into the given def file.
<add>
<add> The .def file will be overwritten"""
<add> dump = dump_table(dll)
<add> for i in range(len(dump)):
<add> if TABLE.match(dump[i]):
<add> break
<add>
<add> if i == len(dump):
<add> raise ValueError("Symbol table not found")
<add>
<add> syms = []
<add> for j in range(i, len(dump)):
<add> m = table.match(lines[j])
<add> if m:
<add> syms.append((int(m.group(1).strip()), m.group(2)))
<add> else:
<add> break
<add>
<add> d = open(deffile, 'w')
<add> d.write('LIBRARY %s\n' % dllname)
<add> d.write(';CODE PRELOAD MOVEABLE DISCARDABLE\n')
<add> d.write(';DATA PRELOAD SINGLE\n')
<add> d.write('\nEXPORTS\n')
<add> for s in syms:
<add> d.write('@%d %s\n' % (s[0], s[1]))
<add> d.close()
<add>
<ide> def build_import_library():
<ide> """ Build the import libraries for Mingw32-gcc on Windows
<ide> """ | 1 |
Javascript | Javascript | fix truncate tests | 3f1bca7d26eb63be968d70908fd4dafa2a3de5f7 | <ide><path>Libraries/Utilities/truncate.js
<ide> */
<ide> 'use strict';
<ide>
<del>var merge = require('merge');
<del>
<ide> type truncateOptions = {
<ide> breakOnWords: boolean;
<ide> minDelta: number;
<ide> elipsis: string;
<ide> }
<ide>
<del>var defaultOptions = {
<add>const defaultOptions = {
<ide> breakOnWords: true,
<ide> minDelta: 10, // Prevents truncating a tiny bit off the end
<ide> elipsis: '...',
<ide> };
<ide>
<ide> // maxChars (including ellipsis)
<del>var truncate = function(
<add>const truncate = function(
<ide> str: ?string,
<ide> maxChars: number,
<ide> options: truncateOptions
<ide> ): ?string {
<del> options = merge(defaultOptions, options);
<add> options = Object.assign({}, defaultOptions, options);
<ide> if (str && str.length &&
<ide> str.length - options.minDelta + options.elipsis.length >= maxChars) {
<ide> str = str.slice(0, maxChars - options.elipsis.length + 1); | 1 |
Javascript | Javascript | throw errors on invalid paths synchronously | d8f73385e220d54de9f9c53da8d8693813e6d774 | <ide><path>lib/fs.js
<ide> function copyObject(source) {
<ide> return target;
<ide> }
<ide>
<add>// TODO(joyeecheung): explore how the deprecation could be solved via linting
<add>// rules. See https://github.com/nodejs/node/pull/12976
<ide> function rethrow() {
<del> // TODO(thefourtheye) Throw error instead of warning in major version > 7
<ide> process.emitWarning(
<ide> 'Calling an asynchronous function without callback is deprecated.',
<ide> 'DeprecationWarning', 'DEP0013', rethrow
<ide> function validateOffsetLengthWrite(offset, length, byteLength) {
<ide> }
<ide> }
<ide>
<add>// Check if the path contains null types if it is a string nor Uint8Array,
<add>// otherwise return silently.
<add>function nullCheck(path, propName, throwError = true) {
<add> const pathIsString = typeof path === 'string';
<add> const pathIsUint8Array = isUint8Array(path);
<add>
<add> // We can only perform meaningful checks on strings and Uint8Arrays.
<add> if (!pathIsString && !pathIsUint8Array) {
<add> return;
<add> }
<add>
<add> if (pathIsString && path.indexOf('\u0000') === -1) {
<add> return;
<add> } else if (pathIsUint8Array && path.indexOf(0) === -1) {
<add> return;
<add> }
<add>
<add> const err = new errors.Error(
<add> 'ERR_INVALID_ARG_VALUE', propName, path,
<add> 'must be a string or Uint8Array without null bytes');
<add>
<add> if (throwError) {
<add> Error.captureStackTrace(err, nullCheck);
<add> throw err;
<add> }
<add> return err;
<add>}
<add>
<ide> function validatePath(path, propName) {
<ide> let err;
<ide>
<ide> function validatePath(path, propName) {
<ide> if (typeof path !== 'string' && !isUint8Array(path)) {
<ide> err = new errors.TypeError('ERR_INVALID_ARG_TYPE', propName,
<ide> ['string', 'Buffer', 'URL']);
<add> } else {
<add> err = nullCheck(path, propName, false);
<ide> }
<ide>
<ide> if (err !== undefined) {
<ide> function makeStatsCallback(cb) {
<ide> };
<ide> }
<ide>
<del>function nullCheck(path, callback) {
<del> if (('' + path).indexOf('\u0000') !== -1) {
<del> const er = new errors.Error('ERR_INVALID_ARG_TYPE',
<del> 'path',
<del> 'string without null bytes',
<del> path);
<del>
<del> if (typeof callback !== 'function')
<del> throw er;
<del> process.nextTick(callback, er);
<del> return false;
<del> }
<del> return true;
<del>}
<del>
<ide> function isFd(path) {
<ide> return (path >>> 0) === path;
<ide> }
<ide> Object.defineProperties(fs, {
<ide> X_OK: { enumerable: true, value: constants.X_OK || 0 },
<ide> });
<ide>
<del>function handleError(val, callback) {
<del> if (val instanceof Error) {
<del> if (typeof callback === 'function') {
<del> process.nextTick(callback, val);
<del> return true;
<del> } else throw val;
<del> }
<del> return false;
<del>}
<del>
<ide> fs.access = function(path, mode, callback) {
<ide> if (typeof mode === 'function') {
<ide> callback = mode;
<ide> fs.access = function(path, mode, callback) {
<ide> throw new errors.TypeError('ERR_INVALID_CALLBACK');
<ide> }
<ide>
<del> if (handleError((path = getPathFromURL(path)), callback))
<del> return;
<del>
<add> path = getPathFromURL(path);
<ide> validatePath(path);
<ide>
<del> if (!nullCheck(path, callback))
<del> return;
<del>
<ide> mode = mode | 0;
<ide> var req = new FSReqWrap();
<ide> req.oncomplete = makeCallback(callback);
<ide> binding.access(pathModule.toNamespacedPath(path), mode, req);
<ide> };
<ide>
<ide> fs.accessSync = function(path, mode) {
<del> handleError((path = getPathFromURL(path)));
<add> path = getPathFromURL(path);
<ide> validatePath(path);
<del> nullCheck(path);
<ide>
<ide> if (mode === undefined)
<ide> mode = fs.F_OK;
<ide> fs.accessSync = function(path, mode) {
<ide> }
<ide> };
<ide>
<add>// fs.exists never throws even when the arguments are invalid - if there is
<add>// a callback it would invoke it with false, otherwise it emits a warning
<add>// (see the comments of rethrow()).
<add>// This is to bring it inline with fs.existsSync, which never throws.
<add>// TODO(joyeecheung): deprecate the never-throw-on-invalid-arguments behavior
<ide> fs.exists = function(path, callback) {
<del> if (handleError((path = getPathFromURL(path)), cb))
<add> if (typeof callback !== 'function') {
<add> rethrow();
<ide> return;
<del> validatePath(path);
<del> if (!nullCheck(path, cb)) return;
<add> }
<add>
<add> function suppressedCallback(err) {
<add> callback(err ? false : true);
<add> }
<add>
<add> try {
<add> path = getPathFromURL(path);
<add> validatePath(path);
<add> } catch (err) {
<add> return callback(false);
<add> }
<ide> var req = new FSReqWrap();
<del> req.oncomplete = cb;
<add> req.oncomplete = suppressedCallback;
<ide> binding.stat(pathModule.toNamespacedPath(path), req);
<del> function cb(err) {
<del> if (callback) callback(err ? false : true);
<del> }
<ide> };
<ide>
<ide> Object.defineProperty(fs.exists, internalUtil.promisify.custom, {
<ide> Object.defineProperty(fs.exists, internalUtil.promisify.custom, {
<ide> }
<ide> });
<ide>
<del>
<add>// fs.existsSync never throws, it only returns true or false.
<add>// Since fs.existsSync never throws, users have established
<add>// the expectation that passing invalid arguments to it, even like
<add>// fs.existsSync(), would only get a false in return, so we cannot signal
<add>// validation errors to users properly out of compatibility concerns.
<add>// TODO(joyeecheung): deprecate the never-throw-on-invalid-arguments behavior
<ide> fs.existsSync = function(path) {
<ide> try {
<del> handleError((path = getPathFromURL(path)));
<del> try {
<del> validatePath(path);
<del> } catch (e) {
<del> return false;
<del> }
<del> nullCheck(path);
<add> path = getPathFromURL(path);
<add> validatePath(path);
<ide> const ctx = { path };
<ide> binding.stat(pathModule.toNamespacedPath(path), undefined, ctx);
<ide> if (ctx.errno !== undefined) {
<ide> fs.existsSync = function(path) {
<ide> fs.readFile = function(path, options, callback) {
<ide> callback = maybeCallback(callback || options);
<ide> options = getOptions(options, { flag: 'r' });
<del>
<del> if (handleError((path = getPathFromURL(path)), callback))
<del> return;
<del> if (!nullCheck(path, callback))
<del> return;
<del>
<ide> var context = new ReadFileContext(callback, options.encoding);
<ide> context.isUserFd = isFd(path); // file descriptor ownership
<ide> var req = new FSReqWrap();
<ide> fs.readFile = function(path, options, callback) {
<ide> return;
<ide> }
<ide>
<add> path = getPathFromURL(path);
<ide> validatePath(path);
<del>
<ide> binding.open(pathModule.toNamespacedPath(path),
<ide> stringToFlags(options.flag || 'r'),
<ide> 0o666,
<ide> fs.open = function(path, flags, mode, callback_) {
<ide> var callback = makeCallback(arguments[arguments.length - 1]);
<ide> mode = modeNum(mode, 0o666);
<ide>
<del> if (handleError((path = getPathFromURL(path)), callback))
<del> return;
<del> if (!nullCheck(path, callback)) return;
<add> path = getPathFromURL(path);
<ide> validatePath(path);
<ide> validateUint32(mode, 'mode');
<ide>
<ide> fs.open = function(path, flags, mode, callback_) {
<ide>
<ide> fs.openSync = function(path, flags, mode) {
<ide> mode = modeNum(mode, 0o666);
<del> handleError((path = getPathFromURL(path)));
<del> nullCheck(path);
<add> path = getPathFromURL(path);
<ide> validatePath(path);
<ide> validateUint32(mode, 'mode');
<ide>
<ide> fs.writeSync = function(fd, buffer, offset, length, position) {
<ide>
<ide> fs.rename = function(oldPath, newPath, callback) {
<ide> callback = makeCallback(callback);
<del> if (handleError((oldPath = getPathFromURL(oldPath)), callback))
<del> return;
<del>
<del> if (handleError((newPath = getPathFromURL(newPath)), callback))
<del> return;
<del>
<del> if (!nullCheck(oldPath, callback)) return;
<del> if (!nullCheck(newPath, callback)) return;
<add> oldPath = getPathFromURL(oldPath);
<ide> validatePath(oldPath, 'oldPath');
<add> newPath = getPathFromURL(newPath);
<ide> validatePath(newPath, 'newPath');
<ide> const req = new FSReqWrap();
<ide> req.oncomplete = callback;
<ide> fs.rename = function(oldPath, newPath, callback) {
<ide> };
<ide>
<ide> fs.renameSync = function(oldPath, newPath) {
<del> handleError((oldPath = getPathFromURL(oldPath)));
<del> handleError((newPath = getPathFromURL(newPath)));
<del> nullCheck(oldPath);
<del> nullCheck(newPath);
<add> oldPath = getPathFromURL(oldPath);
<ide> validatePath(oldPath, 'oldPath');
<add> newPath = getPathFromURL(newPath);
<ide> validatePath(newPath, 'newPath');
<ide> const ctx = { path: oldPath, dest: newPath };
<ide> binding.rename(pathModule.toNamespacedPath(oldPath),
<ide> fs.ftruncateSync = function(fd, len = 0) {
<ide>
<ide> fs.rmdir = function(path, callback) {
<ide> callback = maybeCallback(callback);
<del> if (handleError((path = getPathFromURL(path)), callback))
<del> return;
<del> if (!nullCheck(path, callback)) return;
<add> path = getPathFromURL(path);
<ide> validatePath(path);
<ide> const req = new FSReqWrap();
<ide> req.oncomplete = callback;
<ide> binding.rmdir(pathModule.toNamespacedPath(path), req);
<ide> };
<ide>
<ide> fs.rmdirSync = function(path) {
<del> handleError((path = getPathFromURL(path)));
<del> nullCheck(path);
<add> path = getPathFromURL(path);
<ide> validatePath(path);
<ide> return binding.rmdir(pathModule.toNamespacedPath(path));
<ide> };
<ide> fs.fsyncSync = function(fd) {
<ide> fs.mkdir = function(path, mode, callback) {
<ide> if (typeof mode === 'function') callback = mode;
<ide> callback = makeCallback(callback);
<del> if (handleError((path = getPathFromURL(path)), callback))
<del> return;
<del> if (!nullCheck(path, callback)) return;
<del>
<add> path = getPathFromURL(path);
<ide> validatePath(path);
<ide> mode = modeNum(mode, 0o777);
<ide> validateUint32(mode, 'mode');
<ide> fs.mkdir = function(path, mode, callback) {
<ide> };
<ide>
<ide> fs.mkdirSync = function(path, mode) {
<del> handleError((path = getPathFromURL(path)));
<del> nullCheck(path);
<add> path = getPathFromURL(path);
<ide> validatePath(path);
<ide> mode = modeNum(mode, 0o777);
<ide> validateUint32(mode, 'mode');
<ide> fs.mkdirSync = function(path, mode) {
<ide> fs.readdir = function(path, options, callback) {
<ide> callback = makeCallback(typeof options === 'function' ? options : callback);
<ide> options = getOptions(options, {});
<del> if (handleError((path = getPathFromURL(path)), callback))
<del> return;
<del> if (!nullCheck(path, callback)) return;
<del>
<add> path = getPathFromURL(path);
<ide> validatePath(path);
<ide>
<ide> const req = new FSReqWrap();
<ide> fs.readdir = function(path, options, callback) {
<ide>
<ide> fs.readdirSync = function(path, options) {
<ide> options = getOptions(options, {});
<del> handleError((path = getPathFromURL(path)));
<del> nullCheck(path);
<add> path = getPathFromURL(path);
<ide> validatePath(path);
<ide> return binding.readdir(pathModule.toNamespacedPath(path), options.encoding);
<ide> };
<ide> fs.fstat = function(fd, callback) {
<ide>
<ide> fs.lstat = function(path, callback) {
<ide> callback = makeStatsCallback(callback);
<del> if (handleError((path = getPathFromURL(path)), callback))
<del> return;
<del> if (!nullCheck(path, callback)) return;
<add> path = getPathFromURL(path);
<ide> validatePath(path);
<ide> const req = new FSReqWrap();
<ide> req.oncomplete = callback;
<ide> fs.lstat = function(path, callback) {
<ide>
<ide> fs.stat = function(path, callback) {
<ide> callback = makeStatsCallback(callback);
<del> if (handleError((path = getPathFromURL(path)), callback))
<del> return;
<del> if (!nullCheck(path, callback)) return;
<add> path = getPathFromURL(path);
<ide> validatePath(path);
<ide> const req = new FSReqWrap();
<ide> req.oncomplete = callback;
<ide> fs.fstatSync = function(fd) {
<ide> };
<ide>
<ide> fs.lstatSync = function(path) {
<del> handleError((path = getPathFromURL(path)));
<del> nullCheck(path);
<add> path = getPathFromURL(path);
<ide> validatePath(path);
<ide> const ctx = { path };
<ide> binding.lstat(pathModule.toNamespacedPath(path), undefined, ctx);
<ide> fs.lstatSync = function(path) {
<ide> };
<ide>
<ide> fs.statSync = function(path) {
<del> handleError((path = getPathFromURL(path)));
<del> nullCheck(path);
<add> path = getPathFromURL(path);
<ide> validatePath(path);
<ide> const ctx = { path };
<ide> binding.stat(pathModule.toNamespacedPath(path), undefined, ctx);
<ide> fs.statSync = function(path) {
<ide> fs.readlink = function(path, options, callback) {
<ide> callback = makeCallback(typeof options === 'function' ? options : callback);
<ide> options = getOptions(options, {});
<del> if (handleError((path = getPathFromURL(path)), callback))
<del> return;
<del> if (!nullCheck(path, callback)) return;
<add> path = getPathFromURL(path);
<ide> validatePath(path, 'oldPath');
<ide> const req = new FSReqWrap();
<ide> req.oncomplete = callback;
<ide> fs.readlink = function(path, options, callback) {
<ide>
<ide> fs.readlinkSync = function(path, options) {
<ide> options = getOptions(options, {});
<del> handleError((path = getPathFromURL(path)));
<del> nullCheck(path);
<add> path = getPathFromURL(path);
<ide> validatePath(path, 'oldPath');
<ide> const ctx = { path };
<ide> const result = binding.readlink(pathModule.toNamespacedPath(path),
<ide> fs.symlink = function(target, path, type_, callback_) {
<ide> var type = (typeof type_ === 'string' ? type_ : null);
<ide> var callback = makeCallback(arguments[arguments.length - 1]);
<ide>
<del> if (handleError((target = getPathFromURL(target)), callback))
<del> return;
<del>
<del> if (handleError((path = getPathFromURL(path)), callback))
<del> return;
<del>
<del> if (!nullCheck(target, callback)) return;
<del> if (!nullCheck(path, callback)) return;
<add> target = getPathFromURL(target);
<add> path = getPathFromURL(path);
<ide> validatePath(target, 'target');
<ide> validatePath(path);
<ide>
<ide> fs.symlink = function(target, path, type_, callback_) {
<ide>
<ide> fs.symlinkSync = function(target, path, type) {
<ide> type = (typeof type === 'string' ? type : null);
<del> handleError((target = getPathFromURL(target)));
<del> handleError((path = getPathFromURL(path)));
<del> nullCheck(target);
<del> nullCheck(path);
<del>
<add> target = getPathFromURL(target);
<add> path = getPathFromURL(path);
<ide> validatePath(target, 'target');
<ide> validatePath(path);
<ide> const flags = stringToSymlinkType(type);
<ide> fs.symlinkSync = function(target, path, type) {
<ide> fs.link = function(existingPath, newPath, callback) {
<ide> callback = makeCallback(callback);
<ide>
<del> if (handleError((existingPath = getPathFromURL(existingPath)), callback))
<del> return;
<del>
<del> if (handleError((newPath = getPathFromURL(newPath)), callback))
<del> return;
<del>
<del> if (!nullCheck(existingPath, callback)) return;
<del> if (!nullCheck(newPath, callback)) return;
<del>
<add> existingPath = getPathFromURL(existingPath);
<add> newPath = getPathFromURL(newPath);
<ide> validatePath(existingPath, 'existingPath');
<ide> validatePath(newPath, 'newPath');
<ide>
<ide> fs.link = function(existingPath, newPath, callback) {
<ide> };
<ide>
<ide> fs.linkSync = function(existingPath, newPath) {
<del> handleError((existingPath = getPathFromURL(existingPath)));
<del> handleError((newPath = getPathFromURL(newPath)));
<del> nullCheck(existingPath);
<del> nullCheck(newPath);
<add> existingPath = getPathFromURL(existingPath);
<add> newPath = getPathFromURL(newPath);
<ide> validatePath(existingPath, 'existingPath');
<ide> validatePath(newPath, 'newPath');
<ide>
<ide> fs.linkSync = function(existingPath, newPath) {
<ide>
<ide> fs.unlink = function(path, callback) {
<ide> callback = makeCallback(callback);
<del> if (handleError((path = getPathFromURL(path)), callback))
<del> return;
<del> if (!nullCheck(path, callback)) return;
<add> path = getPathFromURL(path);
<ide> validatePath(path);
<ide> const req = new FSReqWrap();
<ide> req.oncomplete = callback;
<ide> binding.unlink(pathModule.toNamespacedPath(path), req);
<ide> };
<ide>
<ide> fs.unlinkSync = function(path) {
<del> handleError((path = getPathFromURL(path)));
<del> nullCheck(path);
<add> path = getPathFromURL(path);
<ide> validatePath(path);
<ide> const ctx = { path };
<ide> binding.unlink(pathModule.toNamespacedPath(path), undefined, ctx);
<ide> if (constants.O_SYMLINK !== undefined) {
<ide>
<ide> fs.chmod = function(path, mode, callback) {
<ide> callback = makeCallback(callback);
<del> if (handleError((path = getPathFromURL(path)), callback))
<del> return;
<del> if (!nullCheck(path, callback)) return;
<del>
<add> path = getPathFromURL(path);
<ide> validatePath(path);
<ide> mode = modeNum(mode);
<ide> validateUint32(mode, 'mode');
<ide> fs.chmod = function(path, mode, callback) {
<ide> };
<ide>
<ide> fs.chmodSync = function(path, mode) {
<del> handleError((path = getPathFromURL(path)));
<del> nullCheck(path);
<add> path = getPathFromURL(path);
<ide> validatePath(path);
<ide> mode = modeNum(mode);
<ide> validateUint32(mode, 'mode');
<ide> fs.fchownSync = function(fd, uid, gid) {
<ide>
<ide> fs.chown = function(path, uid, gid, callback) {
<ide> callback = makeCallback(callback);
<del> if (handleError((path = getPathFromURL(path)), callback))
<del> return;
<del> if (!nullCheck(path, callback)) return;
<del>
<add> path = getPathFromURL(path);
<ide> validatePath(path);
<ide> validateUint32(uid, 'uid');
<ide> validateUint32(gid, 'gid');
<ide> fs.chown = function(path, uid, gid, callback) {
<ide> };
<ide>
<ide> fs.chownSync = function(path, uid, gid) {
<del> handleError((path = getPathFromURL(path)));
<del> nullCheck(path);
<add> path = getPathFromURL(path);
<ide> validatePath(path);
<ide> validateUint32(uid, 'uid');
<ide> validateUint32(gid, 'gid');
<ide> fs._toUnixTimestamp = toUnixTimestamp;
<ide>
<ide> fs.utimes = function(path, atime, mtime, callback) {
<ide> callback = makeCallback(callback);
<del> if (handleError((path = getPathFromURL(path)), callback))
<del> return;
<del> if (!nullCheck(path, callback)) return;
<del>
<add> path = getPathFromURL(path);
<ide> validatePath(path);
<ide>
<ide> const req = new FSReqWrap();
<ide> fs.utimes = function(path, atime, mtime, callback) {
<ide> };
<ide>
<ide> fs.utimesSync = function(path, atime, mtime) {
<del> handleError((path = getPathFromURL(path)));
<del> nullCheck(path);
<add> path = getPathFromURL(path);
<ide> validatePath(path);
<ide> binding.utimes(pathModule.toNamespacedPath(path),
<ide> toUnixTimestamp(atime),
<ide> function FSWatcher() {
<ide> }
<ide> util.inherits(FSWatcher, EventEmitter);
<ide>
<add>// FIXME(joyeecheung): this method is not documented.
<add>// At the moment if filename is undefined, we
<add>// 1. Throw an Error from C++ land if it's the first time .start() is called
<add>// 2. Return silently from C++ land if .start() has already been called
<add>// on a valid filename and the wrap has been initialized
<ide> FSWatcher.prototype.start = function(filename,
<ide> persistent,
<ide> recursive,
<ide> encoding) {
<del> handleError((filename = getPathFromURL(filename)));
<del> nullCheck(filename);
<add> filename = getPathFromURL(filename);
<add> nullCheck(filename, 'filename');
<ide> var err = this._handle.start(pathModule.toNamespacedPath(filename),
<ide> persistent,
<ide> recursive,
<ide> FSWatcher.prototype.close = function() {
<ide> };
<ide>
<ide> fs.watch = function(filename, options, listener) {
<del> handleError((filename = getPathFromURL(filename)));
<del> nullCheck(filename);
<del>
<ide> if (typeof options === 'function') {
<ide> listener = options;
<ide> }
<ide> function StatWatcher() {
<ide> util.inherits(StatWatcher, EventEmitter);
<ide>
<ide>
<add>// FIXME(joyeecheung): this method is not documented.
<add>// At the moment if filename is undefined, we
<add>// 1. Throw an Error from C++ land if it's the first time .start() is called
<add>// 2. Return silently from C++ land if .start() has already been called
<add>// on a valid filename and the wrap has been initialized
<ide> StatWatcher.prototype.start = function(filename, persistent, interval) {
<del> handleError((filename = getPathFromURL(filename)));
<del> nullCheck(filename);
<add> filename = getPathFromURL(filename);
<add> nullCheck(filename, 'filename');
<ide> this._handle.start(pathModule.toNamespacedPath(filename),
<ide> persistent, interval);
<ide> };
<ide> StatWatcher.prototype.stop = function() {
<ide> const statWatchers = new Map();
<ide>
<ide> fs.watchFile = function(filename, options, listener) {
<del> handleError((filename = getPathFromURL(filename)));
<del> nullCheck(filename);
<add> filename = getPathFromURL(filename);
<add> validatePath(filename);
<ide> filename = pathModule.resolve(filename);
<ide> var stat;
<ide>
<ide> fs.watchFile = function(filename, options, listener) {
<ide> };
<ide>
<ide> fs.unwatchFile = function(filename, listener) {
<del> handleError((filename = getPathFromURL(filename)));
<del> nullCheck(filename);
<add> filename = getPathFromURL(filename);
<add> validatePath(filename);
<ide> filename = pathModule.resolve(filename);
<ide> var stat = statWatchers.get(filename);
<ide>
<ide> fs.realpathSync = function realpathSync(p, options) {
<ide> options = emptyObj;
<ide> else
<ide> options = getOptions(options, emptyObj);
<add> p = getPathFromURL(p);
<ide> if (typeof p !== 'string') {
<del> handleError((p = getPathFromURL(p)));
<del> if (typeof p !== 'string')
<del> p += '';
<add> p += '';
<ide> }
<del> nullCheck(p);
<add> validatePath(p);
<ide> p = pathModule.resolve(p);
<ide>
<ide> const cache = options[internalFS.realpathCacheKey];
<ide> fs.realpathSync = function realpathSync(p, options) {
<ide>
<ide> fs.realpathSync.native = function(path, options) {
<ide> options = getOptions(options, {});
<del> handleError((path = getPathFromURL(path)));
<del> nullCheck(path);
<add> path = getPathFromURL(path);
<add> validatePath(path);
<ide> return binding.realpath(path, options.encoding);
<ide> };
<ide>
<ide> fs.realpath = function realpath(p, options, callback) {
<ide> options = emptyObj;
<ide> else
<ide> options = getOptions(options, emptyObj);
<add> p = getPathFromURL(p);
<ide> if (typeof p !== 'string') {
<del> if (handleError((p = getPathFromURL(p)), callback))
<del> return;
<del> if (typeof p !== 'string')
<del> p += '';
<add> p += '';
<ide> }
<del> if (!nullCheck(p, callback))
<del> return;
<add> validatePath(p);
<ide> p = pathModule.resolve(p);
<ide>
<ide> const seenLinks = Object.create(null);
<ide> fs.realpath = function realpath(p, options, callback) {
<ide> fs.realpath.native = function(path, options, callback) {
<ide> callback = maybeCallback(callback || options);
<ide> options = getOptions(options, {});
<del> if (handleError((path = getPathFromURL(path)), callback)) return;
<del> if (!nullCheck(path, callback)) return;
<add> path = getPathFromURL(path);
<add> validatePath(path);
<ide> const req = new FSReqWrap();
<ide> req.oncomplete = callback;
<ide> return binding.realpath(path, options.encoding, req);
<ide> };
<ide>
<del>
<ide> fs.mkdtemp = function(prefix, options, callback) {
<ide> callback = makeCallback(typeof options === 'function' ? options : callback);
<ide> options = getOptions(options, {});
<ide> fs.mkdtemp = function(prefix, options, callback) {
<ide> 'string',
<ide> prefix);
<ide> }
<del> if (!nullCheck(prefix, callback)) {
<del> return;
<del> }
<del>
<add> nullCheck(prefix, 'prefix');
<ide> var req = new FSReqWrap();
<ide> req.oncomplete = callback;
<del>
<ide> binding.mkdtemp(`${prefix}XXXXXX`, options.encoding, req);
<ide> };
<ide>
<ide>
<ide> fs.mkdtempSync = function(prefix, options) {
<add> options = getOptions(options, {});
<ide> if (!prefix || typeof prefix !== 'string') {
<ide> throw new errors.TypeError('ERR_INVALID_ARG_TYPE',
<ide> 'prefix',
<ide> 'string',
<ide> prefix);
<ide> }
<del> options = getOptions(options, {});
<del> nullCheck(prefix);
<add> nullCheck(prefix, 'prefix');
<ide> return binding.mkdtemp(`${prefix}XXXXXX`, options.encoding);
<ide> };
<ide>
<ide> fs.copyFile = function(src, dest, flags, callback) {
<ide> }
<ide>
<ide> src = getPathFromURL(src);
<del>
<del> if (handleError(src, callback))
<del> return;
<del>
<del> if (!nullCheck(src, callback))
<del> return;
<del>
<ide> dest = getPathFromURL(dest);
<del>
<del> if (handleError(dest, callback))
<del> return;
<del>
<del> if (!nullCheck(dest, callback))
<del> return;
<del>
<ide> validatePath(src, 'src');
<ide> validatePath(dest, 'dest');
<ide>
<ide> fs.copyFile = function(src, dest, flags, callback) {
<ide>
<ide> fs.copyFileSync = function(src, dest, flags) {
<ide> src = getPathFromURL(src);
<del> handleError(src);
<del> nullCheck(src);
<del>
<ide> dest = getPathFromURL(dest);
<del> handleError(dest);
<del> nullCheck(dest);
<del>
<ide> validatePath(src, 'src');
<ide> validatePath(dest, 'dest');
<ide>
<ide> function ReadStream(path, options) {
<ide>
<ide> Readable.call(this, options);
<ide>
<del> handleError((this.path = getPathFromURL(path)));
<add> // path will be ignored when fd is specified, so it can be falsy
<add> this.path = getPathFromURL(path);
<ide> this.fd = options.fd === undefined ? null : options.fd;
<ide> this.flags = options.flags === undefined ? 'r' : options.flags;
<ide> this.mode = options.mode === undefined ? 0o666 : options.mode;
<ide> function WriteStream(path, options) {
<ide>
<ide> Writable.call(this, options);
<ide>
<del> handleError((this.path = getPathFromURL(path)));
<add> // path will be ignored when fd is specified, so it can be falsy
<add> this.path = getPathFromURL(path);
<ide> this.fd = options.fd === undefined ? null : options.fd;
<ide> this.flags = options.flags === undefined ? 'w' : options.flags;
<ide> this.mode = options.mode === undefined ? 0o666 : options.mode;
<ide> async function readFileHandle(filehandle, options) {
<ide> // thrown synchronously
<ide> const promises = {
<ide> async access(path, mode = fs.F_OK) {
<del> handleError((path = getPathFromURL(path)));
<del> nullCheck(path);
<add> path = getPathFromURL(path);
<ide> validatePath(path);
<ide>
<ide> mode = mode | 0;
<ide> const promises = {
<ide> },
<ide>
<ide> async copyFile(src, dest, flags) {
<del> handleError((src = getPathFromURL(src)));
<del> handleError((dest = getPathFromURL(dest)));
<del> nullCheck(src);
<del> nullCheck(dest);
<add> src = getPathFromURL(src);
<add> dest = getPathFromURL(dest);
<ide> validatePath(src, 'src');
<ide> validatePath(dest, 'dest');
<ide> flags = flags | 0;
<ide> const promises = {
<ide> // promises.open() uses the fs.FileHandle class.
<ide> async open(path, flags, mode) {
<ide> mode = modeNum(mode, 0o666);
<del> handleError((path = getPathFromURL(path)));
<del> nullCheck(path);
<add> path = getPathFromURL(path);
<ide> validatePath(path);
<ide> validateUint32(mode, 'mode');
<ide> return new FileHandle(
<ide> const promises = {
<ide> },
<ide>
<ide> async rename(oldPath, newPath) {
<del> handleError((oldPath = getPathFromURL(oldPath)));
<del> handleError((newPath = getPathFromURL(newPath)));
<del> nullCheck(oldPath);
<del> nullCheck(newPath);
<add> oldPath = getPathFromURL(oldPath);
<add> newPath = getPathFromURL(newPath);
<ide> validatePath(oldPath, 'oldPath');
<ide> validatePath(newPath, 'newPath');
<ide> return binding.rename(pathModule.toNamespacedPath(oldPath),
<ide> const promises = {
<ide> },
<ide>
<ide> async rmdir(path) {
<del> handleError((path = getPathFromURL(path)));
<del> nullCheck(path);
<add> path = getPathFromURL(path);
<ide> validatePath(path);
<ide> return binding.rmdir(pathModule.toNamespacedPath(path), kUsePromises);
<ide> },
<ide> const promises = {
<ide>
<ide> async mkdir(path, mode) {
<ide> mode = modeNum(mode, 0o777);
<del> handleError((path = getPathFromURL(path)));
<del> nullCheck(path);
<add> path = getPathFromURL(path);
<ide> validatePath(path);
<ide> validateUint32(mode, 'mode');
<ide> return binding.mkdir(pathModule.toNamespacedPath(path), mode, kUsePromises);
<ide> },
<ide>
<ide> async readdir(path, options) {
<ide> options = getOptions(options, {});
<del> handleError((path = getPathFromURL(path)));
<del> nullCheck(path);
<add> path = getPathFromURL(path);
<ide> validatePath(path);
<ide> return binding.readdir(pathModule.toNamespacedPath(path),
<ide> options.encoding, kUsePromises);
<ide> },
<ide>
<ide> async readlink(path, options) {
<ide> options = getOptions(options, {});
<del> handleError((path = getPathFromURL(path)));
<del> nullCheck(path);
<add> path = getPathFromURL(path);
<ide> validatePath(path, 'oldPath');
<ide> return binding.readlink(pathModule.toNamespacedPath(path),
<ide> options.encoding, kUsePromises);
<ide> },
<ide>
<ide> async symlink(target, path, type_) {
<ide> const type = (typeof type_ === 'string' ? type_ : null);
<del> handleError((target = getPathFromURL(target)));
<del> handleError((path = getPathFromURL(path)));
<del> nullCheck(target);
<del> nullCheck(path);
<add> target = getPathFromURL(target);
<add> path = getPathFromURL(path);
<ide> validatePath(target, 'target');
<ide> validatePath(path);
<ide> return binding.symlink(preprocessSymlinkDestination(target, type, path),
<ide> const promises = {
<ide> },
<ide>
<ide> async lstat(path) {
<del> handleError((path = getPathFromURL(path)));
<del> nullCheck(path);
<add> path = getPathFromURL(path);
<ide> validatePath(path);
<ide> return statsFromValues(
<ide> await binding.lstat(pathModule.toNamespacedPath(path), kUsePromises));
<ide> },
<ide>
<ide> async stat(path) {
<del> handleError((path = getPathFromURL(path)));
<del> nullCheck(path);
<add> path = getPathFromURL(path);
<ide> validatePath(path);
<ide> return statsFromValues(
<ide> await binding.stat(pathModule.toNamespacedPath(path), kUsePromises));
<ide> },
<ide>
<ide> async link(existingPath, newPath) {
<del> handleError((existingPath = getPathFromURL(existingPath)));
<del> handleError((newPath = getPathFromURL(newPath)));
<del> nullCheck(existingPath);
<del> nullCheck(newPath);
<add> existingPath = getPathFromURL(existingPath);
<add> newPath = getPathFromURL(newPath);
<ide> validatePath(existingPath, 'existingPath');
<ide> validatePath(newPath, 'newPath');
<ide> return binding.link(pathModule.toNamespacedPath(existingPath),
<ide> const promises = {
<ide> },
<ide>
<ide> async unlink(path) {
<del> handleError((path = getPathFromURL(path)));
<del> nullCheck(path);
<add> path = getPathFromURL(path);
<ide> validatePath(path);
<ide> return binding.unlink(pathModule.toNamespacedPath(path), kUsePromises);
<ide> },
<ide> const promises = {
<ide> },
<ide>
<ide> async chmod(path, mode) {
<del> handleError((path = getPathFromURL(path)));
<del> nullCheck(path);
<add> path = getPathFromURL(path);
<ide> validatePath(path);
<ide> mode = modeNum(mode);
<ide> validateUint32(mode, 'mode');
<ide> const promises = {
<ide> },
<ide>
<ide> async chown(path, uid, gid) {
<del> handleError((path = getPathFromURL(path)));
<del> nullCheck(path);
<add> path = getPathFromURL(path);
<ide> validatePath(path);
<ide> validateUint32(uid, 'uid');
<ide> validateUint32(gid, 'gid');
<ide> const promises = {
<ide> },
<ide>
<ide> async utimes(path, atime, mtime) {
<del> handleError((path = getPathFromURL(path)));
<del> nullCheck(path);
<add> path = getPathFromURL(path);
<ide> validatePath(path);
<ide> return binding.utimes(pathModule.toNamespacedPath(path),
<ide> toUnixTimestamp(atime),
<ide> const promises = {
<ide>
<ide> async realpath(path, options) {
<ide> options = getOptions(options, {});
<del> handleError((path = getPathFromURL(path)));
<del> nullCheck(path);
<add> path = getPathFromURL(path);
<ide> validatePath(path);
<ide> return binding.realpath(path, options.encoding, kUsePromises);
<ide> },
<ide><path>lib/internal/url.js
<ide> function getPathFromURLWin32(url) {
<ide> var third = pathname.codePointAt(n + 2) | 0x20;
<ide> if ((pathname[n + 1] === '2' && third === 102) || // 2f 2F /
<ide> (pathname[n + 1] === '5' && third === 99)) { // 5c 5C \
<del> return new errors.TypeError(
<add> throw new errors.TypeError(
<ide> 'ERR_INVALID_FILE_URL_PATH',
<ide> 'must not include encoded \\ or / characters');
<ide> }
<ide> function getPathFromURLWin32(url) {
<ide> var sep = pathname[2];
<ide> if (letter < 97 || letter > 122 || // a..z A..Z
<ide> (sep !== ':')) {
<del> return new errors.TypeError('ERR_INVALID_FILE_URL_PATH',
<del> 'must be absolute');
<add> throw new errors.TypeError('ERR_INVALID_FILE_URL_PATH',
<add> 'must be absolute');
<ide> }
<ide> return pathname.slice(1);
<ide> }
<ide> }
<ide>
<ide> function getPathFromURLPosix(url) {
<ide> if (url.hostname !== '') {
<del> return new errors.TypeError('ERR_INVALID_FILE_URL_HOST', platform);
<add> throw new errors.TypeError('ERR_INVALID_FILE_URL_HOST', platform);
<ide> }
<ide> var pathname = url.pathname;
<ide> for (var n = 0; n < pathname.length; n++) {
<ide> if (pathname[n] === '%') {
<ide> var third = pathname.codePointAt(n + 2) | 0x20;
<ide> if (pathname[n + 1] === '2' && third === 102) {
<del> return new errors.TypeError('ERR_INVALID_FILE_URL_PATH',
<del> 'must not include encoded / characters');
<add> throw new errors.TypeError('ERR_INVALID_FILE_URL_PATH',
<add> 'must not include encoded / characters');
<ide> }
<ide> }
<ide> }
<ide> function getPathFromURL(path) {
<ide> return path;
<ide> }
<ide> if (path.protocol !== 'file:')
<del> return new errors.TypeError('ERR_INVALID_URL_SCHEME', 'file');
<add> throw new errors.TypeError('ERR_INVALID_URL_SCHEME', 'file');
<ide> return isWindows ? getPathFromURLWin32(path) : getPathFromURLPosix(path);
<ide> }
<ide>
<ide><path>test/parallel/test-fs-copyfile.js
<ide> common.expectsError(() => {
<ide> );
<ide> });
<ide>
<del>// Throws if the source path is an invalid path.
<del>common.expectsError(() => {
<del> fs.copyFileSync('\u0000', dest);
<del>}, {
<del> code: 'ERR_INVALID_ARG_TYPE',
<del> type: Error,
<del> message: 'The "path" argument must be of type string without null bytes.' +
<del> ' Received type string'
<del>});
<del>
<del>// Throws if the destination path is an invalid path.
<del>common.expectsError(() => {
<del> fs.copyFileSync(src, '\u0000');
<del>}, {
<del> code: 'ERR_INVALID_ARG_TYPE',
<del> type: Error,
<del> message: 'The "path" argument must be of type string without null bytes.' +
<del> ' Received type string'
<del>});
<del>
<ide> // Errors if invalid flags are provided.
<ide> assert.throws(() => {
<ide> fs.copyFileSync(src, dest, -1);
<ide><path>test/parallel/test-fs-exists.js
<ide> const fs = require('fs');
<ide> const { URL } = require('url');
<ide> const f = __filename;
<ide>
<add>// Only warnings are emitted when the callback is invalid
<add>assert.doesNotThrow(() => fs.exists(f));
<add>assert.doesNotThrow(() => fs.exists());
<add>assert.doesNotThrow(() => fs.exists(f, {}));
<add>
<ide> fs.exists(f, common.mustCall(function(y) {
<ide> assert.strictEqual(y, true);
<ide> }));
<ide>
<del>assert.doesNotThrow(() => fs.exists(f));
<del>
<ide> fs.exists(`${f}-NO`, common.mustCall(function(y) {
<ide> assert.strictEqual(y, false);
<ide> }));
<ide>
<add>// If the path is invalid, fs.exists will still invoke the callback with false
<add>// instead of throwing errors
<ide> fs.exists(new URL('https://foo'), common.mustCall(function(y) {
<ide> assert.strictEqual(y, false);
<ide> }));
<ide>
<add>fs.exists({}, common.mustCall(function(y) {
<add> assert.strictEqual(y, false);
<add>}));
<add>
<ide> assert(fs.existsSync(f));
<ide> assert(!fs.existsSync(`${f}-NO`));
<ide>
<del>common.expectsError(
<del> () => { fs.exists(() => {}); },
<del> {
<del> code: 'ERR_INVALID_ARG_TYPE',
<del> message: 'The "path" argument must be one of type string, Buffer, or URL',
<del> type: TypeError
<del> }
<del>);
<del>
<add>// fs.existsSync() never throws
<ide> assert(!fs.existsSync());
<add>assert(!fs.existsSync({}));
<add>assert(!fs.existsSync(new URL('https://foo')));
<ide><path>test/parallel/test-fs-null-bytes.js
<ide> const URL = require('url').URL;
<ide>
<ide> function check(async, sync) {
<ide> const argsSync = Array.prototype.slice.call(arguments, 2);
<del> const argsAsync = argsSync.concat((er) => {
<del> common.expectsError(
<del> () => {
<del> throw er;
<del> },
<del> {
<del> code: 'ERR_INVALID_ARG_TYPE',
<del> type: Error
<del> });
<del> });
<add> const argsAsync = argsSync.concat(common.mustNotCall());
<ide>
<ide> if (sync) {
<ide> common.expectsError(
<ide> () => {
<ide> sync.apply(null, argsSync);
<ide> },
<ide> {
<del> code: 'ERR_INVALID_ARG_TYPE',
<add> code: 'ERR_INVALID_ARG_VALUE',
<ide> type: Error,
<ide> });
<ide> }
<ide>
<ide> if (async) {
<del> async.apply(null, argsAsync);
<add> common.expectsError(
<add> () => {
<add> async.apply(null, argsAsync);
<add> },
<add> {
<add> code: 'ERR_INVALID_ARG_VALUE',
<add> type: Error
<add> });
<ide> }
<ide> }
<ide>
<ide> check(fs.access, fs.accessSync, 'foo\u0000bar', fs.F_OK);
<ide> check(fs.appendFile, fs.appendFileSync, 'foo\u0000bar', 'abc');
<ide> check(fs.chmod, fs.chmodSync, 'foo\u0000bar', '0644');
<ide> check(fs.chown, fs.chownSync, 'foo\u0000bar', 12, 34);
<add>check(fs.copyFile, fs.copyFileSync, 'foo\u0000bar', 'abc');
<add>check(fs.copyFile, fs.copyFileSync, 'abc', 'foo\u0000bar');
<ide> check(fs.link, fs.linkSync, 'foo\u0000bar', 'foobar');
<ide> check(fs.link, fs.linkSync, 'foobar', 'foo\u0000bar');
<ide> check(fs.lstat, fs.lstatSync, 'foo\u0000bar');
<ide> check(fs.access, fs.accessSync, fileUrl, fs.F_OK);
<ide> check(fs.appendFile, fs.appendFileSync, fileUrl, 'abc');
<ide> check(fs.chmod, fs.chmodSync, fileUrl, '0644');
<ide> check(fs.chown, fs.chownSync, fileUrl, 12, 34);
<add>check(fs.copyFile, fs.copyFileSync, fileUrl, 'abc');
<add>check(fs.copyFile, fs.copyFileSync, 'abc', fileUrl);
<ide> check(fs.link, fs.linkSync, fileUrl, 'foobar');
<ide> check(fs.link, fs.linkSync, 'foobar', fileUrl);
<ide> check(fs.lstat, fs.lstatSync, fileUrl);
<ide> check(fs.access, fs.accessSync, fileUrl2, fs.F_OK);
<ide> check(fs.appendFile, fs.appendFileSync, fileUrl2, 'abc');
<ide> check(fs.chmod, fs.chmodSync, fileUrl2, '0644');
<ide> check(fs.chown, fs.chownSync, fileUrl2, 12, 34);
<add>check(fs.copyFile, fs.copyFileSync, fileUrl2, 'abc');
<add>check(fs.copyFile, fs.copyFileSync, 'abc', fileUrl2);
<ide> check(fs.link, fs.linkSync, fileUrl2, 'foobar');
<ide> check(fs.link, fs.linkSync, 'foobar', fileUrl2);
<ide> check(fs.lstat, fs.lstatSync, fileUrl2);
<ide><path>test/parallel/test-fs-whatwg-url.js
<ide> fs.readFile(url, common.mustCall((err, data) => {
<ide>
<ide> // Check that using a non file:// URL reports an error
<ide> const httpUrl = new URL('http://example.org');
<del>fs.readFile(httpUrl, common.expectsError({
<del> code: 'ERR_INVALID_URL_SCHEME',
<del> type: TypeError,
<del> message: 'The URL must be of scheme file'
<del>}));
<ide>
<del>// pct-encoded characters in the path will be decoded and checked
<del>fs.readFile(new URL('file:///c:/tmp/%00test'), common.mustCall((err) => {
<del> common.expectsError(
<del> () => {
<del> throw err;
<del> },
<del> {
<del> code: 'ERR_INVALID_ARG_TYPE',
<del> type: Error
<del> });
<del>}));
<add>common.expectsError(
<add> () => {
<add> fs.readFile(httpUrl, common.mustNotCall());
<add> },
<add> {
<add> code: 'ERR_INVALID_URL_SCHEME',
<add> type: TypeError,
<add> message: 'The URL must be of scheme file'
<add> });
<ide>
<add>// pct-encoded characters in the path will be decoded and checked
<ide> if (common.isWindows) {
<ide> // encoded back and forward slashes are not permitted on windows
<ide> ['%2f', '%2F', '%5c', '%5C'].forEach((i) => {
<del> fs.readFile(new URL(`file:///c:/tmp/${i}`), common.expectsError({
<del> code: 'ERR_INVALID_FILE_URL_PATH',
<del> type: TypeError,
<del> message: 'File URL path must not include encoded \\ or / characters'
<del> }));
<add> common.expectsError(
<add> () => {
<add> fs.readFile(new URL(`file:///c:/tmp/${i}`), common.mustNotCall());
<add> },
<add> {
<add> code: 'ERR_INVALID_FILE_URL_PATH',
<add> type: TypeError,
<add> message: 'File URL path must not include encoded \\ or / characters'
<add> }
<add> );
<ide> });
<add> common.expectsError(
<add> () => {
<add> fs.readFile(new URL('file:///c:/tmp/%00test'), common.mustNotCall());
<add> },
<add> {
<add> code: 'ERR_INVALID_ARG_VALUE',
<add> type: Error,
<add> message: 'The argument \'path\' must be a string or Uint8Array without ' +
<add> 'null bytes. Received \'c:/tmp/\\u0000test\''
<add> }
<add> );
<ide> } else {
<ide> // encoded forward slashes are not permitted on other platforms
<ide> ['%2f', '%2F'].forEach((i) => {
<del> fs.readFile(new URL(`file:///c:/tmp/${i}`), common.expectsError({
<del> code: 'ERR_INVALID_FILE_URL_PATH',
<del> type: TypeError,
<del> message: 'File URL path must not include encoded / characters'
<del> }));
<add> common.expectsError(
<add> () => {
<add> fs.readFile(new URL(`file:///c:/tmp/${i}`), common.mustNotCall());
<add> },
<add> {
<add> code: 'ERR_INVALID_FILE_URL_PATH',
<add> type: TypeError,
<add> message: 'File URL path must not include encoded / characters'
<add> });
<ide> });
<del>
<del> fs.readFile(new URL('file://hostname/a/b/c'), common.expectsError({
<del> code: 'ERR_INVALID_FILE_URL_HOST',
<del> type: TypeError,
<del> message: `File URL host must be "localhost" or empty on ${os.platform()}`
<del> }));
<add> common.expectsError(
<add> () => {
<add> fs.readFile(new URL('file://hostname/a/b/c'), common.mustNotCall());
<add> },
<add> {
<add> code: 'ERR_INVALID_FILE_URL_HOST',
<add> type: TypeError,
<add> message: `File URL host must be "localhost" or empty on ${os.platform()}`
<add> }
<add> );
<add> common.expectsError(
<add> () => {
<add> fs.readFile(new URL('file:///tmp/%00test'), common.mustNotCall());
<add> },
<add> {
<add> code: 'ERR_INVALID_ARG_VALUE',
<add> type: Error,
<add> message: 'The argument \'path\' must be a string or Uint8Array without ' +
<add> 'null bytes. Received \'/tmp/\\u0000test\''
<add> }
<add> );
<ide> }
<ide><path>test/parallel/test-repl-persistent-history.js
<ide> const CLEAR = { ctrl: true, name: 'u' };
<ide> // File paths
<ide> const historyFixturePath = fixtures.path('.node_repl_history');
<ide> const historyPath = path.join(tmpdir.path, '.fixture_copy_repl_history');
<del>const historyPathFail = path.join(tmpdir.path, '.node_repl\u0000_history');
<add>const historyPathFail = fixtures.path('nonexistent_folder', 'filename');
<ide> const oldHistoryPathObj = fixtures.path('old-repl-history-file-obj.json');
<ide> const oldHistoryPathFaulty = fixtures.path('old-repl-history-file-faulty.json');
<ide> const oldHistoryPath = fixtures.path('old-repl-history-file.json');