content_type (stringclasses, 8 values) | main_lang (stringclasses, 7 values) | message (stringlengths 1-50) | sha (stringlengths 40) | patch (stringlengths 52-962k) | file_count (int64 1-300)
---|---|---|---|---|---
Go | Go | migrate some ipcmode tests to integration | e0403604e26868b1546a766ab0b40b6cb1677ee6 | <ide><path>integration-cli/docker_api_ipcmode_test.go
<ide> func testIpcCheckDevExists(mm string) (bool, error) {
<ide> return false, s.Err()
<ide> }
<ide>
<del>// testIpcNonePrivateShareable is a helper function to test "none",
<del>// "private" and "shareable" modes.
<del>func testIpcNonePrivateShareable(c *check.C, mode string, mustBeMounted bool, mustBeShared bool) {
<del> cfg := container.Config{
<del> Image: "busybox",
<del> Cmd: []string{"top"},
<del> }
<del> hostCfg := container.HostConfig{
<del> IpcMode: container.IpcMode(mode),
<del> }
<del> ctx := context.Background()
<del>
<del> client := testEnv.APIClient()
<del>
<del> resp, err := client.ContainerCreate(ctx, &cfg, &hostCfg, nil, "")
<del> c.Assert(err, checker.IsNil)
<del> c.Assert(len(resp.Warnings), checker.Equals, 0)
<del>
<del> err = client.ContainerStart(ctx, resp.ID, types.ContainerStartOptions{})
<del> c.Assert(err, checker.IsNil)
<del>
<del> // get major:minor pair for /dev/shm from container's /proc/self/mountinfo
<del> cmd := "awk '($5 == \"/dev/shm\") {printf $3}' /proc/self/mountinfo"
<del> mm := cli.DockerCmd(c, "exec", "-i", resp.ID, "sh", "-c", cmd).Combined()
<del> if !mustBeMounted {
<del> c.Assert(mm, checker.Equals, "")
<del> // no more checks to perform
<del> return
<del> }
<del> c.Assert(mm, checker.Matches, "^[0-9]+:[0-9]+$")
<del>
<del> shared, err := testIpcCheckDevExists(mm)
<del> c.Assert(err, checker.IsNil)
<del> c.Logf("[testIpcPrivateShareable] ipcmode: %v, ipcdev: %v, shared: %v, mustBeShared: %v\n", mode, mm, shared, mustBeShared)
<del> c.Assert(shared, checker.Equals, mustBeShared)
<del>}
<del>
<del>/* TestAPIIpcModeNone checks the container "none" IPC mode
<del> * (--ipc none) works as expected. It makes sure there is no
<del> * /dev/shm mount inside the container.
<del> */
<del>func (s *DockerSuite) TestAPIIpcModeNone(c *check.C) {
<del> testRequires(c, DaemonIsLinux, MinimumAPIVersion("1.32"))
<del> testIpcNonePrivateShareable(c, "none", false, false)
<del>}
<del>
<del>/* TestAPIIpcModePrivate checks the container private IPC mode
<del> * (--ipc private) works as expected. It gets the minor:major pair
<del> * of /dev/shm mount from the container, and makes sure there is no
<del> * such pair on the host.
<del> */
<del>func (s *DockerSuite) TestAPIIpcModePrivate(c *check.C) {
<del> testRequires(c, DaemonIsLinux, SameHostDaemon)
<del> testIpcNonePrivateShareable(c, "private", true, false)
<del>}
<del>
<del>/* TestAPIIpcModeShareable checks the container shareable IPC mode
<del> * (--ipc shareable) works as expected. It gets the minor:major pair
<del> * of /dev/shm mount from the container, and makes sure such pair
<del> * also exists on the host.
<del> */
<del>func (s *DockerSuite) TestAPIIpcModeShareable(c *check.C) {
<del> testRequires(c, DaemonIsLinux, SameHostDaemon)
<del> testIpcNonePrivateShareable(c, "shareable", true, true)
<del>}
<del>
<ide> // testIpcContainer is a helper function to test --ipc container:NNN mode in various scenarios
<ide> func testIpcContainer(s *DockerSuite, c *check.C, donorMode string, mustWork bool) {
<ide> cfg := container.Config{
<ide><path>integration/container/ipcmode_test.go
<add>package container // import "github.com/docker/docker/integration/container"
<add>
<add>import (
<add> "bufio"
<add> "context"
<add> "os"
<add> "regexp"
<add> "strings"
<add> "testing"
<add>
<add> "github.com/docker/docker/api/types"
<add> containertypes "github.com/docker/docker/api/types/container"
<add> "github.com/docker/docker/integration/internal/container"
<add> "github.com/docker/docker/internal/test/request"
<add> "gotest.tools/assert"
<add> is "gotest.tools/assert/cmp"
<add> "gotest.tools/skip"
<add>)
<add>
<add>// testIpcCheckDevExists checks whether a given mount (identified by its
<add>// major:minor pair from /proc/self/mountinfo) exists on the host system.
<add>//
<add>// The format of /proc/self/mountinfo is like:
<add>//
<add>// 29 23 0:24 / /dev/shm rw,nosuid,nodev shared:4 - tmpfs tmpfs rw
<add>// ^^^^\
<add>// - this is the minor:major we look for
<add>func testIpcCheckDevExists(mm string) (bool, error) {
<add> f, err := os.Open("/proc/self/mountinfo")
<add> if err != nil {
<add> return false, err
<add> }
<add> defer f.Close()
<add>
<add> s := bufio.NewScanner(f)
<add> for s.Scan() {
<add> fields := strings.Fields(s.Text())
<add> if len(fields) < 7 {
<add> continue
<add> }
<add> if fields[2] == mm {
<add> return true, nil
<add> }
<add> }
<add>
<add> return false, s.Err()
<add>}
<add>
<add>// testIpcNonePrivateShareable is a helper function to test "none",
<add>// "private" and "shareable" modes.
<add>func testIpcNonePrivateShareable(t *testing.T, mode string, mustBeMounted bool, mustBeShared bool) {
<add> defer setupTest(t)()
<add>
<add> cfg := containertypes.Config{
<add> Image: "busybox",
<add> Cmd: []string{"top"},
<add> }
<add> hostCfg := containertypes.HostConfig{
<add> IpcMode: containertypes.IpcMode(mode),
<add> }
<add> client := request.NewAPIClient(t)
<add> ctx := context.Background()
<add>
<add> resp, err := client.ContainerCreate(ctx, &cfg, &hostCfg, nil, "")
<add> assert.NilError(t, err)
<add> assert.Check(t, is.Equal(len(resp.Warnings), 0))
<add>
<add> err = client.ContainerStart(ctx, resp.ID, types.ContainerStartOptions{})
<add> assert.NilError(t, err)
<add>
<add> // get major:minor pair for /dev/shm from container's /proc/self/mountinfo
<add> cmd := "awk '($5 == \"/dev/shm\") {printf $3}' /proc/self/mountinfo"
<add> result, err := container.Exec(ctx, client, resp.ID, []string{"sh", "-c", cmd})
<add> assert.NilError(t, err)
<add> mm := result.Combined()
<add> if !mustBeMounted {
<add> assert.Check(t, is.Equal(mm, ""))
<add> // no more checks to perform
<add> return
<add> }
<add> assert.Check(t, is.Equal(true, regexp.MustCompile("^[0-9]+:[0-9]+$").MatchString(mm)))
<add>
<add> shared, err := testIpcCheckDevExists(mm)
<add> assert.NilError(t, err)
<add> t.Logf("[testIpcPrivateShareable] ipcmode: %v, ipcdev: %v, shared: %v, mustBeShared: %v\n", mode, mm, shared, mustBeShared)
<add> assert.Check(t, is.Equal(shared, mustBeShared))
<add>}
<add>
<add>// TestIpcModeNone checks the container "none" IPC mode
<add>// (--ipc none) works as expected. It makes sure there is no
<add>// /dev/shm mount inside the container.
<add>func TestIpcModeNone(t *testing.T) {
<add> skip.If(t, testEnv.DaemonInfo.OSType != "linux" || testEnv.IsRemoteDaemon())
<add>
<add> testIpcNonePrivateShareable(t, "none", false, false)
<add>}
<add>
<add>// TestAPIIpcModePrivate checks the container private IPC mode
<add>// (--ipc private) works as expected. It gets the minor:major pair
<add>// of /dev/shm mount from the container, and makes sure there is no
<add>// such pair on the host.
<add>func TestIpcModePrivate(t *testing.T) {
<add> skip.If(t, testEnv.DaemonInfo.OSType != "linux" || testEnv.IsRemoteDaemon())
<add>
<add> testIpcNonePrivateShareable(t, "private", true, false)
<add>}
<add>
<add>// TestAPIIpcModeShareable checks the container shareable IPC mode
<add>// (--ipc shareable) works as expected. It gets the minor:major pair
<add>// of /dev/shm mount from the container, and makes sure such pair
<add>// also exists on the host.
<add>func TestIpcModeShareable(t *testing.T) {
<add> skip.If(t, testEnv.DaemonInfo.OSType != "linux" || testEnv.IsRemoteDaemon())
<add>
<add> testIpcNonePrivateShareable(t, "shareable", true, true)
<add>} | 2 |
Ruby | Ruby | handle failures in temporary_install better | 4c0d2c2bfb99d91be404e9c05e3b569ffa7dcc39 | <ide><path>Library/Homebrew/test/test_formula_install.rb
<ide> def teardown
<ide> end
<ide>
<ide> def temporary_install f
<del> # Brew and install the given formula
<add> f.prefix.mkpath
<add> keg = Keg.new(f.prefix)
<add>
<ide> shutup do
<ide> f.brew { f.install }
<ide> end
<ide>
<del> # Allow the test to do some processing
<del> yield
<add> begin
<add> yield
<add> ensure
<add> keg.unlink
<add> keg.uninstall
<add> end
<ide>
<del> # Remove the brewed formula and double check
<del> # that it did get removed. This lets multiple
<del> # tests use the same formula name without
<del> # stepping on each other.
<del> keg=Keg.new f.prefix
<del> keg.unlink
<del> keg.uninstall
<ide> assert !keg.exist?
<ide> assert !f.installed?
<ide> end | 1 |
Javascript | Javascript | add fallback for undefined cpus | 2b401e33de81428dc5f6dfc60343e65ee5167886 | <ide><path>lib/os.js
<ide> function loadavg() {
<ide> }
<ide>
<ide> function cpus() {
<del> const data = getCPUs();
<add> // [] is a bugfix for a regression introduced in 51cea61
<add> const data = getCPUs() || [];
<ide> const result = [];
<ide> for (var i = 0; i < data.length; i += 7) {
<ide> result.push({ | 1 |
Javascript | Javascript | remove buggy unstable_deferredupdates() | 2967ebdbeabde4aab37b45946b891170c7670164 | <ide><path>fixtures/unstable-async/suspense/src/components/App.js
<ide> import React, {Placeholder, PureComponent} from 'react';
<del>import {unstable_deferredUpdates} from 'react-dom';
<ide> import {createResource} from 'simple-cache-provider';
<ide> import {cache} from '../cache';
<ide> import Spinner from './Spinner';
<ide> export default class App extends PureComponent {
<ide> this.setState({
<ide> currentId: id,
<ide> });
<del> unstable_deferredUpdates(() => {
<add> requestIdleCallback(() => {
<ide> this.setState({
<ide> showDetail: true,
<ide> });
<ide><path>fixtures/unstable-async/time-slicing/src/index.js
<del>import React, {PureComponent, unstable_AsyncMode} from 'react';
<del>import {flushSync, render, unstable_deferredUpdates} from 'react-dom';
<add>import React, {PureComponent} from 'react';
<add>import {flushSync, render} from 'react-dom';
<ide> import _ from 'lodash';
<ide> import Charts from './Charts';
<ide> import Clock from './Clock';
<ide> class App extends PureComponent {
<ide> return;
<ide> }
<ide> if (this.state.strategy !== 'async') {
<del> this.setState(state => ({
<del> showDemo: !state.showDemo,
<del> }));
<add> flushSync(() => {
<add> this.setState(state => ({
<add> showDemo: !state.showDemo,
<add> }));
<add> });
<ide> return;
<ide> }
<ide> if (this._ignoreClick) {
<ide> return;
<ide> }
<ide> this._ignoreClick = true;
<ide>
<del> // TODO: needing setTimeout here seems like a React bug.
<del> setTimeout(() => {
<del> unstable_deferredUpdates(() => {
<del> this.setState({showDemo: true}, () => {
<del> this._ignoreClick = false;
<del> });
<add> requestIdleCallback(() => {
<add> this.setState({showDemo: true}, () => {
<add> this._ignoreClick = false;
<ide> });
<ide> });
<ide> };
<ide> class App extends PureComponent {
<ide> this.debouncedHandleChange(value);
<ide> break;
<ide> case 'async':
<del> // TODO: needing setTimeout here seems like a React bug.
<del> setTimeout(() => {
<del> unstable_deferredUpdates(() => {
<del> this.setState({value});
<del> });
<add> requestIdleCallback(() => {
<add> this.setState({value});
<ide> });
<ide> break;
<ide> default:
<ide> class App extends PureComponent {
<ide> };
<ide>
<ide> render() {
<del> const Wrapper =
<del> this.state.strategy === 'async' ? unstable_AsyncMode : 'div';
<ide> const {showClock} = this.state;
<ide> const data = this.getStreamData(this.state.value);
<ide> return (
<ide> class App extends PureComponent {
<ide> defaultValue={this.state.input}
<ide> onChange={this.handleChange}
<ide> />
<del> <Wrapper>
<del> <div className="demo" onClick={this.handleChartClick}>
<del> {this.state.showDemo && (
<del> <Charts data={data} onClick={this.handleChartClick} />
<del> )}
<del> <div style={{display: showClock ? 'block' : 'none'}}>
<del> <Clock />
<del> </div>
<add> <div className="demo" onClick={this.handleChartClick}>
<add> {this.state.showDemo && (
<add> <Charts data={data} onClick={this.handleChartClick} />
<add> )}
<add> <div style={{display: showClock ? 'block' : 'none'}}>
<add> <Clock />
<ide> </div>
<del> </Wrapper>
<add> </div>
<ide> </div>
<ide> );
<ide> }
<ide> }
<ide>
<ide> const container = document.getElementById('root');
<del>render(<App />, container);
<add>render(
<add> <React.unstable_AsyncMode>
<add> <App />
<add> </React.unstable_AsyncMode>,
<add> container
<add>);
<ide><path>packages/react-dom/src/__tests__/ReactDOMFiberAsync-test.internal.js
<ide> let ReactDOM;
<ide>
<ide> const AsyncMode = React.unstable_AsyncMode;
<ide>
<add>const setUntrackedInputValue = Object.getOwnPropertyDescriptor(
<add> HTMLInputElement.prototype,
<add> 'value',
<add>).set;
<add>
<ide> describe('ReactDOMFiberAsync', () => {
<ide> let container;
<ide>
<ide> describe('ReactDOMFiberAsync', () => {
<ide> jest.resetModules();
<ide> container = document.createElement('div');
<ide> ReactDOM = require('react-dom');
<add>
<add> document.body.appendChild(container);
<add> });
<add>
<add> afterEach(() => {
<add> document.body.removeChild(container);
<ide> });
<ide>
<ide> it('renders synchronously by default', () => {
<ide> describe('ReactDOMFiberAsync', () => {
<ide> expect(ops).toEqual(['Hi', 'Bye']);
<ide> });
<ide>
<add> it('does not perform deferred updates synchronously', () => {
<add> let inputRef = React.createRef();
<add> let asyncValueRef = React.createRef();
<add> let syncValueRef = React.createRef();
<add>
<add> class Counter extends React.Component {
<add> state = {asyncValue: '', syncValue: ''};
<add>
<add> handleChange = e => {
<add> const nextValue = e.target.value;
<add> requestIdleCallback(() => {
<add> this.setState({
<add> asyncValue: nextValue,
<add> });
<add> });
<add> this.setState({
<add> syncValue: nextValue,
<add> });
<add> };
<add>
<add> render() {
<add> return (
<add> <div>
<add> <input
<add> ref={inputRef}
<add> onChange={this.handleChange}
<add> defaultValue=""
<add> />
<add> <p ref={asyncValueRef}>{this.state.asyncValue}</p>
<add> <p ref={syncValueRef}>{this.state.syncValue}</p>
<add> </div>
<add> );
<add> }
<add> }
<add> ReactDOM.render(<Counter />, container);
<add> expect(asyncValueRef.current.textContent).toBe('');
<add> expect(syncValueRef.current.textContent).toBe('');
<add>
<add> setUntrackedInputValue.call(inputRef.current, 'hello');
<add> inputRef.current.dispatchEvent(new MouseEvent('input', {bubbles: true}));
<add> // Should only flush non-deferred update.
<add> expect(asyncValueRef.current.textContent).toBe('');
<add> expect(syncValueRef.current.textContent).toBe('hello');
<add>
<add> // Should flush both updates now.
<add> jest.runAllTimers();
<add> expect(asyncValueRef.current.textContent).toBe('hello');
<add> expect(syncValueRef.current.textContent).toBe('hello');
<add> });
<add>
<ide> describe('with feature flag disabled', () => {
<ide> beforeEach(() => {
<ide> jest.resetModules();
<ide> ReactFeatureFlags = require('shared/ReactFeatureFlags');
<del> container = document.createElement('div');
<ide> ReactDOM = require('react-dom');
<ide> });
<ide>
<ide> describe('ReactDOMFiberAsync', () => {
<ide> beforeEach(() => {
<ide> jest.resetModules();
<ide> ReactFeatureFlags = require('shared/ReactFeatureFlags');
<del> container = document.createElement('div');
<ide> ReactFeatureFlags.debugRenderPhaseSideEffectsForStrictMode = false;
<ide> ReactDOM = require('react-dom');
<ide> });
<ide><path>packages/react-dom/src/client/ReactDOM.js
<ide> const ReactDOM: Object = {
<ide>
<ide> unstable_batchedUpdates: DOMRenderer.batchedUpdates,
<ide>
<del> unstable_deferredUpdates: DOMRenderer.deferredUpdates,
<del>
<ide> unstable_interactiveUpdates: DOMRenderer.interactiveUpdates,
<ide>
<ide> flushSync: DOMRenderer.flushSync,
<ide><path>packages/react-reconciler/src/ReactFiberScheduler.js
<ide> function scheduleWork(fiber: Fiber, expirationTime: ExpirationTime) {
<ide> function deferredUpdates<A>(fn: () => A): A {
<ide> const currentTime = requestCurrentTime();
<ide> const previousExpirationContext = expirationContext;
<add> const previousIsBatchingInteractiveUpdates = isBatchingInteractiveUpdates;
<ide> expirationContext = computeAsyncExpiration(currentTime);
<add> isBatchingInteractiveUpdates = false;
<ide> try {
<ide> return fn();
<ide> } finally {
<ide> expirationContext = previousExpirationContext;
<add> isBatchingInteractiveUpdates = previousIsBatchingInteractiveUpdates;
<ide> }
<ide> }
<ide> | 5 |
PHP | PHP | fix path handling | 0602fe003c503c083d1b04510ee9dead49cb2dd3 | <ide><path>src/Illuminate/Exception/ExceptionServiceProvider.php
<ide> protected function registerWhoopsHandler()
<ide> }
<ide> else
<ide> {
<del> $this->app['whoops.handler'] = function()
<del> {
<del> with($handler = new PrettyPageHandler)->setResourcesPath(__DIR__.'/resources');
<del>
<del> return $handler;
<del> };
<add> $this->registerPrettyWhoopsHandler();
<add> }
<add> }
<add>
<add> /**
<add> * Register the "pretty" Whoops handler.
<add> *
<add> * @return void
<add> */
<add> protected function registerPrettyWhoopsHandler()
<add> {
<add> $this->app['whoops.handler'] = function()
<add> {
<add> $handler = new PrettyPageHandler;
<add>
<add> if ( ! is_null($path = $this->resourcePath())) $handler->setResourcesPath($path);
<add>
<add> return $handler;
<add> };
<add> }
<add>
<add> /**
<add> * Get the resource path for Whoops.
<add> *
<add> * @return string
<add> */
<add> protected function resourcePath()
<add> {
<add> if (is_dir($path = $this->app['path.base'].'/vendor/laravel/framework/src/Exception/resources'))
<add> {
<add> return $path;
<ide> }
<ide> }
<ide> | 1 |
Text | Text | describe solution to centralized routing | d2276179bea211fa9ad9b77b46df26271f53c768 | <ide><path>readme.md
<ide> So, you could only use `pathname`, `query` and `asPath` fields of the `context`
<ide>
<ide> > Basically, you won't be able to render HTML content dynamically as we pre-build HTML files. If you need that, you need run your app with `next start`.
<ide>
<del>
<ide> ## Recipes
<ide>
<ide> - [Setting up 301 redirects](https://www.raygesualdo.com/posts/301-redirects-with-nextjs/)
<ide> Yes! Here's an example with [Apollo](./examples/with-apollo).
<ide> Yes! Here's an [example](./examples/with-redux)
<ide> </details>
<ide>
<add><details>
<add><summary>Why aren't routes I have for my static export accessible in the development server?</summary>
<add>
<add>This is a known issue with the architecture of Next.js. Until a solution is built into the framework, take a look at [this example solution](https://github.com/zeit/next.js/wiki/Centralizing-Routing) to centralize your routing.
<add></details>
<add>
<ide> <details>
<ide> <summary>Can I use Next with my favorite Javascript library or toolkit?</summary>
<ide> | 1 |
PHP | PHP | move the normalizing logic to the public function | e118155b92832c6ed742b0a08aff2502c4726939 | <ide><path>src/Network/Request.php
<ide> public function __isset($name) {
<ide> * @return bool Whether or not the request is the type you are checking.
<ide> */
<ide> public function is($type) {
<add> if (is_array($type)) {
<add> $result = array_map(array($this, 'is'), $type);
<add> return count(array_filter($result)) > 0;
<add> }
<add>
<add> $type = strtolower($type);
<add> if (!isset(static::$_detectors[$type])) {
<add> return false;
<add> }
<add>
<ide> if (!isset($this->_isResults[$type])) {
<ide> $this->_isResults[$type] = $this->_is($type);
<ide> }
<ide> public function is($type) {
<ide> * @return bool Whether or not the request is the type you are checking.
<ide> */
<ide> protected function _is($type) {
<del> if (is_array($type)) {
<del> $result = array_map(array($this, 'is'), $type);
<del> return count(array_filter($result)) > 0;
<del> }
<del> $type = strtolower($type);
<del> if (!isset(static::$_detectors[$type])) {
<del> return false;
<del> }
<ide> $detect = static::$_detectors[$type];
<ide> if (is_callable($detect)) {
<ide> return call_user_func($detect, $this); | 1 |
Javascript | Javascript | allow getinitialstate() for mixins | adb666e67fa2a4a525a641f6fa7a239c3dd1cd1c | <ide><path>src/core/ReactCompositeComponent.js
<ide> var invariant = require('invariant');
<ide> var keyMirror = require('keyMirror');
<ide> var merge = require('merge');
<ide> var mixInto = require('mixInto');
<add>var objMap = require('objMap');
<ide>
<ide> /**
<ide> * Policies that describe methods in `ReactCompositeComponentInterface`.
<ide> var SpecPolicy = keyMirror({
<ide> /**
<ide> * These methods are overriding the base ReactCompositeComponent class.
<ide> */
<del> OVERRIDE_BASE: null
<add> OVERRIDE_BASE: null,
<add> /**
<add> * These methods are similar to DEFINE_MANY, except we assume they return
<add> * objects. We try to merge the keys of the return values of all the mixed in
<add> * functions. If there is a key conflict we throw.
<add> */
<add> DEFINE_MANY_MERGED: null
<ide> });
<ide>
<ide> /**
<ide> var ReactCompositeComponentInterface = {
<ide> * @return {object}
<ide> * @optional
<ide> */
<del> getInitialState: SpecPolicy.DEFINE_ONCE,
<add> getInitialState: SpecPolicy.DEFINE_MANY_MERGED,
<ide>
<ide> /**
<ide> * Uses props from `this.props` and state from `this.state` to render the
<ide> function validateMethodOverride(proto, name) {
<ide> // Disallow defining methods more than once unless explicitly allowed.
<ide> if (proto.hasOwnProperty(name)) {
<ide> invariant(
<del> specPolicy === SpecPolicy.DEFINE_MANY,
<add> specPolicy === SpecPolicy.DEFINE_MANY ||
<add> specPolicy === SpecPolicy.DEFINE_MANY_MERGED,
<ide> 'ReactCompositeComponentInterface: You are attempting to define ' +
<ide> '`%s` on your component more than once. This conflict may be due ' +
<ide> 'to a mixin.',
<ide> function mixSpecIntoComponent(Constructor, spec) {
<ide> if (isInherited) {
<ide> // For methods which are defined more than once, call the existing
<ide> // methods before calling the new property.
<del> proto[name] = createChainedFunction(proto[name], property);
<add> if (ReactCompositeComponentInterface[name] ===
<add> SpecPolicy.DEFINE_MANY_MERGED) {
<add> proto[name] = createMergedResultFunction(proto[name], property);
<add> } else {
<add> proto[name] = createChainedFunction(proto[name], property);
<add> }
<ide> } else {
<ide> proto[name] = property;
<ide> }
<ide> function mixSpecIntoComponent(Constructor, spec) {
<ide> }
<ide> }
<ide>
<add>/**
<add> * Merge two objects, but throw if both contain the same key.
<add> *
<add> * @param {object} one The first object, which is mutated.
<add> * @param {object} two The second object
<add> * @return {object} one after it has been mutated to contain everything in two.
<add> */
<add>function mergeObjectsWithNoDuplicateKeys(one, two) {
<add> invariant(
<add> one && two && typeof one === 'object' && typeof two === 'object',
<add> 'mergeObjectsWithNoDuplicateKeys(): Cannot merge non-objects'
<add> );
<add>
<add> objMap(two, function(value, key) {
<add> invariant(
<add> one[key] === undefined,
<add> 'mergeObjectsWithNoDuplicateKeys(): ' +
<add> 'Tried to merge two objects with the same key: %s',
<add> key
<add> );
<add> one[key] = value;
<add> });
<add> return one;
<add>}
<add>
<add>/**
<add> * Creates a function that invokes two functions and merges their return values.
<add> *
<add> * @param {function} one Function to invoke first.
<add> * @param {function} two Function to invoke second.
<add> * @return {function} Function that invokes the two argument functions.
<add> * @private
<add> */
<add>function createMergedResultFunction(one, two) {
<add> return function mergedResult() {
<add> return mergeObjectsWithNoDuplicateKeys(
<add> one.apply(this, arguments),
<add> two.apply(this, arguments)
<add> );
<add> };
<add>}
<add>
<ide> /**
<ide> * Creates a function that invokes two functions and ignores their return vales.
<ide> *
<ide><path>src/core/__tests__/ReactCompositeComponent-test.js
<ide> describe('ReactCompositeComponent', function() {
<ide> expect(ReactCurrentOwner.current).toBe(null);
<ide> });
<ide>
<add> it('should support mixins with getInitialState()', function() {
<add> var Mixin = {
<add> getInitialState: function() {
<add> return {mixin: true};
<add> }
<add> };
<add> var Component = React.createClass({
<add> mixins: [Mixin],
<add> getInitialState: function() {
<add> return {component: true};
<add> },
<add> render: function() {
<add> return <span />;
<add> }
<add> });
<add> var instance = <Component />;
<add> ReactTestUtils.renderIntoDocument(instance);
<add> expect(instance.state.component).toBe(true);
<add> expect(instance.state.mixin).toBe(true);
<add> });
<add>
<add> it('should throw with conflicting getInitialState() methods', function() {
<add> var Mixin = {
<add> getInitialState: function() {
<add> return {x: true};
<add> }
<add> };
<add> var Component = React.createClass({
<add> mixins: [Mixin],
<add> getInitialState: function() {
<add> return {x: true};
<add> },
<add> render: function() {
<add> return <span />;
<add> }
<add> });
<add> var instance = <Component />;
<add> expect(function() {
<add> ReactTestUtils.renderIntoDocument(instance);
<add> }).toThrow(
<add> 'Invariant Violation: mergeObjectsWithNoDuplicateKeys(): ' +
<add> 'Tried to merge two objects with the same key: x'
<add> );
<add> });
<add>
<add> it('should throw with bad getInitialState() return values', function() {
<add> var Mixin = {
<add> getInitialState: function() {
<add> return null;
<add> }
<add> };
<add> var Component = React.createClass({
<add> mixins: [Mixin],
<add> getInitialState: function() {
<add> return {x: true};
<add> },
<add> render: function() {
<add> return <span />;
<add> }
<add> });
<add> var instance = <Component />;
<add> expect(function() {
<add> ReactTestUtils.renderIntoDocument(instance);
<add> }).toThrow(
<add> 'Invariant Violation: mergeObjectsWithNoDuplicateKeys(): ' +
<add> 'Cannot merge non-objects'
<add> );
<add> });
<add>
<ide> }); | 2 |
Text | Text | add info about environment variables and replit | 9a637e2667ad067f0fa8898d9e2aeed34dc6ed86 | <ide><path>curriculum/challenges/english/05-back-end-development-and-apis/mongodb-and-mongoose/install-and-set-up-mongoose.md
<ide> Follow <a href='https://www.freecodecamp.org/news/get-started-with-mongodb-atlas
<ide>
<ide> # --instructions--
<ide>
<del>Add `mongodb@~3.6.0` and `mongoose@~5.4.0` to the project’s `package.json`. Then, require mongoose as `mongoose` in `myApp.js`. Create a `.env` file and add a `MONGO_URI` variable to it. Its value should be your MongoDB Atlas database URI. Be sure to surround the URI with single or double quotes, and remember that you can't use spaces around the `=` in environment variables. For example, `MONGO_URI='VALUE'`. When you are done, connect to the database using the following syntax:
<add>Add `mongodb@~3.6.0` and `mongoose@~5.4.0` to the project’s `package.json`. Then, require mongoose as `mongoose` in `myApp.js`. Create a `.env` file and add a `MONGO_URI` variable to it. Its value should be your MongoDB Atlas database URI. Be sure to surround the URI with single or double quotes, and remember that you can't use spaces around the `=` in environment variables. For example, `MONGO_URI='VALUE'`.
<add>
<add>**Note:** If you are using Replit, you cannot create a `.env` file. Instead, use the built-in <dfn>SECRETS</dfn> tab to add the variable. <em>Do not</em> surround the values with quotes when using the <em>SECRETS</em> tab.
<add>
<add>When you are done, connect to the database using the following syntax:
<ide>
<ide> ```js
<ide> mongoose.connect(<Your URI>, { useNewUrlParser: true, useUnifiedTopology: true }); | 1 |
Mixed | Ruby | load silent failure on sql error | 09a90bb6a06b1dafc9881651f585e8b40dda4227 | <ide><path>activerecord/CHANGELOG.md
<ide>
<add>* PostgreSQL: Fix db:structure:load silent failure on SQL error
<add>
<add> The command line flag "-v ON_ERROR_STOP=1" should be used
<add> when invoking psql to make sure errors are not suppressed.
<add>
<add> Example:
<add>
<add> psql -v ON_ERROR_STOP=1 -q -f awesome-file.sql my-app-db
<add>
<add> Fixes #23818.
<add>
<add> *Ralin Chimev*
<add>
<add>
<ide> Please check [5-0-stable](https://github.com/rails/rails/blob/5-0-stable/activerecord/CHANGELOG.md) for previous changes.
<ide><path>activerecord/lib/active_record/tasks/postgresql_database_tasks.rb
<ide> module ActiveRecord
<ide> module Tasks # :nodoc:
<ide> class PostgreSQLDatabaseTasks # :nodoc:
<ide> DEFAULT_ENCODING = ENV['CHARSET'] || 'utf8'
<add> ON_ERROR_STOP_1 = 'ON_ERROR_STOP=1'.freeze
<ide>
<ide> delegate :connection, :establish_connection, :clear_active_connections!,
<ide> to: ActiveRecord::Base
<ide> def structure_dump(filename)
<ide>
<ide> def structure_load(filename)
<ide> set_psql_env
<del> args = [ '-q', '-f', filename, configuration['database'] ]
<add> args = [ '-v', ON_ERROR_STOP_1, '-q', '-f', filename, configuration['database'] ]
<ide> run_cmd('psql', args, 'loading' )
<ide> end
<ide>
<ide><path>activerecord/test/cases/tasks/postgresql_rake_test.rb
<ide> def setup
<ide>
<ide> def test_structure_load
<ide> filename = "awesome-file.sql"
<del> Kernel.expects(:system).with('psql', '-q', '-f', filename, @configuration['database']).returns(true)
<add> Kernel.expects(:system).with('psql', '-v', 'ON_ERROR_STOP=1', '-q', '-f', filename, @configuration['database']).returns(true)
<ide>
<ide> ActiveRecord::Tasks::DatabaseTasks.structure_load(@configuration, filename)
<ide> end
<ide>
<ide> def test_structure_load_accepts_path_with_spaces
<ide> filename = "awesome file.sql"
<del> Kernel.expects(:system).with('psql', '-q', '-f', filename, @configuration['database']).returns(true)
<add> Kernel.expects(:system).with('psql', '-v', 'ON_ERROR_STOP=1', '-q', '-f', filename, @configuration['database']).returns(true)
<ide>
<ide> ActiveRecord::Tasks::DatabaseTasks.structure_load(@configuration, filename)
<ide> end | 3 |
PHP | PHP | fix method annotation as per review | 6d2f94689af2a8f0f174ce3fd214debd015a8025 | <ide><path>src/Console/Shell.php
<ide> *
<ide> * Is the equivalent of Cake\Controller\Controller on the command line.
<ide> *
<del> * @method int|bool main()
<add> * @method int|bool|null main(...$args)
<ide> */
<ide> class Shell
<ide> { | 1 |
PHP | PHP | maintain alphabetical order | d64b5a52af6b7d6eb338b12510611e4c043b1afb | <ide><path>resources/lang/en/validation.php
<ide> 'string' => 'The :attribute must be :size characters.',
<ide> 'array' => 'The :attribute must contain :size items.',
<ide> ],
<del> 'timezone' => 'The :attribute must be a valid zone.',
<ide> 'string' => 'The :attribute must be a string.',
<add> 'timezone' => 'The :attribute must be a valid zone.',
<ide> 'unique' => 'The :attribute has already been taken.',
<ide> 'url' => 'The :attribute format is invalid.',
<ide> | 1 |
Text | Text | revise stability section of values doc | c31ef9827e5b2edf44734bc2eac6ea0f7989e914 | <ide><path>doc/guides/technical-values.md
<ide> with Node.js. Some key elements of this include:
<ide> * Enabling/supporting external packages to ensure overall developer experience
<ide>
<ide> ### 2 - Stability
<del>Whenever possible, we seek to insure that currently-working code continues to
<del>work. We seek to keep the trust of developers and end-users. Therefore, we value
<del>stability.
<add>Whenever possible, we seek to ensure that working code continues to work. To
<add>keep the trust of developers and users, we value stability.
<ide> Some key elements of this include:
<ide> * Backwards compatibility
<ide> * Stable releases on a predictable schedule | 1 |
Ruby | Ruby | move inheritableoptions into activesupport | 664090348154ccbf1274a13bbc3d3c37ba35bc7d | <ide><path>actionpack/lib/action_controller/metal.rb
<ide> require 'active_support/core_ext/class/attribute'
<ide> require 'active_support/ordered_options'
<ide>
<del>module ActiveSupport
<del> class InheritableOptions < OrderedOptions
<del> def initialize(parent)
<del> super() { |h,k| parent[k] }
<del> end
<del> end
<del>end
<del>
<ide> module ActionController
<ide> # ActionController::Metal provides a way to get a valid Rack application from a controller.
<ide> #
<ide><path>activesupport/lib/active_support/ordered_options.rb
<ide> def method_missing(name, *args)
<ide> end
<ide> end
<ide> end
<add>
<add> class InheritableOptions < OrderedOptions
<add> def initialize(parent)
<add> super() { |h,k| parent[k] }
<add> end
<add> end
<ide> end | 2 |
Text | Text | add react 16.8.2 changelog | ff188d666bcb4c7aad38009ee61ad292349244b0 | <ide><path>CHANGELOG.md
<ide> </summary>
<ide> </details>
<ide>
<add>## 16.8.2 (February 14, 2019)
<add>
<add>### React DOM
<add>
<add>* Fix `ReactDOM.render` being ignored inside `useEffect`. ([@gaearon](https://github.com/gaearon) in [#14799](https://github.com/facebook/react/pull/14799))
<add>* Fix a crash when unmounting empty portals. ([@gaearon](https://github.com/gaearon) in [#14820](https://github.com/facebook/react/pull/14820))
<add>* Fix `useImperativeHandle` to work correctly when no deps are specified. ([@gaearon](https://github.com/gaearon) in [#14801](https://github.com/facebook/react/pull/14801))
<add>* Fix `crossOrigin` attribute to work in SVG `image` elements. ([@aweary](https://github.com/aweary) in [#14832](https://github.com/facebook/react/pull/14832))
<add>* Fix a false positive warning when using Suspense with Hooks. ([@gaearon](https://github.com/gaearon) in [#14821](https://github.com/facebook/react/pull/14821))
<add>
<ide> ## 16.8.1 (February 6, 2019)
<ide>
<ide> ### React DOM and React Test Renderer | 1 |
Text | Text | add git req to linux build instructions | e046bb52d78b40aa14bebaa0a35d61b937bf16bf | <ide><path>docs/build-instructions/linux.md
<ide> Ubuntu LTS 12.04 64-bit is the recommended platform.
<ide> * `npm config set python /usr/bin/python2 -g` to ensure that gyp uses Python 2
<ide> * This command may require `sudo` depending on how you have
<ide> [configured npm](https://github.com/joyent/node/wiki/Installing-Node.js-via-package-manager#ubuntu-mint-elementary-os).
<add> * Git
<add> * on Ubuntu/Debian: `sudo apt-get install git`
<add> * on Fedora: `sudo yum install git-core`
<ide>
<ide> ## Instructions
<ide> | 1 |
Text | Text | remove outdated step in onboarding exercise | c154c6c5cfc464658dfae331b2d68c9daaca3f8e | <ide><path>onboarding.md
<ide> needs to be pointed out separately during the onboarding.
<ide> so that when the commit lands, the nomination issue url will be
<ide> automatically closed.
<ide> * Label your pull request with the `doc`, `notable-change`, and `fast-track`
<del> labels.
<add> labels. The `fast-track` label should cause the Node.js GitHub bot to post a
<add> comment in the pull request asking collaborators to approve the pull request
<add> by leaving a 👍 reaction on the comment.
<ide> * Run CI on the pull request. Use the `node-test-pull-request` CI task.
<ide> * After two Collaborator approvals for the change and two Collaborator approvals
<del> for fast-tracking, land the pull request.
<del>* Leave a comment in the pull request:
<del> `Please 👍 this comment to approve fast-tracking`.
<add> for fast-tracking, land the PR.
<ide> * If there are not enough approvals within a reasonable time, consider the
<ide> single approval of the onboarding TSC member sufficient, and land the pull
<ide> request. | 1 |
Javascript | Javascript | add userdata to geometry | 2928e097a2050675791e449345a7826a9ccaa20b | <ide><path>src/core/Geometry.js
<ide> function Geometry() {
<ide> this.boundingBox = null;
<ide> this.boundingSphere = null;
<ide>
<add> this.userData = {};
<add>
<ide> // update flags
<ide>
<ide> this.elementsNeedUpdate = false;
<ide> Geometry.prototype = Object.assign( Object.create( EventDispatcher.prototype ),
<ide> data.uuid = this.uuid;
<ide> data.type = this.type;
<ide> if ( this.name !== '' ) data.name = this.name;
<add> if ( Object.keys( this.userData ).length > 0 ) data.userData = this.userData;
<ide>
<ide> if ( this.parameters !== undefined ) {
<ide>
<ide> Geometry.prototype = Object.assign( Object.create( EventDispatcher.prototype ),
<ide>
<ide> }
<ide>
<add> // user data
<add>
<add> this.userData = source.userData;
<add>
<ide> // update flags
<ide>
<ide> this.elementsNeedUpdate = source.elementsNeedUpdate; | 1 |
Ruby | Ruby | fix formulaunavailableerror handling | 89ba5b2b0a9ba25aacd51baa367d963d9915f62f | <ide><path>Library/Contributions/cmd/brew-test-bot.rb
<ide> def initialize argument
<ide> @formulae = []
<ide>
<ide> url_match = argument.match HOMEBREW_PULL_OR_COMMIT_URL_REGEX
<del> formula = Formula.factory argument rescue FormulaUnavailableError
<add>
<add> begin
<add> formula = Formulary.factory(argument)
<add> rescue FormulaUnavailableError
<add> end
<add>
<ide> git "rev-parse", "--verify", "-q", argument
<ide> if $?.success?
<ide> @hash = argument | 1 |
Javascript | Javascript | extract polyfillglobal from initializecore | f7f5dc66493ad25a85927a9503728b0491c8aab9 | <ide><path>Libraries/Core/InitializeCore.js
<ide> */
<ide> 'use strict';
<ide>
<add>const {polyfillObjectProperty, polyfillGlobal} = require('PolyfillFunctions');
<add>
<ide> if (global.GLOBAL === undefined) {
<ide> global.GLOBAL = global;
<ide> }
<ide> if (global.window === undefined) {
<ide> global.window = global;
<ide> }
<ide>
<del>const defineLazyObjectProperty = require('defineLazyObjectProperty');
<del>
<ide> // Set up collections
<ide> const _shouldPolyfillCollection = require('_shouldPolyfillES6Collection');
<ide> if (_shouldPolyfillCollection('Map')) {
<ide> if (_shouldPolyfillCollection('Set')) {
<ide> polyfillGlobal('Set', () => require('Set'));
<ide> }
<ide>
<del>/**
<del> * Sets an object's property. If a property with the same name exists, this will
<del> * replace it but maintain its descriptor configuration. The property will be
<del> * replaced with a lazy getter.
<del> *
<del> * In DEV mode the original property value will be preserved as `original[PropertyName]`
<del> * so that, if necessary, it can be restored. For example, if you want to route
<del> * network requests through DevTools (to trace them):
<del> *
<del> * global.XMLHttpRequest = global.originalXMLHttpRequest;
<del> *
<del> * @see https://github.com/facebook/react-native/issues/934
<del> */
<del>function defineLazyProperty<T>(
<del> object: Object,
<del> name: string,
<del> getValue: () => T,
<del>): void {
<del> const descriptor = Object.getOwnPropertyDescriptor(object, name);
<del> if (__DEV__ && descriptor) {
<del> const backupName = `original${name[0].toUpperCase()}${name.substr(1)}`;
<del> Object.defineProperty(object, backupName, {
<del> ...descriptor,
<del> value: object[name],
<del> });
<del> }
<del>
<del> const {enumerable, writable, configurable} = descriptor || {};
<del> if (descriptor && !configurable) {
<del> console.error('Failed to set polyfill. ' + name + ' is not configurable.');
<del> return;
<del> }
<del>
<del> defineLazyObjectProperty(object, name, {
<del> get: getValue,
<del> enumerable: enumerable !== false,
<del> writable: writable !== false,
<del> });
<del>}
<del>
<del>function polyfillGlobal<T>(name: string, getValue: () => T): void {
<del> defineLazyProperty(global, name, getValue);
<del>}
<del>
<ide> // Set up process
<ide> global.process = global.process || {};
<ide> global.process.env = global.process.env || {};
<ide> if (navigator === undefined) {
<ide> }
<ide>
<ide> // see https://github.com/facebook/react-native/issues/10881
<del>defineLazyProperty(navigator, 'product', () => 'ReactNative');
<del>defineLazyProperty(navigator, 'geolocation', () => require('Geolocation'));
<add>polyfillObjectProperty(navigator, 'product', () => 'ReactNative');
<add>polyfillObjectProperty(navigator, 'geolocation', () => require('Geolocation'));
<ide>
<ide> // Just to make sure the JS gets packaged up. Wait until the JS environment has
<ide> // been initialized before requiring them.
<ide><path>Libraries/Utilities/PolyfillFunctions.js
<add>/**
<add> * Copyright (c) 2013-present, Facebook, Inc.
<add> * All rights reserved.
<add> *
<add> * This source code is licensed under the BSD-style license found in the
<add> * LICENSE file in the root directory of this source tree. An additional grant
<add> * of patent rights can be found in the PATENTS file in the same directory.
<add> *
<add> * @providesModule PolyfillFunctions
<add> * @flow
<add> * @format
<add> */
<add>
<add>'use strict';
<add>
<add>const defineLazyObjectProperty = require('defineLazyObjectProperty');
<add>
<add>/**
<add> * Sets an object's property. If a property with the same name exists, this will
<add> * replace it but maintain its descriptor configuration. The property will be
<add> * replaced with a lazy getter.
<add> *
<add> * In DEV mode the original property value will be preserved as `original[PropertyName]`
<add> * so that, if necessary, it can be restored. For example, if you want to route
<add> * network requests through DevTools (to trace them):
<add> *
<add> * global.XMLHttpRequest = global.originalXMLHttpRequest;
<add> *
<add> * @see https://github.com/facebook/react-native/issues/934
<add> */
<add>function polyfillObjectProperty<T>(
<add> object: Object,
<add> name: string,
<add> getValue: () => T,
<add>): void {
<add> const descriptor = Object.getOwnPropertyDescriptor(object, name);
<add> if (__DEV__ && descriptor) {
<add> const backupName = `original${name[0].toUpperCase()}${name.substr(1)}`;
<add> Object.defineProperty(object, backupName, {
<add> ...descriptor,
<add> value: object[name],
<add> });
<add> }
<add>
<add> const {enumerable, writable, configurable} = descriptor || {};
<add> if (descriptor && !configurable) {
<add> console.error('Failed to set polyfill. ' + name + ' is not configurable.');
<add> return;
<add> }
<add>
<add> defineLazyObjectProperty(object, name, {
<add> get: getValue,
<add> enumerable: enumerable !== false,
<add> writable: writable !== false,
<add> });
<add>}
<add>
<add>function polyfillGlobal<T>(name: string, getValue: () => T): void {
<add> polyfillObjectProperty(global, name, getValue);
<add>}
<add>
<add>module.exports = {polyfillObjectProperty, polyfillGlobal}; | 2 |
Go | Go | update incorrect comments of checkpointlist | 98ffe52fbc953ec2fce0d3b6c3b2405188b96cd5 | <ide><path>client/checkpoint_list.go
<ide> import (
<ide> "golang.org/x/net/context"
<ide> )
<ide>
<del>// CheckpointList returns the volumes configured in the docker host.
<add>// CheckpointList returns the checkpoints of the given container in the docker host
<ide> func (cli *Client) CheckpointList(ctx context.Context, container string, options types.CheckpointListOptions) ([]types.Checkpoint, error) {
<ide> var checkpoints []types.Checkpoint
<ide> | 1 |
PHP | PHP | remove empty line | 87bcbacabfe902360c490750f69c0f5d6e83d104 | <ide><path>src/View/Helper/FormHelper.php
<ide> public function getSourceValue($fieldname)
<ide> }
<ide> }
<ide> }
<del>
<ide> } | 1 |
Java | Java | remove unused import, followup to d14114388 | c933755c6a04447acb15210bdee30f3985df2aa5 | <ide><path>ReactAndroid/src/main/java/com/facebook/react/fabric/FabricUIManager.java
<ide> import com.facebook.react.fabric.mounting.mountitems.UpdatePropsMountItem;
<ide> import com.facebook.react.modules.core.ReactChoreographer;
<ide> import com.facebook.react.uimanager.ReactRootViewTagGenerator;
<del>import com.facebook.react.uimanager.ReactStylesDiffMap;
<ide> import com.facebook.react.uimanager.ThemedReactContext;
<ide> import com.facebook.react.uimanager.ViewManagerPropertyUpdater;
<ide> import com.facebook.react.uimanager.ViewManagerRegistry; | 1 |
Javascript | Javascript | improve error output | ccb303e03795cee25527edb867f7903477ed5de6 | <ide><path>lib/repl.js
<ide> function REPLServer(prompt,
<ide>
<ide> self._domain.on('error', function debugDomainError(e) {
<ide> debug('domain error');
<del> const top = replMap.get(self);
<del> const pstrace = Error.prepareStackTrace;
<del> Error.prepareStackTrace = prepareStackTrace(pstrace);
<del> if (typeof e === 'object')
<add> let errStack = '';
<add>
<add> if (typeof e === 'object' && e !== null) {
<add> const pstrace = Error.prepareStackTrace;
<add> Error.prepareStackTrace = prepareStackTrace(pstrace);
<ide> internalUtil.decorateErrorStack(e);
<del> Error.prepareStackTrace = pstrace;
<del> const isError = internalUtil.isError(e);
<del> if (!self.underscoreErrAssigned)
<del> self.lastError = e;
<del> if (e instanceof SyntaxError && e.stack) {
<del> // remove repl:line-number and stack trace
<del> e.stack = e.stack
<del> .replace(/^repl:\d+\r?\n/, '')
<del> .replace(/^\s+at\s.*\n?/gm, '');
<del> } else if (isError && self.replMode === exports.REPL_MODE_STRICT) {
<del> e.stack = e.stack.replace(/(\s+at\s+repl:)(\d+)/,
<del> (_, pre, line) => pre + (line - 1));
<add> Error.prepareStackTrace = pstrace;
<add>
<add> if (e.domainThrown) {
<add> delete e.domain;
<add> delete e.domainThrown;
<add> }
<add>
<add> if (internalUtil.isError(e)) {
<add> if (e.stack) {
<add> if (e.name === 'SyntaxError') {
<add> // Remove stack trace.
<add> e.stack = e.stack
<add> .replace(/^repl:\d+\r?\n/, '')
<add> .replace(/^\s+at\s.*\n?/gm, '');
<add> } else if (self.replMode === exports.REPL_MODE_STRICT) {
<add> e.stack = e.stack.replace(/(\s+at\s+repl:)(\d+)/,
<add> (_, pre, line) => pre + (line - 1));
<add> }
<add> }
<add> errStack = util.inspect(e);
<add>
<add> // Remove one line error braces to keep the old style in place.
<add> if (errStack[errStack.length - 1] === ']') {
<add> errStack = errStack.slice(1, -1);
<add> }
<add> }
<ide> }
<del> if (isError && e.stack) {
<del> top.outputStream.write(`${e.stack}\n`);
<del> } else {
<del> top.outputStream.write(`Thrown: ${String(e)}\n`);
<add>
<add> if (errStack === '') {
<add> errStack = `Thrown: ${util.inspect(e)}`;
<ide> }
<add>
<add> if (!self.underscoreErrAssigned) {
<add> self.lastError = e;
<add> }
<add>
<add> const top = replMap.get(self);
<add> top.outputStream.write(`${errStack}\n`);
<ide> top.clearBufferedCommand();
<ide> top.lines.level = [];
<ide> top.displayPrompt();
<ide><path>test/parallel/test-repl-top-level-await.js
<ide> async function ctrlCTest() {
<ide> { ctrl: true, name: 'c' }
<ide> ]), [
<ide> 'await timeout(100000)\r',
<del> 'Thrown: Error [ERR_SCRIPT_EXECUTION_INTERRUPTED]: ' +
<add> 'Error [ERR_SCRIPT_EXECUTION_INTERRUPTED]: ' +
<ide> 'Script execution was interrupted by `SIGINT`',
<ide> PROMPT
<ide> ]);
<ide><path>test/parallel/test-repl-underscore.js
<ide> function testError() {
<ide> '[Error: foo]',
<ide>
<ide> // The sync error, with individual property echoes
<del> /Error: ENOENT: no such file or directory, scandir '.*nonexistent.*'/,
<add> /^{ Error: ENOENT: no such file or directory, scandir '.*nonexistent.*'/,
<ide> /Object\.readdirSync/,
<add> /^ errno: -(2|4058),$/,
<add> " syscall: 'scandir',",
<add> " code: 'ENOENT',",
<add> " path: '/nonexistent?' }",
<ide> "'ENOENT'",
<ide> "'scandir'",
<ide>
<ide><path>test/parallel/test-repl.js
<ide> const errorTests = [
<ide> // Uncaught error throws and prints out
<ide> {
<ide> send: 'throw new Error(\'test error\');',
<del> expect: /^Error: test error/
<add> expect: 'Error: test error'
<add> },
<add> {
<add> send: "throw { foo: 'bar' };",
<add> expect: "Thrown: { foo: 'bar' }"
<ide> },
<ide> // Common syntax error is treated as multiline command
<ide> {
<ide> const errorTests = [
<ide> {
<ide> send: 'require("internal/repl")',
<ide> expect: [
<del> /^Error: Cannot find module 'internal\/repl'/,
<add> /^{ Error: Cannot find module 'internal\/repl'/,
<ide> /^ at .*/,
<ide> /^ at .*/,
<ide> /^ at .*/, | 4 |
PHP | PHP | simplify email check | 52664a9a7b9fcafff76b285aaaa0c156eaf72441 | <ide><path>src/Illuminate/Foundation/Exceptions/Handler.php
<ide> protected function context()
<ide> {
<ide> return array_filter([
<ide> 'userId' => Auth::id(),
<del> 'email' => Auth::check() && isset(Auth::user()->email)
<del> ? Auth::user()->email : null,
<add> 'email' => Auth::user()->email ?? null,
<ide> ]);
<ide> }
<ide> | 1 |
PHP | PHP | fix failing tests and merge mistakes | 4ac7972eb8a96f754855f130612765aa710c93f2 | <ide><path>lib/Cake/Routing/DispatcherFilter.php
<ide> */
<ide>
<ide> namespace Cake\Routing;
<add>
<ide> use Cake\Event\EventListener;
<add>use Cake\Event\Event;
<ide>
<ide> /**
<ide> * This abstract class represents a filter to be applied to a dispatcher cycle. It acts as as
<ide> public function implementedEvents() {
<ide> * keys in the data property.
<ide> * @return Cake\Network\Response|boolean
<ide> **/
<del> public function beforeDispatch(CakeEvent $event) {
<add> public function beforeDispatch(Event $event) {
<ide> }
<ide>
<ide> /**
<ide> public function beforeDispatch(CakeEvent $event) {
<ide> * keys in the data property.
<ide> * @return mixed boolean to stop the event dispatching or null to continue
<ide> **/
<del> public function afterDispatch(CakeEvent $event) {
<add> public function afterDispatch(Event $event) {
<ide> }
<ide> }
<ide><path>lib/Cake/Routing/Filter/AssetDispatcher.php
<ide> use Cake\Core\App;
<ide> use Cake\Core\Configure;
<ide> use Cake\Core\Plugin;
<add>use Cake\Event\Event;
<ide> use Cake\Network\Response;
<ide> use Cake\Routing\DispatcherFilter;
<ide> use Cake\Utility\Inflector;
<ide> class AssetDispatcher extends DispatcherFilter {
<ide> * @param Cake\Event\Event $event containing the request and response object
<ide> * @return Cake\Network\Response if the client is requesting a recognized asset, null otherwise
<ide> */
<del> public function beforeDispatch(CakeEvent $event) {
<add> public function beforeDispatch(Event $event) {
<ide> $url = $event->data['request']->url;
<ide> if (strpos($url, '..') !== false || strpos($url, '.') === false) {
<ide> return;
<ide><path>lib/Cake/Routing/Filter/CacheDispatcher.php
<ide>
<ide> namespace Cake\Routing\Filter;
<ide> use Cake\Core\Configure;
<add>use Cake\Event\Event;
<ide> use Cake\Routing\DispatcherFilter;
<ide> use Cake\Utility\Inflector;
<ide> use Cake\View\View;
<ide> class CacheDispatcher extends DispatcherFilter {
<ide> * @param Cake\Event\Event $event containing the request and response object
<ide> * @return Cake\NetworkResponse with cached content if found, null otherwise
<ide> */
<del> public function beforeDispatch(CakeEvent $event) {
<add> public function beforeDispatch(Event $event) {
<ide> if (Configure::read('Cache.check') !== true) {
<ide> return;
<ide> }
<ide><path>lib/Cake/Test/TestApp/Plugin/TestPlugin/Routing/Filter/Test2DispatcherFilter.php
<ide> <?php
<ide> /**
<del> *
<del> * PHP 5
<del> *
<ide> * CakePHP(tm) : Rapid Development Framework (http://cakephp.org)
<ide> * Copyright 2005-2012, Cake Software Foundation, Inc. (http://cakefoundation.org)
<ide> *
<ide> *
<ide> * @copyright Copyright 2005-2012, Cake Software Foundation, Inc. (http://cakefoundation.org)
<ide> * @link http://cakephp.org CakePHP(tm) Project
<del> * @package Cake.Test.TestApp.Routing.Filter
<ide> * @since CakePHP(tm) v 2.2
<ide> * @license MIT License (http://www.opensource.org/licenses/mit-license.php)
<ide> */
<del>
<ide> namespace TestPlugin\Routing\Filter;
<add>
<add>use Cake\Event\Event;
<ide> use Cake\Routing\DispatcherFilter;
<ide>
<ide> class Test2DispatcherFilter extends DispatcherFilter {
<ide>
<del> public function beforeDispatch(CakeEvent $event) {
<add> public function beforeDispatch(Event $event) {
<ide> $event->data['response']->statusCode(500);
<ide> $event->stopPropagation();
<ide> return $event->data['response'];
<ide> }
<ide>
<del> public function afterDispatch(CakeEvent $event) {
<add> public function afterDispatch(Event $event) {
<ide> $event->data['response']->statusCode(200);
<ide> }
<ide>
<ide><path>lib/Cake/Test/TestApp/Plugin/TestPlugin/Routing/Filter/TestDispatcherFilter.php
<ide> * @since CakePHP(tm) v 2.2
<ide> * @license MIT License (http://www.opensource.org/licenses/mit-license.php)
<ide> */
<del>
<ide> namespace TestPlugin\Routing\Filter;
<add>
<add>use Cake\Event\Event;
<ide> use Cake\Routing\DispatcherFilter;
<ide>
<ide> class TestDispatcherFilter extends DispatcherFilter {
<ide>
<del> public function beforeDispatch(CakeEvent $event) {
<add> public function beforeDispatch(Event $event) {
<ide> $event->data['request']->params['altered'] = true;
<ide> }
<ide>
<del> public function afterDispatch(CakeEvent $event) {
<add> public function afterDispatch(Event $event) {
<ide> $event->data['response']->statusCode(304);
<ide> }
<ide>
<ide><path>lib/Cake/Test/TestCase/Routing/RouterTest.php
<ide> public function testUrlProtocol() {
<ide>
<ide> $url = '#here';
<ide> $this->assertEquals($url, Router::url($url));
<del> $url = 'posts/index#here';
<add> $url = '/posts/index#here';
<add>
<ide> $expected = FULL_BASE_URL . '/posts/index#here';
<ide> $this->assertEquals($expected, Router::url($url, true));
<ide> }
<ide><path>lib/Cake/Test/TestCase/TestSuite/TestCaseTest.php
<ide> <?php
<ide> /**
<del> * TestCaseTest file
<del> *
<del> * Test Case for TestCase class
<del> *
<del> * PHP version 5
<del> *
<ide> * CakePHP : Rapid Development Framework (http://cakephp.org)
<ide> * Copyright 2005-2012, Cake Software Foundation, Inc.
<ide> *
<ide> *
<ide> * @copyright Copyright 2005-2012, Cake Software Foundation, Inc.
<ide> * @link http://cakephp.org CakePHP Project
<del> * @package Cake.Test.Case.TestSuite
<ide> * @since CakePHP v 1.2.0.4487
<ide> * @license MIT License (http://www.opensource.org/licenses/mit-license.php)
<ide> */
<ide> namespace Cake\Test\TestCase\TestSuite;
<add>
<add>use Cake\Core\App;
<add>use Cake\Core\Configure;
<add>use Cake\Core\Plugin;
<ide> use Cake\Controller\Controller;
<ide> use Cake\TestSuite\TestCase;
<ide> use Cake\Test\Fixture\AssertTagsTestCase;
<ide> use Cake\Test\Fixture\FixturizedTestCase;
<add>use Cake\Utility\ClassRegistry;
<ide>
<ide> /**
<ide> * TestCaseTest
<ide> public function testAssertTextNotContains() {
<ide> * @return void
<ide> */
<ide> public function testGetMockForModel() {
<add> Configure::write('App.namespace', 'TestApp');
<add>
<ide> $Post = $this->getMockForModel('Post');
<ide>
<del> $this->assertInstanceOf('Post', $Post);
<add> $this->assertInstanceOf('TestApp\Model\Post', $Post);
<ide> $this->assertNull($Post->save(array()));
<del> $this->assertNull($Post->find('all'));
<add> $this->assertNull($Post->implementedEvents());
<ide> $this->assertEquals('posts', $Post->useTable);
<ide>
<ide> $Post = $this->getMockForModel('Post', array('save'));
<ide>
<ide> $this->assertNull($Post->save(array()));
<del> $this->assertInternalType('array', $Post->find('all'));
<add> $this->assertInternalType('array', $Post->implementedEvents());
<ide> }
<ide>
<ide> /**
<ide> public function testGetMockForModel() {
<ide> * @return void
<ide> */
<ide> public function testGetMockForModelWithPlugin() {
<add> Configure::write('App.namespace', 'TestApp');
<add> App::build(array(
<add> 'Plugin' => array(CAKE . 'Test/TestApp/Plugin/')
<add> ), App::RESET);
<add> Plugin::load('TestPlugin');
<add>
<ide> $TestPluginComment = $this->getMockForModel('TestPlugin.TestPluginComment');
<ide>
<ide> $result = ClassRegistry::init('TestPlugin.TestPluginComment');
<del> $this->assertInstanceOf('TestPluginComment', $result);
<add> $this->assertInstanceOf('\TestPlugin\Model\TestPluginComment', $result);
<ide>
<ide> $TestPluginComment = $this->getMockForModel('TestPlugin.TestPluginComment', array('save'));
<ide>
<del> $this->assertInstanceOf('TestPluginComment', $TestPluginComment);
<add> $this->assertInstanceOf('\TestPlugin\Model\TestPluginComment', $TestPluginComment);
<ide> $TestPluginComment->expects($this->at(0))
<ide> ->method('save')
<ide> ->will($this->returnValue(true));
<ide><path>lib/Cake/Test/TestCase/View/Helper/PaginatorHelperTest.php
<ide> public function testPagingLinks() {
<ide>
<ide> $result = $this->Paginator->prev('<< Previous', array('tag' => false), null, array('class' => 'disabled'));
<ide> $expected = array(
<del> 'a' => array('href' => '/index/page:1', 'rel' => 'prev', 'class' => 'prev'),
<add> 'a' => array('href' => '/index?page=1', 'rel' => 'prev', 'class' => 'prev'),
<ide> '<< Previous',
<ide> '/a'
<ide> );
<ide> public function testPagingLinks() {
<ide>
<ide> $result = $this->Paginator->next('Next', array('tag' => false));
<ide> $expected = array(
<del> 'a' => array('href' => '/index/page:3', 'rel' => 'next', 'class' => 'next'),
<add> 'a' => array('href' => '/index?page=3', 'rel' => 'next', 'class' => 'next'),
<ide> 'Next',
<ide> '/a'
<ide> );
<ide><path>lib/Cake/TestSuite/ControllerTestCase.php
<ide> public function generate($controller, $mocks = array()) {
<ide> $controller->Components->set($name, $component);
<ide> }
<ide>
<del><<<<<<< HEAD
<ide> $controller->constructClasses();
<del> $this->__dirtyController = false;
<del>=======
<del> $_controller->constructClasses();
<ide> $this->_dirtyController = false;
<del>>>>>>>> origin/2.3
<ide>
<ide> $this->controller = $controller;
<ide> return $this->controller;
<ide><path>lib/Cake/TestSuite/TestCase.php
<ide> public function getMockForModel($model, $methods = array(), $config = null) {
<ide> $config = ClassRegistry::config('Model');
<ide> }
<ide>
<del> $modelClass = get_class(ClassRegistry::init($model));
<del> list(, $modelName) = namespaceSplit($modelClass);
<add> $modelClass = App::className($model, 'Model');
<add> list(, $name) = namespaceSplit($modelClass);
<ide> $config = array_merge((array)$config, array('name' => $name));
<del> $mock = $this->getMock($name, $methods, array($config));
<add> $mock = $this->getMock($modelClass, $methods, array($config));
<ide> ClassRegistry::removeObject($name);
<ide> ClassRegistry::addObject($name, $mock);
<ide> return $mock;
<ide><path>lib/Cake/TestSuite/TestSuiteCommand.php
<ide> public function run(array $argv, $exit = true) {
<ide> * @return Cake\TestSuite\TestRunner
<ide> */
<ide> public function getRunner($loader) {
<del><<<<<<< HEAD:lib/Cake/TestSuite/TestSuiteCommand.php
<del> return new TestRunner($loader, $this->_params);
<del>=======
<del> return new CakeTestRunner($loader, $this->_params);
<del>>>>>>>> origin/2.3:lib/Cake/TestSuite/CakeTestSuiteCommand.php
<add> return new TestRunner($loader, $this->_params);
<ide> }
<ide>
<ide> /**
<ide><path>lib/Cake/View/Helper.php
<ide> public function assetTimestamp($path) {
<ide> $plugin = Inflector::camelize($segments[0]);
<ide> if (Plugin::loaded($plugin)) {
<ide> unset($segments[0]);
<del> $pluginPath = CakePlugin::path($plugin) . 'webroot' . DS . implode(DS, $segments);
<add> $pluginPath = Plugin::path($plugin) . 'webroot' . DS . implode(DS, $segments);
<ide> //@codingStandardsIgnoreStart
<ide> return $path . '?' . @filemtime($pluginPath);
<ide> //@codingStandardsIgnoreEnd | 12 |
Ruby | Ruby | support endless range values for range types | fd919ec881c8ae9e7c7e9251372109849b6888d8 | <ide><path>activerecord/lib/active_record/connection_adapters/postgresql/oid/range.rb
<ide> def type_cast_single(value)
<ide> end
<ide>
<ide> def type_cast_single_for_database(value)
<del> infinity?(value) ? value : @subtype.serialize(value)
<add> infinity?(value) ? value : @subtype.serialize(@subtype.cast(value))
<ide> end
<ide>
<ide> def extract_bounds(value)
<ide><path>activerecord/lib/active_record/connection_adapters/postgresql/quoting.rb
<ide> def encode_array(array_data)
<ide> end
<ide>
<ide> def encode_range(range)
<del> "[#{type_cast_range_value(range.first)},#{type_cast_range_value(range.last)}#{range.exclude_end? ? ')' : ']'}"
<add> "[#{type_cast_range_value(range.begin)},#{type_cast_range_value(range.end)}#{range.exclude_end? ? ')' : ']'}"
<ide> end
<ide>
<ide> def determine_encoding_of_strings_in_array(value)
<ide><path>activerecord/test/cases/adapters/postgresql/range_test.rb
<ide> def test_infinity_values
<ide> assert_equal(-Float::INFINITY...Float::INFINITY, record.float_range)
<ide> end
<ide>
<add> if Gem::Version.new(RUBY_VERSION) >= Gem::Version.new("2.6.0")
<add> def test_endless_range_values
<add> record = PostgresqlRange.create!(
<add> int4_range: eval("1.."),
<add> int8_range: eval("10.."),
<add> float_range: eval("0.5..")
<add> )
<add>
<add> record = PostgresqlRange.find(record.id)
<add>
<add> assert_equal 1...Float::INFINITY, record.int4_range
<add> assert_equal 10...Float::INFINITY, record.int8_range
<add> assert_equal 0.5...Float::INFINITY, record.float_range
<add> end
<add> end
<add>
<ide> private
<ide> def assert_equal_round_trip(range, attribute, value)
<ide> round_trip(range, attribute, value) | 3 |
Mixed | Javascript | assign deprecation code | 8a8a6865c092637515b286cd9575ea592b5f501e | <ide><path>doc/api/deprecations.md
<ide> Type: End-of-Life
<ide>
<ide> *Note*: change was made while `async_hooks` was an experimental API.
<ide>
<del><a id="DEP00XX"></a>
<del>### DEP00XX: Several internal properties of net.Server
<add><a id="DEP0073"></a>
<add>### DEP0073: Several internal properties of net.Server
<ide>
<ide> Type: Runtime
<ide>
<ide><path>lib/net.js
<ide> if (process.platform === 'win32') {
<ide> Object.defineProperty(Server.prototype, '_usingSlaves', {
<ide> get: internalUtil.deprecate(function() {
<ide> return this._usingWorkers;
<del> }, 'Accessing internal properties of net.Server is deprecated.', 'DEP00XX'),
<add> }, 'Accessing internal properties of net.Server is deprecated.', 'DEP0073'),
<ide> set: internalUtil.deprecate((val) => {
<ide> this._usingWorkers = val;
<del> }, 'Accessing internal properties of net.Server is deprecated.', 'DEP00XX'),
<add> }, 'Accessing internal properties of net.Server is deprecated.', 'DEP0073'),
<ide> configurable: true, enumerable: false
<ide> });
<ide>
<ide> Object.defineProperty(Server.prototype, '_slaves', {
<ide> get: internalUtil.deprecate(function() {
<ide> return this._workers;
<del> }, 'Accessing internal properties of net.Server is deprecated.', 'DEP00XX'),
<add> }, 'Accessing internal properties of net.Server is deprecated.', 'DEP0073'),
<ide> set: internalUtil.deprecate((val) => {
<ide> this._workers = val;
<del> }, 'Accessing internal properties of net.Server is deprecated.', 'DEP00XX'),
<add> }, 'Accessing internal properties of net.Server is deprecated.', 'DEP0073'),
<ide> configurable: true, enumerable: false
<ide> });
<ide>
<ide> Object.defineProperty(Server.prototype, '_setupSlave', {
<ide> value: internalUtil.deprecate(function(socketList) {
<ide> return this._setupWorker(socketList);
<del> }, 'Accessing internal properties of net.Server is deprecated.', 'DEP00XX'),
<add> }, 'Accessing internal properties of net.Server is deprecated.', 'DEP0073'),
<ide> configurable: true, enumerable: false
<ide> });
<ide> | 2 |
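The patch above assigns the real deprecation code DEP0073 to the warnings emitted when net.Server's renamed internal properties (_slaves, _usingSlaves, _setupSlave) are touched. A minimal TypeScript sketch of the underlying pattern, exposing a renamed internal field under its old name behind warning accessors, is shown below; the DeprecatedServer class and the warnOnce helper are invented for illustration and are not Node.js internals, although the property names mirror the patch.

```ts
// Minimal sketch: expose a renamed internal field under its old name,
// emitting a tagged deprecation warning when the old name is used.
const warned = new Set<string>();

function warnOnce(message: string, code: string): void {
  if (warned.has(code)) return;
  warned.add(code);
  console.warn(`[${code}] DeprecationWarning: ${message}`);
}

class DeprecatedServer {
  // New, preferred internal field.
  private _workers: unknown[] = [];

  constructor() {
    // Keep the old `_slaves` name working, but warn when it is accessed.
    Object.defineProperty(this, '_slaves', {
      get: () => {
        warnOnce('Accessing internal properties is deprecated.', 'DEP0073');
        return this._workers;
      },
      set: (val: unknown[]) => {
        warnOnce('Accessing internal properties is deprecated.', 'DEP0073');
        this._workers = val;
      },
      configurable: true,
      enumerable: false,
    });
  }
}

// Usage: touching the old name triggers a single tagged warning.
const server = new DeprecatedServer() as unknown as { _slaves: unknown[] };
server._slaves = [];
```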
Java | Java | add json matcher to assert on request body | d64f2eb03862dcef657fe1a37755184b1ecfda25 | <ide><path>spring-test/src/main/java/org/springframework/test/web/client/match/ContentRequestMatchers.java
<ide> /*
<del> * Copyright 2002-2017 the original author or authors.
<add> * Copyright 2002-2018 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide> import javax.xml.transform.dom.DOMSource;
<ide>
<ide> import org.hamcrest.Matcher;
<add>import org.springframework.test.util.JsonExpectationsHelper;
<ide> import org.w3c.dom.Node;
<ide>
<ide> import org.springframework.http.HttpHeaders;
<ide> public class ContentRequestMatchers {
<ide>
<ide> private final XmlExpectationsHelper xmlHelper;
<ide>
<add> private final JsonExpectationsHelper jsonHelper;
<add>
<ide>
<ide> /**
<ide> * Class constructor, not for direct instantiation.
<ide> * Use {@link MockRestRequestMatchers#content()}.
<ide> */
<ide> protected ContentRequestMatchers() {
<ide> this.xmlHelper = new XmlExpectationsHelper();
<add> this.jsonHelper = new JsonExpectationsHelper();
<ide> }
<ide>
<ide>
<ide> protected void matchInternal(MockClientHttpRequest request) throws Exception {
<ide> };
<ide> }
<ide>
<add> /**
<add> * Parse the expected and actual strings as JSON and assert the two
<add> * are "similar" - i.e. they contain the same attribute-value pairs
<add> * regardless of formatting with a lenient checking (extensible, and non-strict array
<add> * ordering).
<add> * <p>Use of this matcher requires the <a
<add> * href="http://jsonassert.skyscreamer.org/">JSONassert<a/> library.
<add> * @param expectedJsonContent the expected JSON content
<add> * @since 5.0.5
<add> */
<add> public RequestMatcher json(final String expectedJsonContent) {
<add> return json(expectedJsonContent, false);
<add> }
<add>
<add> /**
<add> * Parse the request body and the given string as JSON and assert the two
<add> * are "similar" - i.e. they contain the same attribute-value pairs
<add> * regardless of formatting.
<add> * <p>Can compare in two modes, depending on {@code strict} parameter value:
<add> * <ul>
<add> * <li>{@code true}: strict checking. Not extensible, and strict array ordering.</li>
<add> * <li>{@code false}: lenient checking. Extensible, and non-strict array ordering.</li>
<add> * </ul>
<add> * <p>Use of this matcher requires the <a
<add> * href="http://jsonassert.skyscreamer.org/">JSONassert<a/> library.
<add> * @param expectedJsonContent the expected JSON content
<add> * @param strict enables strict checking
<add> * @since 5.0.5
<add> */
<add> public RequestMatcher json(final String expectedJsonContent, final boolean strict) {
<add> return request -> {
<add> try {
<add> MockClientHttpRequest mockRequest = (MockClientHttpRequest) request;
<add> jsonHelper.assertJsonEqual(expectedJsonContent, mockRequest.getBodyAsString(), strict);
<add> } catch (Exception e) {
<add> throw new AssertionError("Failed to parse expected or actual JSON request content", e);
<add> }
<add> };
<add> }
<add>
<ide>
<ide> /**
<ide> * Abstract base class for XML {@link RequestMatcher}'s.
<ide><path>spring-test/src/test/java/org/springframework/test/web/client/match/ContentRequestMatchersTests.java
<ide> /*
<del> * Copyright 2002-2016 the original author or authors.
<add> * Copyright 2002-2018 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide> public void testNodeMatcherNoMatch() throws Exception {
<ide> MockRestRequestMatchers.content().node(hasXPath("/foo/bar/bar")).match(this.request);
<ide> }
<ide>
<add> @Test
<add> public void testJsonLenientMatch() throws Exception {
<add> String content = "{\n \"foo array\":[\"first\",\"second\"] , \"someExtraProperty\": \"which is allowed\" \n}";
<add> this.request.getBody().write(content.getBytes());
<add>
<add> MockRestRequestMatchers.content().json("{\n \"foo array\":[\"second\",\"first\"] \n}")
<add> .match(this.request);
<add> MockRestRequestMatchers.content().json("{\n \"foo array\":[\"second\",\"first\"] \n}", false)
<add> .match(this.request);
<add> }
<add>
<add> @Test
<add> public void testJsonStrictMatch() throws Exception {
<add> String content = "{\n \"foo\": \"bar\", \"foo array\":[\"first\",\"second\"] \n}";
<add> this.request.getBody().write(content.getBytes());
<add>
<add> MockRestRequestMatchers
<add> .content()
<add> .json("{\n \"foo array\":[\"first\",\"second\"] , \"foo\": \"bar\" \n}", true)
<add> .match(this.request);
<add> }
<add>
<add> @Test(expected = AssertionError.class)
<add> public void testJsonLenientNoMatch() throws Exception {
<add> String content = "{\n \"bar\" : \"foo\" \n}";
<add> this.request.getBody().write(content.getBytes());
<add>
<add> MockRestRequestMatchers
<add> .content()
<add> .json("{\n \"foo\" : \"bar\" \n}")
<add> .match(this.request);
<add> MockRestRequestMatchers
<add> .content()
<add> .json("{\n \"foo\" : \"bar\" \n}", false)
<add> .match(this.request);
<add> }
<add>
<add> @Test(expected = AssertionError.class)
<add> public void testJsonStrictNoMatch() throws Exception {
<add> String content = "{\n \"foo array\":[\"first\",\"second\"] , \"someExtraProperty\": \"which is NOT allowed\" \n}";
<add> this.request.getBody().write(content.getBytes());
<add>
<add> MockRestRequestMatchers
<add> .content()
<add> .json("{\n \"foo array\":[\"second\",\"first\"] \n}", true)
<add> .match(this.request);
<add> }
<add>
<ide> } | 2 |
Python | Python | remove unused functions from deprecated | 6e5bd4f2280dfd8714c5288fe437a393783709ad | <ide><path>spacy/deprecated.py
<ide> # coding: utf8
<ide> from __future__ import unicode_literals
<ide>
<del>from pathlib import Path
<del>
<ide> from . import about
<del>from . import util
<ide> from .util import prints
<del>from .compat import path2str
<del>from .cli import download, link
<del>
<del>
<del>def fix_glove_vectors_loading(overrides):
<del> """
<del> Special-case hack for loading the GloVe vectors, to support deprecated
<del> <1.0 stuff. Phase this out once the data is fixed.
<del> """
<del> if 'data_dir' in overrides and 'path' not in overrides:
<del> raise ValueError("The argument 'data_dir' has been renamed to 'path'")
<del> if overrides.get('path') is False:
<del> return overrides
<del> if overrides.get('path') in (None, True):
<del> data_path = util.get_data_path()
<del> else:
<del> path = util.ensure_path(overrides['path'])
<del> data_path = path.parent
<del> vec_path = None
<del> if 'add_vectors' not in overrides:
<del> if 'vectors' in overrides:
<del> vec_path = match_best_version(overrides['vectors'], None, data_path)
<del> if vec_path is None:
<del> return overrides
<del> else:
<del> vec_path = match_best_version('en_glove_cc_300_1m_vectors', None, data_path)
<del> if vec_path is not None:
<del> vec_path = vec_path / 'vocab' / 'vec.bin'
<del> if vec_path is not None:
<del> overrides['add_vectors'] = lambda vocab: vocab.load_vectors_from_bin_loc(vec_path)
<del> return overrides
<add>from .cli import download
<ide>
<ide>
<del>def match_best_version(target_name, target_version, path):
<del> def split_data_name(name):
<del> return name.split('-', 1) if '-' in name else (name, '')
<ide> PRON_LEMMA = "-PRON-"
<ide> DET_LEMMA = "-DET-"
<ide>
<del> path = util.ensure_path(path)
<del> if path is None or not path.exists():
<del> return None
<del> matches = []
<del> for data_name in path.iterdir():
<del> name, version = split_data_name(data_name.parts[-1])
<del> if name == target_name:
<del> matches.append((tuple(float(v) for v in version.split('.')), data_name))
<del> if matches:
<del> return Path(max(matches)[1])
<del> else:
<del> return None
<del>
<del>
<del>def resolve_model_name(name):
<del> """
<del> If spaCy is loaded with 'de', check if symlink already exists. If
<del> not, user may have upgraded from older version and have old models installed.
<del> Check if old model directory exists and if so, return that instead and create
<del> shortcut link. If English model is found and no shortcut exists, raise error
<del> and tell user to install new model.
<del> """
<del> if name == 'en' or name == 'de':
<del> versions = ['1.0.0', '1.1.0']
<del> data_path = util.get_data_path()
<del> model_path = data_path / name
<del> v_model_paths = [data_path / '%s-%s' % (name, v) for v in versions]
<del>
<del> if not model_path.exists(): # no shortcut found
<del> for v_path in v_model_paths:
<del> if v_path.exists(): # versioned model directory found
<del> if name == 'de':
<del> link(v_path, name)
<del> return name
<del> else:
<del> raise ValueError(
<del> "Found English model at %s. This model is not "
<del> "compatible with the current version. See "
<del> "https://spacy.io/docs/usage/models to download the "
<del> "new model." % path2str(v_path))
<del> return name
<del>
<ide>
<ide> def depr_model_download(lang):
<ide> """ | 1 |
Python | Python | enable cursor pagination of value querysets. | 70385711572e7ea141644f349b7180c68b4c15d2 | <ide><path>rest_framework/pagination.py
<ide> def encode_cursor(self, cursor):
<ide> return replace_query_param(self.base_url, self.cursor_query_param, encoded)
<ide>
<ide> def _get_position_from_instance(self, instance, ordering):
<del> attr = getattr(instance, ordering[0].lstrip('-'))
<add> field_name = ordering[0].lstrip('-')
<add> if isinstance(instance, dict):
<add> attr = instance[field_name]
<add> else:
<add> attr = getattr(instance, field_name)
<ide> return six.text_type(attr)
<ide>
<ide> def get_paginated_response(self, data):
<ide><path>tests/test_pagination.py
<ide>
<ide> import pytest
<ide> from django.core.paginator import Paginator as DjangoPaginator
<add>from django.db import models
<add>from django.test import TestCase
<ide>
<ide> from rest_framework import (
<ide> exceptions, filters, generics, pagination, serializers, status
<ide> def test_max_limit(self):
<ide> assert content.get('previous') == prev_url
<ide>
<ide>
<del>class TestCursorPagination:
<del> """
<del> Unit tests for `pagination.CursorPagination`.
<del> """
<del>
<del> def setup(self):
<del> class MockObject(object):
<del> def __init__(self, idx):
<del> self.created = idx
<del>
<del> class MockQuerySet(object):
<del> def __init__(self, items):
<del> self.items = items
<del>
<del> def filter(self, created__gt=None, created__lt=None):
<del> if created__gt is not None:
<del> return MockQuerySet([
<del> item for item in self.items
<del> if item.created > int(created__gt)
<del> ])
<del>
<del> assert created__lt is not None
<del> return MockQuerySet([
<del> item for item in self.items
<del> if item.created < int(created__lt)
<del> ])
<del>
<del> def order_by(self, *ordering):
<del> if ordering[0].startswith('-'):
<del> return MockQuerySet(list(reversed(self.items)))
<del> return self
<del>
<del> def __getitem__(self, sliced):
<del> return self.items[sliced]
<del>
<del> class ExamplePagination(pagination.CursorPagination):
<del> page_size = 5
<del> ordering = 'created'
<del>
<del> self.pagination = ExamplePagination()
<del> self.queryset = MockQuerySet([
<del> MockObject(idx) for idx in [
<del> 1, 1, 1, 1, 1,
<del> 1, 2, 3, 4, 4,
<del> 4, 4, 5, 6, 7,
<del> 7, 7, 7, 7, 7,
<del> 7, 7, 7, 8, 9,
<del> 9, 9, 9, 9, 9
<del> ]
<del> ])
<del>
<del> def get_pages(self, url):
<del> """
<del> Given a URL return a tuple of:
<del>
<del> (previous page, current page, next page, previous url, next url)
<del> """
<del> request = Request(factory.get(url))
<del> queryset = self.pagination.paginate_queryset(self.queryset, request)
<del> current = [item.created for item in queryset]
<del>
<del> next_url = self.pagination.get_next_link()
<del> previous_url = self.pagination.get_previous_link()
<del>
<del> if next_url is not None:
<del> request = Request(factory.get(next_url))
<del> queryset = self.pagination.paginate_queryset(self.queryset, request)
<del> next = [item.created for item in queryset]
<del> else:
<del> next = None
<del>
<del> if previous_url is not None:
<del> request = Request(factory.get(previous_url))
<del> queryset = self.pagination.paginate_queryset(self.queryset, request)
<del> previous = [item.created for item in queryset]
<del> else:
<del> previous = None
<del>
<del> return (previous, current, next, previous_url, next_url)
<add>class CursorPaginationTestsMixin:
<ide>
<ide> def test_invalid_cursor(self):
<ide> request = Request(factory.get('/', {'cursor': '123'}))
<ide> def test_cursor_pagination(self):
<ide> assert isinstance(self.pagination.to_html(), type(''))
<ide>
<ide>
<add>class TestCursorPagination(CursorPaginationTestsMixin):
<add> """
<add> Unit tests for `pagination.CursorPagination`.
<add> """
<add>
<add> def setup(self):
<add> class MockObject(object):
<add> def __init__(self, idx):
<add> self.created = idx
<add>
<add> class MockQuerySet(object):
<add> def __init__(self, items):
<add> self.items = items
<add>
<add> def filter(self, created__gt=None, created__lt=None):
<add> if created__gt is not None:
<add> return MockQuerySet([
<add> item for item in self.items
<add> if item.created > int(created__gt)
<add> ])
<add>
<add> assert created__lt is not None
<add> return MockQuerySet([
<add> item for item in self.items
<add> if item.created < int(created__lt)
<add> ])
<add>
<add> def order_by(self, *ordering):
<add> if ordering[0].startswith('-'):
<add> return MockQuerySet(list(reversed(self.items)))
<add> return self
<add>
<add> def __getitem__(self, sliced):
<add> return self.items[sliced]
<add>
<add> class ExamplePagination(pagination.CursorPagination):
<add> page_size = 5
<add> ordering = 'created'
<add>
<add> self.pagination = ExamplePagination()
<add> self.queryset = MockQuerySet([
<add> MockObject(idx) for idx in [
<add> 1, 1, 1, 1, 1,
<add> 1, 2, 3, 4, 4,
<add> 4, 4, 5, 6, 7,
<add> 7, 7, 7, 7, 7,
<add> 7, 7, 7, 8, 9,
<add> 9, 9, 9, 9, 9
<add> ]
<add> ])
<add>
<add> def get_pages(self, url):
<add> """
<add> Given a URL return a tuple of:
<add>
<add> (previous page, current page, next page, previous url, next url)
<add> """
<add> request = Request(factory.get(url))
<add> queryset = self.pagination.paginate_queryset(self.queryset, request)
<add> current = [item.created for item in queryset]
<add>
<add> next_url = self.pagination.get_next_link()
<add> previous_url = self.pagination.get_previous_link()
<add>
<add> if next_url is not None:
<add> request = Request(factory.get(next_url))
<add> queryset = self.pagination.paginate_queryset(self.queryset, request)
<add> next = [item.created for item in queryset]
<add> else:
<add> next = None
<add>
<add> if previous_url is not None:
<add> request = Request(factory.get(previous_url))
<add> queryset = self.pagination.paginate_queryset(self.queryset, request)
<add> previous = [item.created for item in queryset]
<add> else:
<add> previous = None
<add>
<add> return (previous, current, next, previous_url, next_url)
<add>
<add>
<add>class CursorPaginationModel(models.Model):
<add> created = models.IntegerField()
<add>
<add>
<add>class TestCursorPaginationWithValueQueryset(CursorPaginationTestsMixin, TestCase):
<add> """
<add> Unit tests for `pagination.CursorPagination` for value querysets.
<add> """
<add>
<add> def setUp(self):
<add> class ExamplePagination(pagination.CursorPagination):
<add> page_size = 5
<add> ordering = 'created'
<add>
<add> self.pagination = ExamplePagination()
<add> data = [
<add> 1, 1, 1, 1, 1,
<add> 1, 2, 3, 4, 4,
<add> 4, 4, 5, 6, 7,
<add> 7, 7, 7, 7, 7,
<add> 7, 7, 7, 8, 9,
<add> 9, 9, 9, 9, 9
<add> ]
<add> for idx in data:
<add> CursorPaginationModel.objects.create(created=idx)
<add>
<add> self.queryset = CursorPaginationModel.objects.values()
<add>
<add> def get_pages(self, url):
<add> """
<add> Given a URL return a tuple of:
<add>
<add> (previous page, current page, next page, previous url, next url)
<add> """
<add> request = Request(factory.get(url))
<add> queryset = self.pagination.paginate_queryset(self.queryset, request)
<add> current = [item['created'] for item in queryset]
<add>
<add> next_url = self.pagination.get_next_link()
<add> previous_url = self.pagination.get_previous_link()
<add>
<add> if next_url is not None:
<add> request = Request(factory.get(next_url))
<add> queryset = self.pagination.paginate_queryset(self.queryset, request)
<add> next = [item['created'] for item in queryset]
<add> else:
<add> next = None
<add>
<add> if previous_url is not None:
<add> request = Request(factory.get(previous_url))
<add> queryset = self.pagination.paginate_queryset(self.queryset, request)
<add> previous = [item['created'] for item in queryset]
<add> else:
<add> previous = None
<add>
<add> return (previous, current, next, previous_url, next_url)
<add>
<add>
<ide> def test_get_displayed_page_numbers():
<ide> """
<ide> Test our contextual page display function. | 2 |
PHP | PHP | use getsessionstore in request class | a55b886f08668a0aee65752be6c4c9695746c9d6 | <ide><path>src/Illuminate/Http/Request.php
<ide> public function flash($filter = null, $keys = array())
<ide> {
<ide> $flash = ( ! is_null($filter)) ? $this->$filter($keys) : $this->input();
<ide>
<del> $this->sessionStore->flashInput($flash);
<add> $this->getSessionStore()->flashInput($flash);
<ide> }
<ide>
<ide> /**
<ide> public function flashExcept()
<ide> */
<ide> public function flush()
<ide> {
<del> $this->sessionStore->flashInput(array());
<add> $this->getSessionStore()->flashInput(array());
<ide> }
<ide>
<ide> /** | 1 |
Javascript | Javascript | add injection for error dialog | 8de9c6c12c3e1ebf05b941add1d659321ea54930 | <ide><path>src/renderers/shared/fiber/ReactFiberErrorLogger.js
<ide>
<ide> 'use strict';
<ide>
<add>const emptyFunction = require('fbjs/lib/emptyFunction');
<add>const invariant = require('fbjs/lib/invariant');
<add>
<ide> import type {CapturedError} from 'ReactFiberScheduler';
<ide>
<add>let showDialog = emptyFunction;
<add>
<ide> function logCapturedError(capturedError: CapturedError): void {
<ide> if (__DEV__) {
<ide> const {
<ide> function logCapturedError(capturedError: CapturedError): void {
<ide> `React caught an error thrown by one of your components.\n\n${error.stack}`,
<ide> );
<ide> }
<add>
<add> showDialog(capturedError);
<ide> }
<ide>
<add>exports.injection = {
<add> injectDialog(fn: (CapturedError) => void) {
<add> invariant(
<add> showDialog === emptyFunction,
<add> 'The custom dialog was already injected.',
<add> );
<add> invariant(
<add> typeof fn === 'function',
<add> 'Injected showDialog() must be a function.',
<add> );
<add> showDialog = fn;
<add> },
<add>};
<add>
<ide> exports.logCapturedError = logCapturedError; | 1 |
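The ReactFiberErrorLogger change above adds an injection point so a host environment can supply a custom error dialog exactly once, with invariants guarding double injection and non-function values, and logCapturedError calling the injected dialog after logging. A minimal TypeScript sketch of that single-injection hook pattern follows; injectDialog and logCapturedError echo the names in the patch, while the ErrorDialog type, the noop default, and the plain Error-based checks are assumptions made for this example.

```ts
// Minimal sketch: an error-reporting hook that consumers can inject exactly
// once; until then a no-op is used, and double injection is rejected.
type ErrorDialog = (error: Error) => void;

const noop: ErrorDialog = () => {};
let showDialog: ErrorDialog = noop;

function injectDialog(fn: ErrorDialog): void {
  if (showDialog !== noop) {
    throw new Error('The custom dialog was already injected.');
  }
  if (typeof fn !== 'function') {
    throw new Error('Injected dialog must be a function.');
  }
  showDialog = fn;
}

function logCapturedError(error: Error): void {
  console.error(`Caught an error: ${error.message}`);
  showDialog(error); // no-op unless a dialog has been injected
}

// Usage: the host injects its own dialog once at startup.
injectDialog((error) => console.log(`[dialog] ${error.message}`));
logCapturedError(new Error('boom'));
```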
Ruby | Ruby | use temp file for calculating hash | d37831219df2c4976eddeba3076cfba6f3486d1d | <ide><path>Library/Homebrew/utils/curl.rb
<ide> def curl_check_http_content(url, user_agents: [:default], check_content: false,
<ide> end
<ide>
<ide> def curl_http_content_headers_and_checksum(url, hash_needed: false, user_agent: :default)
<add> file = Tempfile.new.tap(&:close)
<add>
<ide> max_time = hash_needed ? "600" : "25"
<ide> output, = curl_output(
<del> "--connect-timeout", "15", "--include", "--max-time", max_time, "--location", url,
<add> "--dump-header", "-", "--output", file.path, "--include", "--location",
<add> "--connect-timeout", "15", "--max-time", max_time, url,
<ide> user_agent: user_agent
<ide> )
<ide>
<ide> def curl_http_content_headers_and_checksum(url, hash_needed: false, user_agent:
<ide> final_url = headers[/^Location:\s*(.*)$/i, 1]&.chomp
<ide> end
<ide>
<del> output_hash = Digest::SHA256.digest(output) if hash_needed
<add> output_hash = Digest::SHA256.file(file.path) if hash_needed
<ide>
<ide> final_url ||= url
<ide>
<ide> def curl_http_content_headers_and_checksum(url, hash_needed: false, user_agent:
<ide> file_hash: output_hash,
<ide> file: output,
<ide> }
<add>ensure
<add> file.unlink
<ide> end
<ide>
<ide> def http_status_ok?(status) | 1 |
Ruby | Ruby | avoid unnecessary float to string conversion | e9d245cf3a6e1ce783931e8e339c11fb0f3757d5 | <ide><path>Library/Homebrew/bottles.rb
<ide> def bottle_url f
<ide>
<ide> def bottle_tag
<ide> case MacOS.version
<del> when 10.8, 10.7, 10.5
<add> when "10.8", "10.7", "10.5"
<ide> MacOS.cat
<del> when 10.6
<add> when "10.6"
<ide> Hardware::CPU.is_64_bit? ? :snow_leopard : :snow_leopard_32
<ide> else
<ide> Hardware::CPU.type == :ppc ? Hardware::CPU.family : MacOS.cat
<ide><path>Library/Homebrew/macos.rb
<ide> def version
<ide>
<ide> def cat
<ide> case MacOS.version
<del> when 10.8 then :mountain_lion
<del> when 10.7 then :lion
<del> when 10.6 then :snow_leopard
<del> when 10.5 then :leopard
<del> when 10.4 then :tiger
<add> when "10.8" then :mountain_lion
<add> when "10.7" then :lion
<add> when "10.6" then :snow_leopard
<add> when "10.5" then :leopard
<add> when "10.4" then :tiger
<ide> end
<ide> end
<ide>
<ide><path>Library/Homebrew/os/mac/xcode.rb
<ide> def bad_xcode_select_path?
<ide>
<ide> def latest_version
<ide> case MacOS.version
<del> when 10.4 then "2.5"
<del> when 10.5 then "3.1.4"
<del> when 10.6 then "3.2.6"
<del> when 10.7, 10.8 then "4.6.3"
<del> when 10.9 then "5.0"
<add> when "10.4" then "2.5"
<add> when "10.5" then "3.1.4"
<add> when "10.6" then "3.2.6"
<add> when "10.7", "10.8" then "4.6.3"
<add> when "10.9" then "5.0"
<ide> else
<ide> # Default to newest known version of Xcode for unreleased OSX versions.
<ide> if MacOS.version > 10.9
<ide><path>Library/Homebrew/test/test_bottle_tag.rb
<ide>
<ide> class BottleTagTests < Test::Unit::TestCase
<ide> def test_cat_tiger_ppc
<del> MacOS.stubs(:version).returns(MacOS::Version.new(10.4))
<add> MacOS.stubs(:version).returns(MacOS::Version.new("10.4"))
<ide> Hardware::CPU.stubs(:type).returns(:ppc)
<ide> Hardware::CPU.stubs(:family).returns(:foo)
<ide> assert_equal :foo, bottle_tag
<ide> end
<ide>
<ide> def test_cat_tiger_intel
<del> MacOS.stubs(:version).returns(MacOS::Version.new(10.4))
<add> MacOS.stubs(:version).returns(MacOS::Version.new("10.4"))
<ide> Hardware::CPU.stubs(:type).returns(:intel)
<ide> assert_equal :tiger, bottle_tag
<ide> end
<ide>
<ide> def test_cat_leopard
<del> MacOS.stubs(:version).returns(MacOS::Version.new(10.5))
<add> MacOS.stubs(:version).returns(MacOS::Version.new("10.5"))
<ide> assert_equal :leopard, bottle_tag
<ide> end
<ide>
<ide> def test_cat_snow_leopard_32
<del> MacOS.stubs(:version).returns(MacOS::Version.new(10.6))
<add> MacOS.stubs(:version).returns(MacOS::Version.new("10.6"))
<ide> Hardware::CPU.stubs(:is_64_bit?).returns(false)
<ide> assert_equal :snow_leopard_32, bottle_tag
<ide> end
<ide>
<ide> def test_cat_snow_leopard_64
<del> MacOS.stubs(:version).returns(MacOS::Version.new(10.6))
<add> MacOS.stubs(:version).returns(MacOS::Version.new("10.6"))
<ide> Hardware::CPU.stubs(:is_64_bit?).returns(true)
<ide> assert_equal :snow_leopard, bottle_tag
<ide> end
<ide>
<ide> def test_cat_lion
<del> MacOS.stubs(:version).returns(MacOS::Version.new(10.7))
<add> MacOS.stubs(:version).returns(MacOS::Version.new("10.7"))
<ide> assert_equal :lion, bottle_tag
<ide> end
<ide>
<ide> def test_cat_mountain_lion
<del> MacOS.stubs(:version).returns(MacOS::Version.new(10.8))
<add> MacOS.stubs(:version).returns(MacOS::Version.new("10.8"))
<ide> assert_equal :mountain_lion, bottle_tag
<ide> end
<ide> end
<ide><path>Library/Homebrew/test/test_dependency_collector.rb
<ide> def test_autotools_dep_system_autotools
<ide> end
<ide>
<ide> def test_x11_proxy_dep_mountain_lion
<del> MacOS.stubs(:version).returns(MacOS::Version.new(10.8))
<add> MacOS.stubs(:version).returns(MacOS::Version.new("10.8"))
<ide> assert_equal Dependency.new("libpng"), @d.build(:libpng)
<ide> end
<ide>
<ide> def test_x11_proxy_dep_lion_or_older
<del> MacOS.stubs(:version).returns(MacOS::Version.new(10.7))
<add> MacOS.stubs(:version).returns(MacOS::Version.new("10.7"))
<ide> assert_equal X11Dependency::Proxy.new(:libpng), @d.build(:libpng)
<ide> end
<ide>
<ide> def test_ld64_dep_pre_leopard
<del> MacOS.stubs(:version).returns(MacOS::Version.new(10.4))
<add> MacOS.stubs(:version).returns(MacOS::Version.new("10.4"))
<ide> assert_equal LD64Dependency.new, @d.build(:ld64)
<ide> end
<ide>
<ide> def test_ld64_dep_leopard_or_newer
<del> MacOS.stubs(:version).returns(MacOS::Version.new(10.5))
<add> MacOS.stubs(:version).returns(MacOS::Version.new("10.5"))
<ide> assert_nil @d.build(:ld64)
<ide> end
<ide>
<ide><path>Library/Homebrew/test/test_version_subclasses.rb
<ide>
<ide> class MacOSVersionTests < Test::Unit::TestCase
<ide> def setup
<del> @v = MacOS::Version.new(10.7)
<add> @v = MacOS::Version.new("10.7")
<ide> end
<ide>
<ide> def test_compare_with_symbol
<ide> def test_compare_with_string
<ide> end
<ide>
<ide> def test_compare_with_version
<del> assert_operator @v, :>, Version.new(10.6)
<del> assert_operator @v, :==, Version.new(10.7)
<del> assert_operator @v, :===, Version.new(10.7)
<del> assert_operator @v, :<, Version.new(10.8)
<add> assert_operator @v, :>, Version.new("10.6")
<add> assert_operator @v, :==, Version.new("10.7")
<add> assert_operator @v, :===, Version.new("10.7")
<add> assert_operator @v, :<, Version.new("10.8")
<ide> end
<ide> end | 6 |
Javascript | Javascript | fix typo in webgpu/constants.js | e683d1910bf61acaa1c8149d12f81e2b5871f4b7 | <ide><path>examples/jsm/renderers/webgpu/WebGPURenderer.js
<ide> class WebGPURenderer {
<ide>
<ide> const swapChain = context.configure( {
<ide> device: device,
<del> format: GPUTextureFormat.BRGA8Unorm // this is the only valid swap chain format right now (r121)
<add> format: GPUTextureFormat.BGRA8Unorm // this is the only valid swap chain format right now (r121)
<ide> } );
<ide>
<ide> this._adapter = adapter;
<ide> class WebGPURenderer {
<ide>
<ide> } else {
<ide>
<del> format = GPUTextureFormat.BRGA8Unorm; // default swap chain format
<add> format = GPUTextureFormat.BGRA8Unorm; // default swap chain format
<ide>
<ide> }
<ide>
<ide> class WebGPURenderer {
<ide> depthOrArrayLayers: 1
<ide> },
<ide> sampleCount: this._parameters.sampleCount,
<del> format: GPUTextureFormat.BRGA8Unorm,
<add> format: GPUTextureFormat.BGRA8Unorm,
<ide> usage: GPUTextureUsage.RENDER_ATTACHMENT
<ide> } );
<ide>
<ide> class WebGPURenderer {
<ide>
<ide> this._context.configure( {
<ide> device: device,
<del> format: GPUTextureFormat.BRGA8Unorm,
<add> format: GPUTextureFormat.BGRA8Unorm,
<ide> usage: GPUTextureUsage.RENDER_ATTACHMENT,
<ide> size: {
<ide> width: Math.floor( this._width * this._pixelRatio ),
<ide><path>examples/jsm/renderers/webgpu/constants.js
<ide> export const GPUTextureFormat = {
<ide> RGBA8Snorm: 'rgba8snorm',
<ide> RGBA8Uint: 'rgba8uint',
<ide> RGBA8Sint: 'rgba8sint',
<del> BRGA8Unorm: 'bgra8unorm',
<del> BRGA8UnormSRGB: 'bgra8unorm-srgb',
<add> BGRA8Unorm: 'bgra8unorm',
<add> BGRA8UnormSRGB: 'bgra8unorm-srgb',
<ide> // Packed 32-bit formats
<ide> RGB9E5UFloat: 'rgb9e5ufloat',
<ide> RGB10A2Unorm: 'rgb10a2unorm', | 2 |
Python | Python | fix lint violations in demos/ directory | 106fa89f8229873bc4ef0074d1c52662195a7c62 | <ide><path>demos/compute_demo.py
<ide>
<ide> from libcloud.common.types import InvalidCredsError
<ide> from libcloud.compute.types import Provider
<del>from libcloud.providers import get_driver
<add>from libcloud.compute.providers import get_driver
<ide>
<ide> from pprint import pprint
<ide>
<ide><path>demos/gce_demo.py
<ide> from libcloud.dns.base import Record, Zone
<ide> from libcloud.utils.py3 import PY3
<ide> if PY3:
<del> import urllib.request as url_req
<add> import urllib.request as url_req # pylint: disable=no-name-in-module
<ide> else:
<ide> import urllib2 as url_req
<ide> | 2 |
Java | Java | fix javadoc for databaseclient | bde0931e51ff93ef679e9d79f7798f681858f81c | <ide><path>spring-r2dbc/src/main/java/org/springframework/r2dbc/core/DatabaseClient.java
<ide> *
<ide> * DatabaseClient client = DatabaseClient.create(factory);
<ide> * Mono<Actor> actor = client.sql("select first_name, last_name from t_actor")
<del> * .map(row -> new Actor(row.get("first_name, String.class"),
<del> * row.get("last_name, String.class")))
<add> * .map(row -> new Actor(row.get("first_name", String.class),
<add> * row.get("last_name", String.class)))
<ide> * .first();
<ide> * </pre>
<ide> * | 1 |
Javascript | Javascript | add key_events support to textarea | 4d492e426b3c92b432babedb770d8b74aef6317d | <ide><path>packages/sproutcore-handlebars/lib/controls/text_area.js
<ide> SC.TextArea = SC.View.extend({
<ide> value: "",
<ide> attributeBindings: ['placeholder'],
<ide> placeholder: null,
<add>
<add> insertNewline: SC.K,
<add> cancel: SC.K,
<ide>
<ide> focusOut: function(event) {
<ide> this._elementValueDidChange();
<ide> SC.TextArea = SC.View.extend({
<ide> },
<ide>
<ide> keyUp: function(event) {
<del> this._elementValueDidChange();
<add> this.interpretKeyEvents(event);
<ide> return false;
<ide> },
<ide>
<ide> SC.TextArea = SC.View.extend({
<ide> this._updateElementValue();
<ide> },
<ide>
<add> interpretKeyEvents: function(event) {
<add> var map = SC.TextArea.KEY_EVENTS;
<add> var method = map[event.keyCode];
<add>
<add> if (method) { return this[method](event); }
<add> else { this._elementValueDidChange(); }
<add> },
<add>
<ide> _elementValueDidChange: function() {
<ide> set(this, 'value', this.$().val());
<ide> },
<ide>
<ide> _updateElementValue: function() {
<ide> this.$().val(get(this, 'value'));
<ide> }.observes('value')
<del>});
<ide>\ No newline at end of file
<add>});
<add>
<add>SC.TextArea.KEY_EVENTS = {
<add> 13: 'insertNewline',
<add> 27: 'cancel'
<add>};
<ide><path>packages/sproutcore-handlebars/tests/controls/text_area_test.js
<ide> test("value binding works properly for inputs that haven't been created", functi
<ide> equals(textArea.$().val(), 'ohai', "value is reflected in the input element once it is created");
<ide> });
<ide>
<add>test("should call the insertNewline method when return key is pressed", function() {
<add> var wasCalled;
<add> var event = SC.Object.create({
<add> keyCode: 13
<add> });
<add>
<add> textArea.insertNewline = function() {
<add> wasCalled = true;
<add> };
<add>
<add> textArea.keyUp(event);
<add> ok(wasCalled, "invokes insertNewline method");
<add>});
<add>
<add>test("should call the cancel method when escape key is pressed", function() {
<add> var wasCalled;
<add> var event = SC.Object.create({
<add> keyCode: 27
<add> });
<add>
<add> textArea.cancel = function() {
<add> wasCalled = true;
<add> };
<add>
<add> textArea.keyUp(event);
<add> ok(wasCalled, "invokes cancel method");
<add>});
<add>
<ide> // test("listens for focus and blur events", function() {
<ide> // var focusCalled = 0;
<ide> // var blurCalled = 0; | 2 |
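The TextArea change above routes keyUp events through a keyCode-to-method map (13 to insertNewline, 27 to cancel) and falls back to syncing the element value when no mapping exists. A minimal TypeScript sketch of that dispatch pattern is given below; the KeyHandlingField class and its logging bodies are assumptions for illustration, not Ember APIs, though the KEY_EVENTS table mirrors the patch.

```ts
// Minimal sketch: dispatch key events to named handlers via a lookup table,
// falling back to a default action when the key is not mapped.
type HandlerName = 'insertNewline' | 'cancel';

const KEY_EVENTS: Record<number, HandlerName> = {
  13: 'insertNewline', // Return/Enter
  27: 'cancel',        // Escape
};

class KeyHandlingField {
  value = '';

  insertNewline(): void {
    console.log('newline requested');
  }

  cancel(): void {
    console.log('edit cancelled');
  }

  // Called for every keyup; special keys go to their handler,
  // everything else just updates the tracked value.
  interpretKeyEvent(keyCode: number, currentValue: string): void {
    const method = KEY_EVENTS[keyCode];
    if (method) {
      this[method]();
    } else {
      this.value = currentValue;
    }
  }
}

const field = new KeyHandlingField();
field.interpretKeyEvent(72, 'h'); // ordinary key: value is synced
field.interpretKeyEvent(13, 'h'); // Return: insertNewline() runs
```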
Javascript | Javascript | add spec for headlessjstasksupport | 56c3852384fad2e5dfbb9bad316543af972e902c | <ide><path>Libraries/ReactNative/AppRegistry.js
<ide>
<ide> const BatchedBridge = require('../BatchedBridge/BatchedBridge');
<ide> const BugReporting = require('../BugReporting/BugReporting');
<del>const NativeModules = require('../BatchedBridge/NativeModules');
<ide> const ReactNative = require('../Renderer/shims/ReactNative');
<ide> const SceneTracker = require('../Utilities/SceneTracker');
<ide>
<ide> const renderApplication = require('./renderApplication');
<ide> const createPerformanceLogger = require('../Utilities/createPerformanceLogger');
<ide> import type {IPerformanceLogger} from '../Utilities/createPerformanceLogger';
<ide>
<add>import NativeHeadlessJsTaskSupport from './NativeHeadlessJsTaskSupport';
<add>
<ide> type Task = (taskData: any) => Promise<void>;
<ide> type TaskProvider = () => Task;
<ide> type TaskCanceller = () => void;
<ide> const AppRegistry = {
<ide> const taskProvider = taskProviders.get(taskKey);
<ide> if (!taskProvider) {
<ide> console.warn(`No task registered for key ${taskKey}`);
<del> NativeModules.HeadlessJsTaskSupport.notifyTaskFinished(taskId);
<add> if (NativeHeadlessJsTaskSupport) {
<add> NativeHeadlessJsTaskSupport.notifyTaskFinished(taskId);
<add> }
<ide> return;
<ide> }
<ide> taskProvider()(data)
<del> .then(() =>
<del> NativeModules.HeadlessJsTaskSupport.notifyTaskFinished(taskId),
<del> )
<add> .then(() => {
<add> if (NativeHeadlessJsTaskSupport) {
<add> NativeHeadlessJsTaskSupport.notifyTaskFinished(taskId);
<add> }
<add> })
<ide> .catch(reason => {
<ide> console.error(reason);
<del> NativeModules.HeadlessJsTaskSupport.notifyTaskFinished(taskId);
<add> if (NativeHeadlessJsTaskSupport) {
<add> NativeHeadlessJsTaskSupport.notifyTaskFinished(taskId);
<add> }
<ide> });
<ide> },
<ide>
<ide><path>Libraries/ReactNative/NativeHeadlessJsTaskSupport.js
<add>/**
<add> * Copyright (c) Facebook, Inc. and its affiliates.
<add> *
<add> * This source code is licensed under the MIT license found in the
<add> * LICENSE file in the root directory of this source tree.
<add> *
<add> * @flow
<add> * @format
<add> */
<add>
<add>'use strict';
<add>
<add>import type {TurboModule} from 'RCTExport';
<add>import * as TurboModuleRegistry from 'TurboModuleRegistry';
<add>
<add>export interface Spec extends TurboModule {
<add> +notifyTaskFinished: (taskId: number) => void;
<add>}
<add>
<add>export default TurboModuleRegistry.get<Spec>('HeadlessJsTaskSupport'); | 2 |
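The AppRegistry change above swaps a direct NativeModules lookup for an optional NativeHeadlessJsTaskSupport module and guards every notifyTaskFinished call. The TypeScript sketch below shows the general pattern, running an async task and reporting completion through a module that may be absent; the names maybeNotifier and runHeadlessTask are invented for this example and are not React Native APIs.

```ts
// Minimal sketch: run a task keyed by name and notify an optional
// native-side module when it finishes, even on failure or a missing task.
interface TaskNotifier {
  notifyTaskFinished(taskId: number): void;
}

// The notifier may be unavailable (e.g. module not linked), so it is nullable.
const maybeNotifier: TaskNotifier | null = {
  notifyTaskFinished: (taskId) => console.log(`task ${taskId} finished`),
};

const tasks = new Map<string, (data: unknown) => Promise<void>>([
  ['SyncData', async () => { /* pretend to do work */ }],
]);

async function runHeadlessTask(taskId: number, taskKey: string, data: unknown): Promise<void> {
  const task = tasks.get(taskKey);
  if (!task) {
    console.warn(`No task registered for key ${taskKey}`);
    maybeNotifier?.notifyTaskFinished(taskId); // guard: notifier may be null
    return;
  }
  try {
    await task(data);
  } catch (err) {
    console.error(err);
  } finally {
    // Always report completion if the notifier exists.
    maybeNotifier?.notifyTaskFinished(taskId);
  }
}

runHeadlessTask(1, 'SyncData', {});
```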
Java | Java | stop preallocation views on the main thread | 17e16940768080caa47a33141a0c93ed47d4189a | <ide><path>ReactAndroid/src/main/java/com/facebook/react/fabric/FabricUIManager.java
<ide> public void onCatalystInstanceDestroy() {
<ide>
<ide> @DoNotStrip
<ide> private void preallocateView(final int rootTag, final String componentName) {
<add> if (UiThreadUtil.isOnUiThread()) {
<add> // There is no reason to allocate views ahead of time on the main thread.
<add> return;
<add> }
<ide> synchronized (mPreMountItemsLock) {
<ide> ThemedReactContext context =
<ide> Assertions.assertNotNull(mReactContextForRootTag.get(rootTag)); | 1 |
Javascript | Javascript | add nummultiviewviews to parameters | 95cba890d318bb0b3f9b9293c71fc3f3f5ae4b3c | <ide><path>src/renderers/webgl/WebGLProgram.js
<ide> function WebGLProgram( renderer, extensions, code, material, shader, parameters
<ide>
<ide> var prefixVertex, prefixFragment;
<ide>
<del> var renderTarget = renderer.getRenderTarget();
<del> var numMultiviewViews = renderTarget && renderTarget.isWebGLMultiviewRenderTarget ? renderTarget.numViews : 0;
<del>
<ide> if ( material.isRawShaderMaterial ) {
<ide>
<ide> prefixVertex = [
<ide> function WebGLProgram( renderer, extensions, code, material, shader, parameters
<ide>
<ide> // Multiview
<ide>
<del> if ( numMultiviewViews > 0 ) {
<add> if ( parameters.numMultiviewViews > 0 ) {
<ide>
<ide> prefixVertex = prefixVertex.replace(
<ide> '#version 300 es\n',
<ide> [
<ide> '#version 300 es\n',
<ide> '#extension GL_OVR_multiview2 : require',
<del> 'layout(num_views = ' + numMultiviewViews + ') in;',
<add> 'layout(num_views = ' + parameters.numMultiviewViews + ') in;',
<ide> '#define VIEW_ID gl_ViewID_OVR'
<ide> ].join( '\n' )
<ide> );
<ide> function WebGLProgram( renderer, extensions, code, material, shader, parameters
<ide> 'uniform mat3 normalMatrix;'
<ide> ].join( '\n' ),
<ide> [
<del> 'uniform mat4 modelViewMatrices[' + numMultiviewViews + '];',
<del> 'uniform mat4 projectionMatrices[' + numMultiviewViews + '];',
<del> 'uniform mat4 viewMatrices[' + numMultiviewViews + '];',
<del> 'uniform mat3 normalMatrices[' + numMultiviewViews + '];',
<add> 'uniform mat4 modelViewMatrices[' + parameters.numMultiviewViews + '];',
<add> 'uniform mat4 projectionMatrices[' + parameters.numMultiviewViews + '];',
<add> 'uniform mat4 viewMatrices[' + parameters.numMultiviewViews + '];',
<add> 'uniform mat3 normalMatrices[' + parameters.numMultiviewViews + '];',
<ide>
<ide> '#define modelViewMatrix modelViewMatrices[VIEW_ID]',
<ide> '#define projectionMatrix projectionMatrices[VIEW_ID]',
<ide> function WebGLProgram( renderer, extensions, code, material, shader, parameters
<ide> prefixFragment = prefixFragment.replace(
<ide> 'uniform mat4 viewMatrix;',
<ide> [
<del> 'uniform mat4 viewMatrices[' + numMultiviewViews + '];',
<add> 'uniform mat4 viewMatrices[' + parameters.numMultiviewViews + '];',
<ide> '#define viewMatrix viewMatrices[VIEW_ID]'
<ide> ].join( '\n' )
<ide> );
<ide> function WebGLProgram( renderer, extensions, code, material, shader, parameters
<ide> this.program = program;
<ide> this.vertexShader = glVertexShader;
<ide> this.fragmentShader = glFragmentShader;
<del> this.numMultiviewViews = numMultiviewViews;
<add> this.numMultiviewViews = parameters.numMultiviewViews;
<ide>
<ide> return this;
<ide>
<ide><path>src/renderers/webgl/WebGLPrograms.js
<ide> function WebGLPrograms( renderer, extensions, capabilities ) {
<ide> };
<ide>
<ide> var parameterNames = [
<del> "precision", "supportsVertexTextures", "instancing",
<add> "precision", "supportsVertexTextures", "instancing", "numMultiviewViews",
<ide> "map", "mapEncoding", "matcap", "matcapEncoding", "envMap", "envMapMode", "envMapEncoding",
<ide> "lightMap", "aoMap", "emissiveMap", "emissiveMapEncoding", "bumpMap", "normalMap", "objectSpaceNormalMap", "tangentSpaceNormalMap", "clearcoatNormalMap", "displacementMap", "specularMap",
<ide> "roughnessMap", "metalnessMap", "gradientMap",
<ide> function WebGLPrograms( renderer, extensions, capabilities ) {
<ide> }
<ide>
<ide> var currentRenderTarget = renderer.getRenderTarget();
<add> var numMultiviewViews = currentRenderTarget && currentRenderTarget.isWebGLMultiviewRenderTarget ? currentRenderTarget.numViews : 0;
<ide>
<ide> var parameters = {
<ide>
<ide> function WebGLPrograms( renderer, extensions, capabilities ) {
<ide> instancing: object.isInstancedMesh === true,
<ide>
<ide> supportsVertexTextures: vertexTextures,
<add> numMultiviewViews: numMultiviewViews,
<ide> outputEncoding: getTextureEncodingFromMap( ( ! currentRenderTarget ) ? null : currentRenderTarget.texture, renderer.gammaOutput ),
<ide> map: !! material.map,
<ide> mapEncoding: getTextureEncodingFromMap( material.map, renderer.gammaInput ), | 2 |
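The three.js change above moves numMultiviewViews out of WebGLProgram and into the parameters object that WebGLPrograms builds, adding it to parameterNames so it is carried with the other cached program inputs. The TypeScript sketch below illustrates that design idea, keying a program cache on the full parameter set, with made-up names (ProgramParams, buildProgramKey, acquireProgram); it is not three.js code.

```ts
// Minimal sketch: derive every input that affects shader generation up front,
// then use the full parameter set as the cache key for compiled programs.
interface ProgramParams {
  precision: string;
  numViews: number; // multiview view count must be part of the key
  useMap: boolean;
}

const parameterNames: (keyof ProgramParams)[] = ['precision', 'numViews', 'useMap'];
const programCache = new Map<string, string>(); // key -> "compiled program"

function buildProgramKey(params: ProgramParams): string {
  return parameterNames.map((name) => `${name}:${params[name]}`).join('|');
}

function acquireProgram(params: ProgramParams): string {
  const key = buildProgramKey(params);
  let program = programCache.get(key);
  if (!program) {
    program = `program(${key})`; // stand-in for real shader compilation
    programCache.set(key, program);
  }
  return program;
}

// Two calls differing only in numViews must not share a cached program.
console.log(acquireProgram({ precision: 'highp', numViews: 0, useMap: true }));
console.log(acquireProgram({ precision: 'highp', numViews: 2, useMap: true }));
```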
PHP | PHP | fix failing tests | 6a9fdd841a9df78f0f837cab59612f4af2da6900 | <ide><path>lib/Cake/Utility/Set.php
<ide> class Set {
<ide> */
<ide> public static function merge($arr1, $arr2 = null) {
<ide> $args = func_get_args();
<add> if (empty($args[1])) {
<add> return (array)$args[0];
<add> }
<add> if (!is_array($args[0])) {
<add> $args[0] = (array)$args[0];
<add> }
<ide> return call_user_func_array('Hash::merge', $args);
<ide> }
<ide> | 1 |
Javascript | Javascript | remove test for deprecation warning | 02cd42b89f76bf4565f9b8aade2996af0b625d61 | <ide><path>packages/ember-runtime/tests/legacy_1x/system/binding_test.js
<ide> test("Binding value1 such that it will recieve only single values", function() {
<ide> equal("",get(bon1, "array1"));
<ide> });
<ide>
<del>test("Single binding using notEmpty function.", function() {
<del> // This should raise an exception for Ember 1.x developers who are using
<del> // the old syntax.
<del> raises(function() {
<del> var bond = Bon1.create ({
<del> array1Binding: Ember.Binding.single("TestNamespace.bon2.arr").notEmpty(null,'(EMPTY)')
<del> });
<del> });
<del>});
<del>
<ide> test("Binding with transforms, function to check the type of value", function() {
<ide> var jon = Bon1.create({
<ide> value1Binding: Ember.Binding.transform({ | 1 |
Text | Text | fix lint on react-18 docs | c7e4c85436394eed0580d01cc029ba9be1a75f3e | <ide><path>docs/advanced-features/react-18.md
<ide> export default function Home() {
<ide> <Suspense fallback={'Loading...'}>
<ide> <Profile />
<ide> </Suspense>
<del> <Content/>
<add> <Content />
<ide> </div>
<ide> )
<ide> } | 1 |
Javascript | Javascript | fix extension lookups for top-level main | cadc47fe076caa91bda6aa148e0b37be609781d7 | <ide><path>lib/module.js
<ide> Module._load = function(request, parent, isMain) {
<ide> ESMLoader.hook(hooks);
<ide> }
<ide> }
<del> await ESMLoader.import(getURLFromFilePath(request).href);
<add> await ESMLoader.import(getURLFromFilePath(request).pathname);
<ide> })()
<ide> .catch((e) => {
<ide> console.error(e);
<ide><path>test/parallel/test-module-main-extension-lookup.js
<add>'use strict';
<add>require('../common');
<add>const { execFileSync } = require('child_process');
<add>
<add>const node = process.argv[0];
<add>
<add>execFileSync(node, ['--experimental-modules', 'test/es-module/test-esm-ok']); | 2 |
Javascript | Javascript | call super.dispose() in dispose method | 6eb9fd3dfbdf054e944dfafc0f999ad49cfb9759 | <ide><path>src/js/resize-manager.js
<ide> class ResizeManager extends Component {
<ide> this.resizeObserver = null;
<ide> this.debouncedHandler_ = null;
<ide> this.loadListener_ = null;
<add> super.dispose();
<ide> }
<ide>
<ide> } | 1 |
PHP | PHP | add alias setter to helpformatter | 0b57ea656d985eff3a88adaf5a9ab28a06f8a6dd | <ide><path>src/Console/ConsoleOptionParser.php
<ide> class ConsoleOptionParser
<ide> */
<ide> protected $_tokens = [];
<ide>
<add> /**
<add> * Help alias use in the HelpFormatter.
<add> *
<add> * @see \Cake\Console\HelpFormatter::setAlias()
<add> * @var string
<add> */
<add> protected $_helpAlias = 'cake';
<add>
<ide> /**
<ide> * Construct an OptionParser so you can define its behavior
<ide> *
<ide> public function help($subcommand = null, $format = 'text', $width = 72)
<ide> }
<ide>
<ide> $formatter = new HelpFormatter($this);
<add> $formatter->setAlias($this->_helpAlias);
<add>
<ide> if ($format === 'text') {
<ide> return $formatter->text($width);
<ide> }
<ide> public function help($subcommand = null, $format = 'text', $width = 72)
<ide> }
<ide> }
<ide>
<add> /**
<add> * Set the alias used in the HelpFormatter
<add> *
<add> * @param string $alias The alias
<add> * @return void
<add> */
<add> public function setHelpAlias($alias)
<add> {
<add> $this->_helpAlias = $alias;
<add> }
<add>
<ide> /**
<ide> * Parse the value for a long option out of $this->_tokens. Will handle
<ide> * options with an `=` in them.
<ide><path>src/Console/HelpFormatter.php
<ide> */
<ide> namespace Cake\Console;
<ide>
<add>use Cake\Console\Exception\ConsoleException;
<ide> use Cake\Utility\Text;
<ide> use SimpleXmlElement;
<ide>
<ide> class HelpFormatter
<ide> */
<ide> protected $_parser;
<ide>
<add> /**
<add> * Alias to display in the output.
<add> *
<add> * @var string
<add> */
<add> protected $_alias = 'cake';
<add>
<ide> /**
<ide> * Build the help formatter for an OptionParser
<ide> *
<ide> public function __construct(ConsoleOptionParser $parser)
<ide> $this->_parser = $parser;
<ide> }
<ide>
<add> /**
<add> * Set the alias
<add> *
<add> * @return void
<add> * @throws \Cake\Console\Exception\ConsoleException When alias is not a string.
<add> */
<add> public function setAlias($alias)
<add> {
<add> if (is_string($alias)) {
<add> $this->_alias = $alias;
<add> } else {
<add> throw new ConsoleException('Alias must be of type string.');
<add> }
<add> }
<add>
<ide> /**
<ide> * Get the help as formatted text suitable for output on the command line.
<ide> *
<ide> public function text($width = 72)
<ide> ]);
<ide> }
<ide> $out[] = '';
<del> $out[] = sprintf('To see help on a subcommand use <info>`cake %s [subcommand] --help`</info>', $parser->getCommand());
<add> $out[] = sprintf('To see help on a subcommand use <info>`' . $this->_alias . ' %s [subcommand] --help`</info>', $parser->getCommand());
<ide> $out[] = '';
<ide> }
<ide>
<ide> public function text($width = 72)
<ide> */
<ide> protected function _generateUsage()
<ide> {
<del> $usage = ['cake ' . $this->_parser->getCommand()];
<add> $usage = [$this->_alias . ' ' . $this->_parser->getCommand()];
<ide> $subcommands = $this->_parser->subcommands();
<ide> if (!empty($subcommands)) {
<ide> $usage[] = '[subcommand]';
<ide><path>tests/TestCase/Console/HelpFormatterTest.php
<ide> public function testHelpWithLotsOfArguments()
<ide> $this->assertContains($expected, $result);
<ide> }
<ide>
<add> /**
<add> * Test setting a help alias
<add> *
<add> * @return void
<add> */
<add> public function testWithHelpAlias()
<add> {
<add> $parser = new ConsoleOptionParser('mycommand', false);
<add> $formatter = new HelpFormatter($parser);
<add> $formatter->setAlias('foo');
<add> $result = $formatter->text();
<add> $expected = 'foo mycommand [-h]';
<add> $this->assertContains($expected, $result);
<add> }
<add>
<add> /**
<add> * Tests that setting a none string help alias triggers an exception
<add> *
<add> * @expectedException \Cake\Console\Exception\ConsoleException
<add> * @expectedExceptionMessage Alias must be of type string.
<add> * @return void
<add> */
<add> public function testWithNoneStringHelpAlias()
<add> {
<add> $parser = new ConsoleOptionParser('mycommand', false);
<add> $formatter = new HelpFormatter($parser);
<add> $formatter->setAlias(['foo']);
<add> }
<add>
<ide> /**
<ide> * test help() with options and arguments that have choices.
<ide> * | 3 |
Javascript | Javascript | datatransfer property for drag and drop events | b019bed4daea16db7867ba806b6b00a0872a08e0 | <ide><path>packages/ember-views/lib/system.js
<ide> // License: Licensed under MIT license (see license.js)
<ide> // ==========================================================================
<ide>
<add>require("ember-views/system/jquery_ext");
<ide> require("ember-views/system/render_buffer");
<ide> require("ember-views/system/event_dispatcher");
<ide> require("ember-views/system/ext");
<ide><path>packages/ember-views/lib/system/jquery_ext.js
<add>// ==========================================================================
<add>// Project: Ember - JavaScript Application Framework
<add>// Copyright: ©2006-2011 Strobe Inc. and contributors.
<add>// Portions ©2008-2011 Apple Inc. All rights reserved.
<add>// License: Licensed under MIT license (see license.js)
<add>// ==========================================================================
<add>
<add>// http://www.whatwg.org/specs/web-apps/current-work/multipage/dnd.html#dndevents
<add>var dragEvents = Ember.String.w('dragstart drag dragenter dragleave dragover drop dragend');
<add>
<add>// Copies the `dataTransfer` property from a browser event object onto the
<add>// jQuery event object for the specified events
<add>Ember.EnumerableUtils.forEach(dragEvents, function(eventName) {
<add> Ember.$.event.fixHooks[eventName] = { props: ['dataTransfer'] };
<add>});
<ide><path>packages/ember-views/tests/system/jquery_ext_test.js
<add>// ==========================================================================
<add>// Project: Ember - JavaScript Application Framework
<add>// Copyright: ©2006-2011 Strobe Inc. and contributors.
<add>// Portions ©2008-2011 Apple Inc. All rights reserved.
<add>// License: Licensed under MIT license (see license.js)
<add>// ==========================================================================
<add>
<add>var view, dispatcher;
<add>
<add>// Adapted from https://github.com/jquery/jquery/blob/f30f7732e7775b6e417c4c22ced7adb2bf76bf89/test/data/testinit.js
<add>var fireNativeWithDataTransfer;
<add>if (document.createEvent) {
<add> fireNativeWithDataTransfer = function(node, type, dataTransfer) {
<add> var event = document.createEvent('HTMLEvents');
<add> event.initEvent(type, true, true);
<add> event.dataTransfer = dataTransfer;
<add> node.dispatchEvent(event);
<add> };
<add>} else {
<add> fireNativeWithDataTransfer = function(node, type, dataTransfer) {
<add> var event = document.createEventObject();
<add> event.dataTransfer = dataTransfer;
<add> node.fireEvent('on' + type, event);
<add> };
<add>}
<add>
<add>module("Ember.EventDispatcher", {
<add> setup: function() {
<add> Ember.run(function() {
<add> dispatcher = Ember.EventDispatcher.create();
<add> dispatcher.setup();
<add> });
<add> },
<add>
<add> teardown: function() {
<add> Ember.run(function() {
<add> if (view) { view.destroy(); }
<add> dispatcher.destroy();
<add> });
<add> }
<add>});
<add>
<add>test("jQuery.event.fix copies over the dataTransfer property", function() {
<add> var originalEvent;
<add> var receivedEvent;
<add>
<add> originalEvent = {
<add> type: 'drop',
<add> dataTransfer: 'success',
<add> target: document.body
<add> };
<add>
<add> receivedEvent = Ember.$.event.fix(originalEvent);
<add>
<add> ok(receivedEvent !== originalEvent, "attributes are copied to a new event object");
<add> equal(receivedEvent.dataTransfer, originalEvent.dataTransfer, "copies dataTransfer property to jQuery event");
<add>});
<add>
<add>test("drop handler should receive event with dataTransfer property", function() {
<add> var receivedEvent;
<add> var dropCalled = 0;
<add>
<add> view = Ember.View.create({
<add> render: function(buffer) {
<add> buffer.push('please drop stuff on me');
<add> this._super(buffer);
<add> },
<add>
<add> drop: function(evt) {
<add> receivedEvent = evt;
<add> dropCalled++;
<add> }
<add> });
<add>
<add> Ember.run(function() {
<add> view.append();
<add> });
<add>
<add> fireNativeWithDataTransfer(view.$().get(0), 'drop', 'success');
<add>
<add> equal(dropCalled, 1, "called drop handler once");
<add> equal(receivedEvent.dataTransfer, 'success', "copies dataTransfer property to jQuery event");
<add>}); | 3 |
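The patch above registers jQuery fixHooks so the native dataTransfer property is copied onto the normalized jQuery event for the HTML5 drag-and-drop events. As a rough sketch of that normalization idea, written independently of jQuery, the TypeScript below copies a per-event-type whitelist of properties onto a wrapped event; the normalizeEvent helper and the NormalizedEvent type are invented for illustration.

```ts
// Minimal sketch: wrap a native event in a normalized object, copying extra
// properties (like dataTransfer) listed per event type so they are not lost.
interface NormalizedEvent {
  type: string;
  target: EventTarget | null;
  dataTransfer?: DataTransfer | null;
  [key: string]: unknown;
}

// Extra properties to preserve for each drag-and-drop event type.
const extraProps: Record<string, string[]> = {};
for (const type of ['dragstart', 'drag', 'dragenter', 'dragleave', 'dragover', 'drop', 'dragend']) {
  extraProps[type] = ['dataTransfer'];
}

function normalizeEvent(native: Event): NormalizedEvent {
  const normalized: NormalizedEvent = { type: native.type, target: native.target };
  for (const prop of extraProps[native.type] ?? []) {
    normalized[prop] = (native as unknown as Record<string, unknown>)[prop];
  }
  return normalized;
}

// Usage: a drop event keeps its dataTransfer after normalization.
document.addEventListener('drop', (e) => {
  const wrapped = normalizeEvent(e);
  console.log(wrapped.dataTransfer);
});
```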
Python | Python | remove unnecessary arg in client fixture | 46c1383919454ae281967316d6d6fb33bce9b773 | <ide><path>examples/patterns/largerapp/tests/test_largerapp.py
<ide> import pytest
<ide>
<ide> @pytest.fixture
<del>def client(request):
<add>def client():
<ide> app.config['TESTING'] = True
<ide> client = app.test_client()
<ide> return client | 1 |
Javascript | Javascript | parse types into links in doc html gen | e517efafd84c8cd87d477a67c367ee9295208dec | <ide><path>tools/doc/html.js
<ide> var fs = require('fs');
<ide> var marked = require('marked');
<ide> var path = require('path');
<ide> var preprocess = require('./preprocess.js');
<add>var typeParser = require('./type-parser.js');
<ide>
<ide> module.exports = toHTML;
<ide>
<ide> function parseLists(input) {
<ide> output.push({ type: 'html', text: tok.text });
<ide> return;
<ide> }
<del> if (state === null) {
<add> if (state === null ||
<add> (state === 'AFTERHEADING' && tok.type === 'heading')) {
<ide> if (tok.type === 'heading') {
<ide> state = 'AFTERHEADING';
<ide> }
<ide> function parseLists(input) {
<ide> function parseListItem(text) {
<ide> var parts = text.split('`');
<ide> var i;
<add> var typeMatches;
<ide>
<ide> for (i = 0; i < parts.length; i += 2) {
<del> parts[i] = parts[i].replace(/\{([^\}]+)\}/, '<span class="type">$1</span>');
<add> typeMatches = parts[i].match(/\{([^\}]+)\}/g);
<add> if (typeMatches) {
<add> typeMatches.forEach(function(typeMatch) {
<add> parts[i] = parts[i].replace(typeMatch, typeParser.toLink(typeMatch));
<add> });
<add> }
<ide> }
<ide>
<ide> //XXX maybe put more stuff here?
<ide> function getId(text) {
<ide> }
<ide> return text;
<ide> }
<del>
<ide><path>tools/doc/type-parser.js
<add>'use strict';
<add>const nodeDocUrl = '';
<add>const jsDocUrl = 'https://developer.mozilla.org/en-US/docs/Web/JavaScript/' +
<add> 'Reference/Global_Objects/';
<add>const jsPrimitiveUrl = 'https://developer.mozilla.org/en-US/docs/Web/' +
<add> 'JavaScript/Data_structures';
<add>const jsPrimitives = [
<add> 'Number', 'String', 'Boolean', 'Null', 'Symbol'
<add>]
<add>const jsGlobalTypes = [
<add> 'Error', 'Object', 'Function', 'Array', 'Uint8Array',
<add> 'Uint16Array', 'Uint32Array', 'Int8Array', 'Int16Array', 'Int32Array',
<add> 'Uint8ClampedArray', 'Float32Array', 'Float64Array', 'Date', 'RegExp',
<add> 'ArrayBuffer', 'DataView', 'Promise'
<add>];
<add>const typeMap = {
<add> 'Buffer': 'buffer.html#buffer_class_buffer',
<add> 'Handle': 'net.html#net_server_listen_handle_backlog_callback',
<add> 'Stream': 'stream.html#stream_stream',
<add> 'stream.Writable': 'stream.html#stream_class_stream_writable',
<add> 'stream.Readable': 'stream.html#stream_class_stream_readable',
<add> 'ChildProcess': 'child_process.html#child_process_class_childprocess',
<add> 'cluster.Worker': 'cluster.html#cluster_class_worker',
<add> 'dgram.Socket': 'dgram.html#dgram_class_dgram_socket',
<add> 'net.Socket': 'net.html#net_class_net_socket',
<add> 'EventEmitter': 'events.html#events_class_events_eventemitter',
<add> 'Timer': 'timers.html#timers_timers'
<add>};
<add>
<add>module.exports = {
<add> toLink: function (typeInput) {
<add> let typeLinks = [];
<add> typeInput = typeInput.replace('{', '').replace('}', '');
<add> let typeTexts = typeInput.split('|');
<add>
<add> typeTexts.forEach(function (typeText) {
<add> typeText = typeText.trim();
<add> if (typeText) {
<add> let typeUrl = null;
<add> if (jsPrimitives.indexOf(typeText) !== -1) {
<add> typeUrl = jsPrimitiveUrl + '#' + typeText + '_type';
<add> } else if (jsGlobalTypes.indexOf(typeText) !== -1) {
<add> typeUrl = jsDocUrl + typeText;
<add> } else if (typeMap[typeText]) {
<add> typeUrl = nodeDocUrl + typeMap[typeText];
<add> }
<add>
<add> if (typeUrl) {
<add> typeLinks.push('<a href="' + typeUrl + '" class="type"><' +
<add> typeText + '></a>');
<add> } else {
<add> typeLinks.push('<span class="type"><' + typeText + '></span>');
<add> }
<add> }
<add> });
<add>
<add> return typeLinks.length ? typeLinks.join(' | ') : typeInput;
<add> }
<add>} | 2 |
Text | Text | improve buffer documentation | c41e360de7462bd340b19fe2d20c451e52ff2d15 | <ide><path>doc/api/buffer.md
<ide>
<ide> > Stability: 2 - Stable
<ide>
<del>Prior to the introduction of [`TypedArray`][], the JavaScript language had no
<del>mechanism for reading or manipulating streams of binary data. The `Buffer` class
<del>was introduced as part of the Node.js API to enable interaction with octet
<del>streams in TCP streams, file system operations, and other contexts.
<add>In Node.js, `Buffer` objects are used to represent binary data in the form
<add>of a sequence of bytes. Many Node.js APIs, for example streams and file system
<add>operations, support `Buffer`s, as interactions with the operating system or
<add>other processes generally happen in terms of binary data.
<ide>
<del>With [`TypedArray`][] now available, the `Buffer` class implements the
<del>[`Uint8Array`][] API in a manner that is more optimized and suitable for
<del>Node.js.
<add>The `Buffer` class is a subclass of the [`Uint8Array`][] class that is built
<add>into the JavaScript language. A number of additional methods are supported
<add>that cover additional use cases. Node.js APIs accept plain [`Uint8Array`][]s
<add>wherever `Buffer`s are supported as well.
<ide>
<del>Instances of the `Buffer` class are similar to arrays of integers from `0` to
<del>`255` (other integers are coerced to this range by `& 255` operation) but
<del>correspond to fixed-sized, raw memory allocations outside the V8 heap.
<del>The size of the `Buffer` is established when it is created and cannot be
<del>changed.
<add>Instances of the `Buffer` class, and [`Uint8Array`][]s in general,
<add>are similar to arrays of integers from `0` to `255`, but correspond to
<add>fixed-sized blocks of memory and cannot contain any other values.
<add>The size of a `Buffer` is established when it is created and cannot be changed.
<ide>
<ide> The `Buffer` class is within the global scope, making it unlikely that one
<ide> would need to ever use `require('buffer').Buffer`.
<ide> would need to ever use `require('buffer').Buffer`.
<ide> // Creates a zero-filled Buffer of length 10.
<ide> const buf1 = Buffer.alloc(10);
<ide>
<del>// Creates a Buffer of length 10, filled with 0x1.
<add>// Creates a Buffer of length 10,
<add>// filled with bytes which all have the value `1`.
<ide> const buf2 = Buffer.alloc(10, 1);
<ide>
<ide> // Creates an uninitialized buffer of length 10.
<ide> // This is faster than calling Buffer.alloc() but the returned
<ide> // Buffer instance might contain old data that needs to be
<del>// overwritten using either fill() or write().
<add>// overwritten using fill(), write(), or other functions that fill the Buffer's
<add>// contents.
<ide> const buf3 = Buffer.allocUnsafe(10);
<ide>
<del>// Creates a Buffer containing [0x1, 0x2, 0x3].
<add>// Creates a Buffer containing the bytes [1, 2, 3].
<ide> const buf4 = Buffer.from([1, 2, 3]);
<ide>
<del>// Creates a Buffer containing UTF-8 bytes [0x74, 0xc3, 0xa9, 0x73, 0x74].
<del>const buf5 = Buffer.from('tést');
<add>// Creates a Buffer containing the bytes [1, 1, 1, 1] – the entries
<add>// are all truncated using `(value & 255)` to fit into the range 0–255.
<add>const buf5 = Buffer.from([257, 257.5, -255, '1']);
<ide>
<del>// Creates a Buffer containing Latin-1 bytes [0x74, 0xe9, 0x73, 0x74].
<del>const buf6 = Buffer.from('tést', 'latin1');
<del>```
<del>
<del>## `Buffer.from()`, `Buffer.alloc()`, and `Buffer.allocUnsafe()`
<del>
<del>In versions of Node.js prior to 6.0.0, `Buffer` instances were created using the
<del>`Buffer` constructor function, which allocates the returned `Buffer`
<del>differently based on what arguments are provided:
<del>
<del>* Passing a number as the first argument to `Buffer()` (e.g. `new Buffer(10)`)
<del> allocates a new `Buffer` object of the specified size. Prior to Node.js 8.0.0,
<del> the memory allocated for such `Buffer` instances is *not* initialized and
<del> *can contain sensitive data*. Such `Buffer` instances *must* be subsequently
<del> initialized by using either [`buf.fill(0)`][`buf.fill()`] or by writing to the
<del> entire `Buffer`. While this behavior is *intentional* to improve performance,
<del> development experience has demonstrated that a more explicit distinction is
<del> required between creating a fast-but-uninitialized `Buffer` versus creating a
<del> slower-but-safer `Buffer`. Since Node.js 8.0.0, `Buffer(num)` and `new
<del> Buffer(num)` return a `Buffer` with initialized memory.
<del>* Passing a string, array, or `Buffer` as the first argument copies the
<del> passed object's data into the `Buffer`.
<del>* Passing an [`ArrayBuffer`][] or a [`SharedArrayBuffer`][] returns a `Buffer`
<del> that shares allocated memory with the given array buffer.
<del>
<del>Because the behavior of `new Buffer()` is different depending on the type of the
<del>first argument, security and reliability issues can be inadvertently introduced
<del>into applications when argument validation or `Buffer` initialization is not
<del>performed.
<del>
<del>For example, if an attacker can cause an application to receive a number where
<del>a string is expected, the application may call `new Buffer(100)`
<del>instead of `new Buffer("100")`, it will allocate a 100 byte buffer instead
<del>of allocating a 3 byte buffer with content `"100"`. This is commonly possible
<del>using JSON API calls. Since JSON distinguishes between numeric and string types,
<del>it allows injection of numbers where a naive application might expect to always
<del>receive a string. Before Node.js 8.0.0, the 100 byte buffer might contain
<del>arbitrary pre-existing in-memory data, so may be used to expose in-memory
<del>secrets to a remote attacker. Since Node.js 8.0.0, exposure of memory cannot
<del>occur because the data is zero-filled. However, other attacks are still
<del>possible, such as causing very large buffers to be allocated by the server,
<del>leading to performance degradation or crashing on memory exhaustion.
<del>
<del>To make the creation of `Buffer` instances more reliable and less error-prone,
<del>the various forms of the `new Buffer()` constructor have been **deprecated**
<del>and replaced by separate `Buffer.from()`, [`Buffer.alloc()`][], and
<del>[`Buffer.allocUnsafe()`][] methods.
<del>
<del>*Developers should migrate all existing uses of the `new Buffer()` constructors
<del>to one of these new APIs.*
<del>
<del>* [`Buffer.from(array)`][] returns a new `Buffer` that *contains a copy* of the
<del> provided octets.
<del>* [`Buffer.from(arrayBuffer[, byteOffset[, length]])`][`Buffer.from(arrayBuf)`]
<del> returns a new `Buffer` that *shares the same allocated memory* as the given
<del> [`ArrayBuffer`][].
<del>* [`Buffer.from(buffer)`][] returns a new `Buffer` that *contains a copy* of the
<del> contents of the given `Buffer`.
<del>* [`Buffer.from(string[, encoding])`][`Buffer.from(string)`] returns a new
<del> `Buffer` that *contains a copy* of the provided string.
<del>* [`Buffer.alloc(size[, fill[, encoding]])`][`Buffer.alloc()`] returns a new
<del> initialized `Buffer` of the specified size. This method is slower than
<del> [`Buffer.allocUnsafe(size)`][`Buffer.allocUnsafe()`] but guarantees that newly
<del> created `Buffer` instances never contain old data that is potentially
<del> sensitive. A `TypeError` will be thrown if `size` is not a number.
<del>* [`Buffer.allocUnsafe(size)`][`Buffer.allocUnsafe()`] and
<del> [`Buffer.allocUnsafeSlow(size)`][`Buffer.allocUnsafeSlow()`] each return a
<del> new uninitialized `Buffer` of the specified `size`. Because the `Buffer` is
<del> uninitialized, the allocated segment of memory might contain old data that is
<del> potentially sensitive.
<add>// Creates a Buffer containing the UTF-8-encoded bytes for the string 'tést':
<add>// [0x74, 0xc3, 0xa9, 0x73, 0x74] (in hexadecimal notation)
<add>// [116, 195, 169, 115, 116] (in decimal notation)
<add>const buf6 = Buffer.from('tést');
<ide>
<del>`Buffer` instances returned by [`Buffer.allocUnsafe()`][] *may* be allocated off
<del>a shared internal memory pool if `size` is less than or equal to half
<del>[`Buffer.poolSize`][]. Instances returned by [`Buffer.allocUnsafeSlow()`][]
<del>*never* use the shared internal memory pool.
<del>
<del>### The `--zero-fill-buffers` command line option
<del><!-- YAML
<del>added: v5.10.0
<del>-->
<del>
<del>Node.js can be started using the `--zero-fill-buffers` command line option to
<del>cause all newly-allocated `Buffer` instances to be zero-filled upon creation by
<del>default. Without the option, buffers created with [`Buffer.allocUnsafe()`][],
<del>[`Buffer.allocUnsafeSlow()`][], and `new SlowBuffer(size)` are not zero-filled.
<del>Use of this flag can have a significant negative impact on performance. Use the
<del>`--zero-fill-buffers` option only when necessary to enforce that newly allocated
<del>`Buffer` instances cannot contain old data that is potentially sensitive.
<del>
<del>```console
<del>$ node --zero-fill-buffers
<del>> Buffer.allocUnsafe(5);
<del><Buffer 00 00 00 00 00>
<add>// Creates a Buffer containing the Latin-1 bytes [0x74, 0xe9, 0x73, 0x74].
<add>const buf7 = Buffer.from('tést', 'latin1');
<ide> ```
<ide>
<del>### What makes `Buffer.allocUnsafe()` and `Buffer.allocUnsafeSlow()` "unsafe"?
<del>
<del>When calling [`Buffer.allocUnsafe()`][] and [`Buffer.allocUnsafeSlow()`][], the
<del>segment of allocated memory is *uninitialized* (it is not zeroed-out). While
<del>this design makes the allocation of memory quite fast, the allocated segment of
<del>memory might contain old data that is potentially sensitive. Using a `Buffer`
<del>created by [`Buffer.allocUnsafe()`][] without *completely* overwriting the
<del>memory can allow this old data to be leaked when the `Buffer` memory is read.
<del>
<del>While there are clear performance advantages to using
<del>[`Buffer.allocUnsafe()`][], extra care *must* be taken in order to avoid
<del>introducing security vulnerabilities into an application.
<del>
<ide> ## Buffers and Character Encodings
<ide> <!-- YAML
<ide> changes:
<ide> changes:
<ide> description: Removed the deprecated `raw` and `raws` encodings.
<ide> -->
<ide>
<del>When string data is stored in or extracted out of a `Buffer` instance, a
<del>character encoding may be specified.
<add>When converting between `Buffer`s and strings, a character encoding may be
<add>specified. If no character encoding is specified, UTF-8 will be used as the
<add>default.
<ide>
<ide> ```js
<del>const buf = Buffer.from('hello world', 'ascii');
<add>const buf = Buffer.from('hello world', 'utf8');
<ide>
<ide> console.log(buf.toString('hex'));
<ide> // Prints: 68656c6c6f20776f726c64
<ide> console.log(buf.toString('base64'));
<ide> // Prints: aGVsbG8gd29ybGQ=
<ide>
<del>console.log(Buffer.from('fhqwhgads', 'ascii'));
<add>console.log(Buffer.from('fhqwhgads', 'utf8'));
<ide> // Prints: <Buffer 66 68 71 77 68 67 61 64 73>
<ide> console.log(Buffer.from('fhqwhgads', 'utf16le'));
<ide> // Prints: <Buffer 66 00 68 00 71 00 77 00 68 00 67 00 61 00 64 00 73 00>
<ide> ```
<ide>
<del>The character encodings currently supported by Node.js include:
<add>The character encodings currently supported by Node.js are the following:
<ide>
<del>* `'ascii'`: For 7-bit ASCII data only. This encoding is fast and will strip
<del> the high bit if set.
<add>* `'utf8'`: Multi-byte encoded Unicode characters. Many web pages and other
<add> document formats use [UTF-8][]. This is the default character encoding.
<add> When decoding a `Buffer` into a string that does not exclusively contain
<add> valid UTF-8 data, the Unicode replacement character `U+FFFD` � will be used
<add> to represent those errors.
<ide>
<del>* `'utf8'`: Multibyte encoded Unicode characters. Many web pages and other
<del> document formats use UTF-8.
<add>* `'utf16le'`: Multi-byte encoded Unicode characters. Unlike `'utf8'`, each
<add> character in the string will be encoded using either 2 or 4 bytes.
<add> Node.js only supports the [little-endian][endianness] variant of [UTF-16][].
<ide>
<del>* `'utf16le'`: 2 or 4 bytes, little-endian encoded Unicode characters.
<del> Surrogate pairs (U+10000 to U+10FFFF) are supported.
<add>* `'latin1'`: Latin-1 stands for [ISO-8859-1][]. This character encoding only
<add> supports the Unicode characters from `U+0000` to `U+00FF`. Each character is
<add> encoded using a single byte. Characters that do not fit into that range are
<add> truncated and will be mapped to characters in that range.
<ide>
<del>* `'ucs2'`: Alias of `'utf16le'`.
<add>Converting a `Buffer` into a string using one of the above is referred to as
<add>decoding, and converting a string into a `Buffer` is referred to as encoding.
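<add>
<add>For illustration, the following sketch shows a few of these conversions:
<add>
<add>```js
<add>// 'é' (U+00E9) is encoded using two bytes in UTF-8 ...
<add>console.log(Buffer.from('é', 'utf8'));
<add>// Prints: <Buffer c3 a9>
<add>
<add>// ... and using a single byte in Latin-1.
<add>console.log(Buffer.from('é', 'latin1'));
<add>// Prints: <Buffer e9>
<add>
<add>// A lone 0xe9 byte is not valid UTF-8, so decoding it as UTF-8 produces
<add>// the replacement character U+FFFD.
<add>console.log(Buffer.from([0xe9]).toString('utf8'));
<add>// Prints: �
<add>```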
<ide>
<del>* `'base64'`: Base64 encoding. When creating a `Buffer` from a string,
<add>Node.js also supports the following two binary-to-text encodings. For
<add>binary-to-text encodings, the naming convention is reversed: Converting a
<add>`Buffer` into a string is typically referred to as encoding, and converting a
<add>string into a `Buffer` as decoding.
<add>
<add>* `'base64'`: [Base64][] encoding. When creating a `Buffer` from a string,
<ide> this encoding will also correctly accept "URL and Filename Safe Alphabet" as
<ide> specified in [RFC 4648, Section 5][].
<ide>
<del>* `'latin1'`: A way of encoding the `Buffer` into a one-byte encoded string
<del> (as defined by the IANA in [RFC 1345][],
<del> page 63, to be the Latin-1 supplement block and C0/C1 control codes).
<add>* `'hex'`: Encode each byte as two hexadecimal characters. Data truncation
<add>  may occur when decoding strings that do not exclusively contain valid
<add>  hexadecimal characters. See below for an example.
<add>
<add>The following legacy character encodings are also supported:
<ide>
<del>* `'binary'`: Alias for `'latin1'`.
<add>* `'ascii'`: For 7-bit [ASCII][] data only. When encoding a string into a
<add>  `Buffer`, this is equivalent to using `'latin1'`. When decoding a `Buffer`
<add>  into a string, using this encoding will additionally unset the highest bit of
<add>  each byte before decoding as `'latin1'`.
<add> Generally, there should be no reason to use this encoding, as `'utf8'`
<add> (or, if the data is known to always be ASCII-only, `'latin1'`) will be a
<add> better choice when encoding or decoding ASCII-only text. It is only provided
<add> for legacy compatibility.
<ide>
<del>* `'hex'`: Encode each byte as two hexadecimal characters. Data truncation
<del> may occur for unsanitized input. For example:
<add>* `'binary'`: Alias for `'latin1'`. See [binary strings][] for more background
<add> on this topic. The name of this encoding can be very misleading, as all of the
<add> encodings listed here convert between strings and binary data. For converting
<add> between strings and `Buffer`s, typically `'utf-8'` is the right choice.
<add>
<add>* `'ucs2'`: Alias of `'utf16le'`. UCS-2 used to refer to a variant of UTF-16
<add> that did not support characters that had code points larger than U+FFFF.
<add> In Node.js, these code points are always supported.
<ide>
<ide> ```js
<ide> Buffer.from('1ag', 'hex');
<ide> the WHATWG specification it is possible that the server actually returned
<ide> `'win-1252'`-encoded data, and using `'latin1'` encoding may incorrectly decode
<ide> the characters.
<ide>
<del>## Buffers and TypedArray
<add>## Buffers and TypedArrays
<ide> <!-- YAML
<ide> changes:
<ide> - version: v3.0.0
<ide> pr-url: https://github.com/nodejs/node/pull/2002
<ide> description: The `Buffer`s class now inherits from `Uint8Array`.
<ide> -->
<ide>
<del>`Buffer` instances are also [`Uint8Array`][] instances. However, there are
<del>subtle incompatibilities with [`TypedArray`][]. For example, while
<del>[`ArrayBuffer#slice()`][] creates a copy of the slice, the implementation of
<del>[`Buffer#slice()`][`buf.slice()`] creates a view over the existing `Buffer`
<del>without copying, making [`Buffer#slice()`][`buf.slice()`] far more efficient.
<add>`Buffer` instances are also [`Uint8Array`][] instances, which is the language’s
<add>built-in class for working with binary data. [`Uint8Array`][] in turn is a
<add>subclass of [`TypedArray`][]. Therefore, all [`TypedArray`][] methods are also
<add>available on `Buffer`s. However, there are subtle incompatibilities between
<add>the `Buffer` API and the [`TypedArray`][] API.
<add>
<add>In particular:
<ide>
<del>It is also possible to create new [`TypedArray`][] instances from a `Buffer`
<del>with the following caveats:
<add>* While [`TypedArray#slice()`][] creates a copy of part of the `TypedArray`,
<add> [`Buffer#slice()`][`buf.slice()`] creates a view over the existing `Buffer`
<add> without copying. This behavior can be surprising, and only exists for legacy
<add> compatibility. [`TypedArray#subarray()`][] can be used to achieve the behavior
<add> of [`Buffer#slice()`][`buf.slice()`] on both `Buffer`s and other
<add> `TypedArray`s.
<add>* [`buf.toString()`][] is incompatible with its `TypedArray` equivalent.
<add>* A number of methods, e.g. [`buf.indexOf()`][], support additional arguments.
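<add>
<add>The following sketch illustrates the difference in `slice()` behavior:
<add>
<add>```js
<add>const uint8 = new Uint8Array([1, 2, 3, 4]);
<add>const slicedCopy = uint8.slice(1, 3);
<add>uint8[1] = 42;
<add>console.log(slicedCopy[0]);
<add>// Prints: 2 (`TypedArray#slice()` copied the data)
<add>
<add>const buf = Buffer.from([1, 2, 3, 4]);
<add>const slicedView = buf.slice(1, 3);
<add>buf[1] = 42;
<add>console.log(slicedView[0]);
<add>// Prints: 42 (`buf.slice()` returned a view onto the same memory)
<add>```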
<ide>
<del>1. The `Buffer` object's memory is copied to the [`TypedArray`][], not shared.
<add>There are two ways to create new [`TypedArray`][] instances from a `Buffer`.
<ide>
<del>2. The `Buffer` object's memory is interpreted as an array of distinct
<del>elements, and not as a byte array of the target type. That is,
<add>When passing a `Buffer` to a [`TypedArray`][] constructor, the `Buffer`’s
<add>elements will be copied, interpreted as an array of integers, and not as a byte
<add>array of the target type. For example,
<ide> `new Uint32Array(Buffer.from([1, 2, 3, 4]))` creates a 4-element
<del>[`Uint32Array`][] with elements `[1, 2, 3, 4]`, not a [`Uint32Array`][] with a
<del>single element `[0x1020304]` or `[0x4030201]`.
<add>[`Uint32Array`][] with elements `[1, 2, 3, 4]`, rather than a
<add>[`Uint32Array`][] with a single element `[0x1020304]` or `[0x4030201]`.
<ide>
<del>It is possible to create a new `Buffer` that shares the same allocated memory as
<del>a [`TypedArray`][] instance by using the `TypedArray` object's `.buffer`
<del>property.
<add>In order to create a [`TypedArray`][] that shares its memory with the `Buffer`,
<add>the underlying [`ArrayBuffer`][] can be passed to the [`TypedArray`][]
<add>constructor instead:
<add>
<add>```js
<add>const buf = Buffer.from('hello', 'utf16le');
<add>const uint16arr = new Uint16Array(
<add> buf.buffer, buf.byteOffset, buf.length / Uint16Array.BYTES_PER_ELEMENT);
<add>```
<add>
<add>It is also possible to create a new `Buffer` that shares the same allocated
<add>memory as a [`TypedArray`][] instance by using the `TypedArray` object’s
<add>`.buffer` property in the same way. [`Buffer.from()`][`Buffer.from(arrayBuf)`]
<add>behaves like `new Uint8Array()` in this context.
<ide>
<ide> ```js
<ide> const arr = new Uint16Array(2);
<ide> Additionally, the [`buf.values()`][], [`buf.keys()`][], and
<ide> The `Buffer` class is a global type for dealing with binary data directly.
<ide> It can be constructed in a variety of ways.
<ide>
<del>### `new Buffer(array)`
<add>### Class Method: `Buffer.alloc(size[, fill[, encoding]])`
<ide> <!-- YAML
<del>deprecated: v6.0.0
<add>added: v5.10.0
<ide> changes:
<ide> - version: v10.0.0
<del> pr-url: https://github.com/nodejs/node/pull/19524
<del> description: Calling this constructor emits a deprecation warning when
<del> run from code outside the `node_modules` directory.
<del> - version: v7.2.1
<del> pr-url: https://github.com/nodejs/node/pull/9529
<del> description: Calling this constructor no longer emits a deprecation warning.
<del> - version: v7.0.0
<del> pr-url: https://github.com/nodejs/node/pull/8169
<del> description: Calling this constructor emits a deprecation warning now.
<add> pr-url: https://github.com/nodejs/node/pull/18129
<add> description: Attempting to fill a non-zero length buffer with a zero length
<add> buffer triggers a thrown exception.
<add> - version: v10.0.0
<add> pr-url: https://github.com/nodejs/node/pull/17427
<add> description: Specifying an invalid string for `fill` triggers a thrown
<add> exception.
<add> - version: v8.9.3
<add> pr-url: https://github.com/nodejs/node/pull/17428
<add> description: Specifying an invalid string for `fill` now results in a
<add> zero-filled buffer.
<ide> -->
<ide>
<del>> Stability: 0 - Deprecated: Use [`Buffer.from(array)`][] instead.
<del>
<del>* `array` {integer[]} An array of bytes to copy from.
<add>* `size` {integer} The desired length of the new `Buffer`.
<add>* `fill` {string|Buffer|Uint8Array|integer} A value to pre-fill the new `Buffer`
<add> with. **Default:** `0`.
<add>* `encoding` {string} If `fill` is a string, this is its encoding.
<add> **Default:** `'utf8'`.
<ide>
<del>Allocates a new `Buffer` using an `array` of octets.
<add>Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the
<add>`Buffer` will be zero-filled.
<ide>
<ide> ```js
<del>// Creates a new Buffer containing the UTF-8 bytes of the string 'buffer'.
<del>const buf = new Buffer([0x62, 0x75, 0x66, 0x66, 0x65, 0x72]);
<add>const buf = Buffer.alloc(5);
<add>
<add>console.log(buf);
<add>// Prints: <Buffer 00 00 00 00 00>
<ide> ```
<ide>
<del>### `new Buffer(arrayBuffer[, byteOffset[, length]])`
<del><!-- YAML
<del>added: v3.0.0
<del>deprecated: v6.0.0
<del>changes:
<del> - version: v10.0.0
<del> pr-url: https://github.com/nodejs/node/pull/19524
<del> description: Calling this constructor emits a deprecation warning when
<del> run from code outside the `node_modules` directory.
<del> - version: v7.2.1
<del> pr-url: https://github.com/nodejs/node/pull/9529
<del> description: Calling this constructor no longer emits a deprecation warning.
<del> - version: v7.0.0
<del> pr-url: https://github.com/nodejs/node/pull/8169
<del> description: Calling this constructor emits a deprecation warning now.
<del> - version: v6.0.0
<del> pr-url: https://github.com/nodejs/node/pull/4682
<del> description: The `byteOffset` and `length` parameters are supported now.
<del>-->
<add>If `size` is larger than
<add>[`buffer.constants.MAX_LENGTH`][] or smaller than 0, [`ERR_INVALID_OPT_VALUE`][]
<add>is thrown.
<ide>
<del>> Stability: 0 - Deprecated: Use
<del>> [`Buffer.from(arrayBuffer[, byteOffset[, length]])`][`Buffer.from(arrayBuf)`]
<del>> instead.
<add>If `fill` is specified, the allocated `Buffer` will be initialized by calling
<add>[`buf.fill(fill)`][`buf.fill()`].
<ide>
<del>* `arrayBuffer` {ArrayBuffer|SharedArrayBuffer} An [`ArrayBuffer`][],
<del> [`SharedArrayBuffer`][] or the `.buffer` property of a [`TypedArray`][].
<del>* `byteOffset` {integer} Index of first byte to expose. **Default:** `0`.
<del>* `length` {integer} Number of bytes to expose.
<del> **Default:** `arrayBuffer.byteLength - byteOffset`.
<add>```js
<add>const buf = Buffer.alloc(5, 'a');
<ide>
<del>This creates a view of the [`ArrayBuffer`][] or [`SharedArrayBuffer`][] without
<del>copying the underlying memory. For example, when passed a reference to the
<del>`.buffer` property of a [`TypedArray`][] instance, the newly created `Buffer`
<del>will share the same allocated memory as the [`TypedArray`][].
<add>console.log(buf);
<add>// Prints: <Buffer 61 61 61 61 61>
<add>```
<ide>
<del>The optional `byteOffset` and `length` arguments specify a memory range within
<del>the `arrayBuffer` that will be shared by the `Buffer`.
<add>If both `fill` and `encoding` are specified, the allocated `Buffer` will be
<add>initialized by calling [`buf.fill(fill, encoding)`][`buf.fill()`].
<ide>
<ide> ```js
<del>const arr = new Uint16Array(2);
<del>
<del>arr[0] = 5000;
<del>arr[1] = 4000;
<del>
<del>// Shares memory with `arr`.
<del>const buf = new Buffer(arr.buffer);
<add>const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64');
<ide>
<ide> console.log(buf);
<del>// Prints: <Buffer 88 13 a0 0f>
<add>// Prints: <Buffer 68 65 6c 6c 6f 20 77 6f 72 6c 64>
<add>```
<ide>
<del>// Changing the original Uint16Array changes the Buffer also.
<del>arr[1] = 6000;
<add>Calling [`Buffer.alloc()`][] can be measurably slower than the alternative
<add>[`Buffer.allocUnsafe()`][] but ensures that the newly created `Buffer` instance
<add>contents will never contain sensitive data from previous allocations, including
<add>data that might not have been allocated for `Buffer`s.
<ide>
<del>console.log(buf);
<del>// Prints: <Buffer 88 13 70 17>
<del>```
<add>A `TypeError` will be thrown if `size` is not a number.
<ide>
<del>### `new Buffer(buffer)`
<add>### Class Method: `Buffer.allocUnsafe(size)`
<ide> <!-- YAML
<del>deprecated: v6.0.0
<add>added: v5.10.0
<ide> changes:
<del> - version: v10.0.0
<del> pr-url: https://github.com/nodejs/node/pull/19524
<del> description: Calling this constructor emits a deprecation warning when
<del> run from code outside the `node_modules` directory.
<del> - version: v7.2.1
<del> pr-url: https://github.com/nodejs/node/pull/9529
<del> description: Calling this constructor no longer emits a deprecation warning.
<ide> - version: v7.0.0
<del> pr-url: https://github.com/nodejs/node/pull/8169
<del> description: Calling this constructor emits a deprecation warning now.
<add> pr-url: https://github.com/nodejs/node/pull/7079
<add> description: Passing a negative `size` will now throw an error.
<ide> -->
<ide>
<del>> Stability: 0 - Deprecated: Use [`Buffer.from(buffer)`][] instead.
<add>* `size` {integer} The desired length of the new `Buffer`.
<ide>
<del>* `buffer` {Buffer|Uint8Array} An existing `Buffer` or [`Uint8Array`][] from
<del> which to copy data.
<add>Allocates a new `Buffer` of `size` bytes. If `size` is larger than
<add>[`buffer.constants.MAX_LENGTH`][] or smaller than 0, [`ERR_INVALID_OPT_VALUE`][]
<add>is thrown.
<ide>
<del>Copies the passed `buffer` data onto a new `Buffer` instance.
<add>The underlying memory for `Buffer` instances created in this way is *not
<add>initialized*. The contents of the newly created `Buffer` are unknown and
<add>*may contain sensitive data*. Use [`Buffer.alloc()`][] instead to initialize
<add>`Buffer` instances with zeroes.
<ide>
<ide> ```js
<del>const buf1 = new Buffer('buffer');
<del>const buf2 = new Buffer(buf1);
<add>const buf = Buffer.allocUnsafe(10);
<ide>
<del>buf1[0] = 0x61;
<add>console.log(buf);
<add>// Prints (contents may vary): <Buffer a0 8b 28 3f 01 00 00 00 50 32>
<ide>
<del>console.log(buf1.toString());
<del>// Prints: auffer
<del>console.log(buf2.toString());
<del>// Prints: buffer
<del>```
<del>
<del>### `new Buffer(size)`
<del><!-- YAML
<del>deprecated: v6.0.0
<del>changes:
<del> - version: v10.0.0
<del> pr-url: https://github.com/nodejs/node/pull/19524
<del> description: Calling this constructor emits a deprecation warning when
<del> run from code outside the `node_modules` directory.
<del> - version: v8.0.0
<del> pr-url: https://github.com/nodejs/node/pull/12141
<del> description: The `new Buffer(size)` will return zero-filled memory by
<del> default.
<del> - version: v7.2.1
<del> pr-url: https://github.com/nodejs/node/pull/9529
<del> description: Calling this constructor no longer emits a deprecation warning.
<del> - version: v7.0.0
<del> pr-url: https://github.com/nodejs/node/pull/8169
<del> description: Calling this constructor emits a deprecation warning now.
<del>-->
<del>
<del>> Stability: 0 - Deprecated: Use [`Buffer.alloc()`][] instead (also see
<del>> [`Buffer.allocUnsafe()`][]).
<del>
<del>* `size` {integer} The desired length of the new `Buffer`.
<del>
<del>Allocates a new `Buffer` of `size` bytes. If `size` is larger than
<del>[`buffer.constants.MAX_LENGTH`][] or smaller than 0, [`ERR_INVALID_OPT_VALUE`][]
<del>is thrown. A zero-length `Buffer` is created if `size` is 0.
<del>
<del>Prior to Node.js 8.0.0, the underlying memory for `Buffer` instances
<del>created in this way is *not initialized*. The contents of a newly created
<del>`Buffer` are unknown and *may contain sensitive data*. Use
<del>[`Buffer.alloc(size)`][`Buffer.alloc()`] instead to initialize a `Buffer`
<del>with zeroes.
<del>
<del>```js
<del>const buf = new Buffer(10);
<del>
<del>console.log(buf);
<del>// Prints: <Buffer 00 00 00 00 00 00 00 00 00 00>
<del>```
<del>
<del>### `new Buffer(string[, encoding])`
<del><!-- YAML
<del>deprecated: v6.0.0
<del>changes:
<del> - version: v10.0.0
<del> pr-url: https://github.com/nodejs/node/pull/19524
<del> description: Calling this constructor emits a deprecation warning when
<del> run from code outside the `node_modules` directory.
<del> - version: v7.2.1
<del> pr-url: https://github.com/nodejs/node/pull/9529
<del> description: Calling this constructor no longer emits a deprecation warning.
<del> - version: v7.0.0
<del> pr-url: https://github.com/nodejs/node/pull/8169
<del> description: Calling this constructor emits a deprecation warning now.
<del>-->
<del>
<del>> Stability: 0 - Deprecated:
<del>> Use [`Buffer.from(string[, encoding])`][`Buffer.from(string)`] instead.
<del>
<del>* `string` {string} String to encode.
<del>* `encoding` {string} The encoding of `string`. **Default:** `'utf8'`.
<del>
<del>Creates a new `Buffer` containing `string`. The `encoding` parameter identifies
<del>the character encoding of `string`.
<del>
<del>```js
<del>const buf1 = new Buffer('this is a tést');
<del>const buf2 = new Buffer('7468697320697320612074c3a97374', 'hex');
<del>
<del>console.log(buf1.toString());
<del>// Prints: this is a tést
<del>console.log(buf2.toString());
<del>// Prints: this is a tést
<del>console.log(buf1.toString('ascii'));
<del>// Prints: this is a tC)st
<del>```
<del>
<del>### Class Method: `Buffer.alloc(size[, fill[, encoding]])`
<del><!-- YAML
<del>added: v5.10.0
<del>changes:
<del> - version: v10.0.0
<del> pr-url: https://github.com/nodejs/node/pull/18129
<del> description: Attempting to fill a non-zero length buffer with a zero length
<del> buffer triggers a thrown exception.
<del> - version: v10.0.0
<del> pr-url: https://github.com/nodejs/node/pull/17427
<del> description: Specifying an invalid string for `fill` triggers a thrown
<del> exception.
<del> - version: v8.9.3
<del> pr-url: https://github.com/nodejs/node/pull/17428
<del> description: Specifying an invalid string for `fill` now results in a
<del> zero-filled buffer.
<del>-->
<del>
<del>* `size` {integer} The desired length of the new `Buffer`.
<del>* `fill` {string|Buffer|Uint8Array|integer} A value to pre-fill the new `Buffer`
<del> with. **Default:** `0`.
<del>* `encoding` {string} If `fill` is a string, this is its encoding.
<del> **Default:** `'utf8'`.
<del>
<del>Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the
<del>`Buffer` will be *zero-filled*.
<del>
<del>```js
<del>const buf = Buffer.alloc(5);
<del>
<del>console.log(buf);
<del>// Prints: <Buffer 00 00 00 00 00>
<del>```
<del>
<del>If `size` is larger than
<del>[`buffer.constants.MAX_LENGTH`][] or smaller than 0, [`ERR_INVALID_OPT_VALUE`][]
<del>is thrown. A zero-length `Buffer` is created if `size` is 0.
<del>
<del>If `fill` is specified, the allocated `Buffer` will be initialized by calling
<del>[`buf.fill(fill)`][`buf.fill()`].
<del>
<del>```js
<del>const buf = Buffer.alloc(5, 'a');
<del>
<del>console.log(buf);
<del>// Prints: <Buffer 61 61 61 61 61>
<del>```
<del>
<del>If both `fill` and `encoding` are specified, the allocated `Buffer` will be
<del>initialized by calling [`buf.fill(fill, encoding)`][`buf.fill()`].
<del>
<del>```js
<del>const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64');
<del>
<del>console.log(buf);
<del>// Prints: <Buffer 68 65 6c 6c 6f 20 77 6f 72 6c 64>
<del>```
<del>
<del>Calling [`Buffer.alloc()`][] can be significantly slower than the alternative
<del>[`Buffer.allocUnsafe()`][] but ensures that the newly created `Buffer` instance
<del>contents will *never contain sensitive data*.
<del>
<del>A `TypeError` will be thrown if `size` is not a number.
<del>
<del>### Class Method: `Buffer.allocUnsafe(size)`
<del><!-- YAML
<del>added: v5.10.0
<del>changes:
<del> - version: v7.0.0
<del> pr-url: https://github.com/nodejs/node/pull/7079
<del> description: Passing a negative `size` will now throw an error.
<del>-->
<del>
<del>* `size` {integer} The desired length of the new `Buffer`.
<del>
<del>Allocates a new `Buffer` of `size` bytes. If `size` is larger than
<del>[`buffer.constants.MAX_LENGTH`][] or smaller than 0, [`ERR_INVALID_OPT_VALUE`][]
<del>is thrown. A zero-length `Buffer` is created if `size` is 0.
<del>
<del>The underlying memory for `Buffer` instances created in this way is *not
<del>initialized*. The contents of the newly created `Buffer` are unknown and
<del>*may contain sensitive data*. Use [`Buffer.alloc()`][] instead to initialize
<del>`Buffer` instances with zeroes.
<del>
<del>```js
<del>const buf = Buffer.allocUnsafe(10);
<del>
<del>console.log(buf);
<del>// Prints (contents may vary): <Buffer a0 8b 28 3f 01 00 00 00 50 32>
<del>
<del>buf.fill(0);
<del>
<del>console.log(buf);
<del>// Prints: <Buffer 00 00 00 00 00 00 00 00 00 00>
<add>buf.fill(0);
<add>
<add>console.log(buf);
<add>// Prints: <Buffer 00 00 00 00 00 00 00 00 00 00>
<ide> ```
<ide>
<ide> A `TypeError` will be thrown if `size` is not a number.
<ide> allocations under 4KB are sliced from a single pre-allocated `Buffer`. This
<ide> allows applications to avoid the garbage collection overhead of creating many
<ide> individually allocated `Buffer` instances. This approach improves both
<ide> performance and memory usage by eliminating the need to track and clean up as
<del>many persistent objects.
<add>many individual `ArrayBuffer` objects.
<ide>
<ide> However, in the case where a developer may need to retain a small chunk of
<ide> memory from a pool for an indeterminate amount of time, it may be appropriate
<ide> socket.on('readable', () => {
<ide> });
<ide> ```
<ide>
<del>`Buffer.allocUnsafeSlow()` should be used only as a last resort after a
<del>developer has observed undue memory retention in their applications.
<del>
<ide> A `TypeError` will be thrown if `size` is not a number.
<ide>
<ide> ### Class Method: `Buffer.byteLength(string[, encoding])`
<ide> changes:
<ide> **Default:** `'utf8'`.
<ide> * Returns: {integer} The number of bytes contained within `string`.
<ide>
<del>Returns the actual byte length of a string. This is not the same as
<del>[`String.prototype.length`][] since that returns the number of *characters* in
<del>a string.
<add>Returns the byte length of a string when encoded using `encoding`.
<add>This is not the same as [`String.prototype.length`][], which does not account
<add>for the encoding that is used to convert the string into bytes.
<ide>
<ide> For `'base64'` and `'hex'`, this function assumes valid input. For strings that
<del>contain non-Base64/Hex-encoded data (e.g. whitespace), the return value might be
<add>contain non-base64/hex-encoded data (e.g. whitespace), the return value might be
<ide> greater than the length of a `Buffer` created from the string.
<ide>
<ide> ```js
<ide> console.log(`${str}: ${str.length} characters, ` +
<ide> ```
<ide>
<ide> When `string` is a `Buffer`/[`DataView`][]/[`TypedArray`][]/[`ArrayBuffer`][]/
<del>[`SharedArrayBuffer`][], the actual byte length is returned.
<add>[`SharedArrayBuffer`][], the byte length as reported by `.byteLength`
<add>is returned.
<ide>
<ide> ### Class Method: `Buffer.compare(buf1, buf2)`
<ide> <!-- YAML
<ide> changes:
<ide>
<ide> * `buf1` {Buffer|Uint8Array}
<ide> * `buf2` {Buffer|Uint8Array}
<del>* Returns: {integer}
<add>* Returns: {integer} Either `-1`, `0`, or `1`, depending on the result of the
<add> comparison. See [`buf.compare()`][] for details.
<ide>
<del>Compares `buf1` to `buf2` typically for the purpose of sorting arrays of
<add>Compares `buf1` to `buf2`, typically for the purpose of sorting arrays of
<ide> `Buffer` instances. This is equivalent to calling
<ide> [`buf1.compare(buf2)`][`buf.compare()`].
<ide>
<ide> changes:
<ide> -->
<ide>
<ide> * `list` {Buffer[] | Uint8Array[]} List of `Buffer` or [`Uint8Array`][]
<del> instances to concat.
<add> instances to concatenate.
<ide> * `totalLength` {integer} Total length of the `Buffer` instances in `list`
<ide> when concatenated.
<ide> * Returns: {Buffer}
<ide> If the list has no items, or if the `totalLength` is 0, then a new zero-length
<ide> `Buffer` is returned.
<ide>
<ide> If `totalLength` is not provided, it is calculated from the `Buffer` instances
<del>in `list`. This however causes an additional loop to be executed in order to
<del>calculate the `totalLength`, so it is faster to provide the length explicitly if
<del>it is already known.
<add>in `list` by adding their lengths.
<ide>
<ide> If `totalLength` is provided, it is coerced to an unsigned integer. If the
<ide> combined length of the `Buffer`s in `list` exceeds `totalLength`, the result is
<ide> added: v5.10.0
<ide>
<ide> * `array` {integer[]}
<ide>
<del>Allocates a new `Buffer` using an `array` of octets.
<add>Allocates a new `Buffer` using an `array` of bytes in the range `0` – `255`.
<add>Array entries outside that range will be truncated to fit into it.
<ide>
<ide> ```js
<del>// Creates a new Buffer containing UTF-8 bytes of the string 'buffer'.
<add>// Creates a new Buffer containing the UTF-8 bytes of the string 'buffer'.
<ide> const buf = Buffer.from([0x62, 0x75, 0x66, 0x66, 0x65, 0x72]);
<ide> ```
<ide>
<ide> added: v5.10.0
<ide> -->
<ide>
<ide> * `arrayBuffer` {ArrayBuffer|SharedArrayBuffer} An [`ArrayBuffer`][],
<del> [`SharedArrayBuffer`][], or the `.buffer` property of a [`TypedArray`][].
<add> [`SharedArrayBuffer`][], for example the `.buffer` property of a
<add> [`TypedArray`][].
<ide> * `byteOffset` {integer} Index of first byte to expose. **Default:** `0`.
<ide> * `length` {integer} Number of bytes to expose.
<ide> **Default:** `arrayBuffer.byteLength - byteOffset`.
<ide> added: v5.10.0
<ide> * `encoding` {string} The encoding of `string`. **Default:** `'utf8'`.
<ide>
<ide> Creates a new `Buffer` containing `string`. The `encoding` parameter identifies
<del>the character encoding of `string`.
<add>the character encoding to be used when converting `string` into bytes.
<ide>
<ide> ```js
<ide> const buf1 = Buffer.from('this is a tést');
<ide> console.log(buf1.toString());
<ide> // Prints: this is a tést
<ide> console.log(buf2.toString());
<ide> // Prints: this is a tést
<del>console.log(buf1.toString('ascii'));
<del>// Prints: this is a tC)st
<add>console.log(buf1.toString('latin1'));
<add>// Prints: this is a tést
<ide> ```
<ide>
<ide> A `TypeError` will be thrown if `string` is not a string or other type
<ide> added: v0.9.1
<ide> * `encoding` {string} A character encoding name to check.
<ide> * Returns: {boolean}
<ide>
<del>Returns `true` if `encoding` contains a supported character encoding, or `false`
<del>otherwise.
<add>Returns `true` if `encoding` is the name of a supported character encoding,
<add>or `false` otherwise.
<ide>
<ide> ```js
<ide> console.log(Buffer.isEncoding('utf-8'));
<ide> The index operator `[index]` can be used to get and set the octet at position
<ide> range is between `0x00` and `0xFF` (hex) or `0` and `255` (decimal).
<ide>
<ide> This operator is inherited from `Uint8Array`, so its behavior on out-of-bounds
<del>access is the same as `UInt8Array`. In other words, getting returns `undefined`
<del>and setting does nothing.
<add>access is the same as `Uint8Array`. In other words, `buf[index]` returns
<add>`undefined` when `index` is negative or `>= buf.length`, and
<add>`buf[index] = value` does not modify the buffer if `index` is negative or
<add>`>= buf.length`.
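<add>
<add>A small sketch of this behavior:
<add>
<add>```js
<add>const buf = Buffer.from([1, 2, 3]);
<add>
<add>console.log(buf[5]);
<add>// Prints: undefined
<add>
<add>buf[5] = 42;
<add>console.log(buf.length);
<add>// Prints: 3 (the out-of-bounds assignment was ignored)
<add>```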
<ide>
<ide> ```js
<ide> // Copy an ASCII string into a `Buffer` one byte at a time.
<add>// (This only works for ASCII-only strings. In general, one should use
<add>// `Buffer.from()` to perform this conversion.)
<ide>
<ide> const str = 'Node.js';
<ide> const buf = Buffer.allocUnsafe(str.length);
<ide> for (let i = 0; i < str.length; i++) {
<ide> buf[i] = str.charCodeAt(i);
<ide> }
<ide>
<del>console.log(buf.toString('ascii'));
<add>console.log(buf.toString('utf8'));
<ide> // Prints: Node.js
<ide> ```
<ide>
<ide> console.log(buffer.buffer === arrayBuffer);
<ide> * {integer} The `byteOffset` on the underlying `ArrayBuffer` object based on
<ide> which this `Buffer` object is created.
<ide>
<del>When setting `byteOffset` in `Buffer.from(ArrayBuffer, byteOffset, length)`
<del>or sometimes when allocating a buffer smaller than `Buffer.poolSize` the
<add>When setting `byteOffset` in `Buffer.from(ArrayBuffer, byteOffset, length)`,
<add>or sometimes when allocating a buffer smaller than `Buffer.poolSize`, the
<ide> buffer doesn't start from a zero offset on the underlying `ArrayBuffer`.
<ide>
<ide> This can cause problems when accessing the underlying `ArrayBuffer` directly
<del>using `buf.buffer`, as the first bytes in this `ArrayBuffer` may be unrelated
<add>using `buf.buffer`, as other parts of the `ArrayBuffer` may be unrelated
<ide> to the `buf` object itself.
<ide>
<del>A common issue is when casting a `Buffer` object to a `TypedArray` object,
<del>in this case one needs to specify the `byteOffset` correctly:
<add>A common issue when creating a `TypedArray` object that shares its memory with
<add>a `Buffer` is that in this case one needs to specify the `byteOffset` correctly:
<ide>
<ide> ```js
<ide> // Create a buffer smaller than `Buffer.poolSize`.
<ide> const nodeBuffer = new Buffer.from([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
<ide>
<del>// When casting the Node.js Buffer to an Int8 TypedArray remember to use the
<del>// byteOffset.
<add>// When casting the Node.js Buffer to an Int8Array, use the byteOffset
<add>// to refer only to the part of `nodeBuffer.buffer` that contains the memory
<add>// for `nodeBuffer`.
<ide> new Int8Array(nodeBuffer.buffer, nodeBuffer.byteOffset, nodeBuffer.length);
<ide> ```
<ide>
<ide> added: v0.1.90
<ide> inclusive). **Default:** [`buf.length`][].
<ide> * Returns: {integer} The number of bytes copied.
<ide>
<del>Copies data from a region of `buf` to a region in `target` even if the `target`
<add>Copies data from a region of `buf` to a region in `target`, even if the `target`
<ide> memory region overlaps with `buf`.
<ide>
<add>[`TypedArray#set()`][] performs the same operation, and is available for all
<add>TypedArrays, including Node.js `Buffer`s, although it takes different
<add>function arguments.
<add>
<ide> ```js
<ide> // Create two `Buffer` instances.
<ide> const buf1 = Buffer.allocUnsafe(26);
<ide> for (let i = 0; i < 26; i++) {
<ide>
<ide> // Copy `buf1` bytes 16 through 19 into `buf2` starting at byte 8 of `buf2`.
<ide> buf1.copy(buf2, 8, 16, 20);
<add>// This is equivalent to:
<add>// buf2.set(buf1.subarray(16, 20), 8);
<ide>
<ide> console.log(buf2.toString('ascii', 0, 25));
<ide> // Prints: !!!!!!!!qrst!!!!!!!!!!!!!
<ide> changes:
<ide> * Returns: {boolean}
<ide>
<ide> Returns `true` if both `buf` and `otherBuffer` have exactly the same bytes,
<del>`false` otherwise.
<add>`false` otherwise. Equivalent to
<add>[`buf.compare(otherBuffer) === 0`][`buf.compare()`].
<ide>
<ide> ```js
<ide> const buf1 = Buffer.from('ABC');
<ide> If the final write of a `fill()` operation falls on a multi-byte character,
<ide> then only the bytes of that character that fit into `buf` are written:
<ide>
<ide> ```js
<del>// Fill a `Buffer` with a two-byte character.
<add>// Fill a `Buffer` with a character that takes up two bytes in UTF-8.
<ide>
<del>console.log(Buffer.allocUnsafe(3).fill('\u0222'));
<del>// Prints: <Buffer c8 a2 c8>
<add>console.log(Buffer.allocUnsafe(5).fill('\u0222'));
<add>// Prints: <Buffer c8 a2 c8 a2 c8>
<ide> ```
<ide>
<ide> If `value` contains invalid characters, it is truncated; if no valid
<ide> added: v0.1.90
<ide>
<ide> * {integer}
<ide>
<del>Returns the amount of memory allocated for `buf` in bytes. This
<del>does not necessarily reflect the amount of "usable" data within `buf`.
<add>Returns the number of bytes in `buf`.
<ide>
<ide> ```js
<del>// Create a `Buffer` and write a shorter ASCII string to it.
<add>// Create a `Buffer` and write a shorter string to it using UTF-8.
<ide>
<ide> const buf = Buffer.alloc(1234);
<ide>
<ide> console.log(buf.length);
<ide> // Prints: 1234
<ide>
<del>buf.write('some string', 0, 'ascii');
<add>buf.write('some string', 0, 'utf8');
<ide>
<ide> console.log(buf.length);
<ide> // Prints: 1234
<ide> ```
<ide>
<del>While the `length` property is not immutable, changing the value of `length`
<del>can result in undefined and inconsistent behavior. Applications that wish to
<del>modify the length of a `Buffer` should therefore treat `length` as read-only and
<del>use [`buf.slice()`][] to create a new `Buffer`.
<del>
<del>```js
<del>let buf = Buffer.allocUnsafe(10);
<del>
<del>buf.write('abcdefghj', 0, 'ascii');
<del>
<del>console.log(buf.length);
<del>// Prints: 10
<del>
<del>buf = buf.slice(0, 5);
<del>
<del>console.log(buf.length);
<del>// Prints: 5
<del>```
<del>
<ide> ### `buf.parent`
<ide> <!-- YAML
<ide> deprecated: v8.0.0
<ide> added: v12.0.0
<ide> * Returns: {bigint}
<ide>
<ide> Reads a signed 64-bit integer from `buf` at the specified `offset` with
<del>the specified endian format (`readBigInt64BE()` returns big endian,
<del>`readBigInt64LE()` returns little endian).
<add>the specified [endianness][] (`readBigInt64BE()` reads as big endian,
<add>`readBigInt64LE()` reads as little endian).
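<add>
<add>A short illustrative example:
<add>
<add>```js
<add>const buf = Buffer.from([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x02]);
<add>
<add>console.log(buf.readBigInt64BE(0));
<add>// Prints: 258n
<add>```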
<ide>
<ide> Integers read from a `Buffer` are interpreted as two's complement signed values.
<ide>
<ide> added: v12.0.0
<ide> * Returns: {bigint}
<ide>
<ide> Reads an unsigned 64-bit integer from `buf` at the specified `offset` with
<del>specified endian format (`readBigUInt64BE()` returns big endian,
<del>`readBigUInt64LE()` returns little endian).
<add>the specified [endianness][] (`readBigUInt64BE()` reads as big endian,
<add>`readBigUInt64LE()` reads as little endian).
<ide>
<ide> ```js
<ide> const buf = Buffer.from([0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff]);
<ide> changes:
<ide> satisfy `0 <= offset <= buf.length - 8`. **Default:** `0`.
<ide> * Returns: {number}
<ide>
<del>Reads a 64-bit double from `buf` at the specified `offset` with specified
<del>endian format (`readDoubleBE()` returns big endian, `readDoubleLE()` returns
<add>Reads a 64-bit double from `buf` at the specified `offset` with the specified
<add>[endianness][] (`readDoubleBE()` reads as big endian, `readDoubleLE()` reads as
<ide> little endian).
<ide>
<ide> ```js
<ide> changes:
<ide> satisfy `0 <= offset <= buf.length - 4`. **Default:** `0`.
<ide> * Returns: {number}
<ide>
<del>Reads a 32-bit float from `buf` at the specified `offset` with specified
<del>endian format (`readFloatBE()` returns big endian, `readFloatLE()` returns
<add>Reads a 32-bit float from `buf` at the specified `offset` with the specified
<add>[endianness][] (`readFloatBE()` reads as big endian, `readFloatLE()` reads as
<ide> little endian).
<ide>
<ide> ```js
<ide> changes:
<ide> * Returns: {integer}
<ide>
<ide> Reads a signed 16-bit integer from `buf` at the specified `offset` with
<del>the specified endian format (`readInt16BE()` returns big endian,
<del>`readInt16LE()` returns little endian).
<add>the specified [endianness][] (`readInt16BE()` reads as big endian,
<add>`readInt16LE()` reads as little endian).
<ide>
<ide> Integers read from a `Buffer` are interpreted as two's complement signed values.
<ide>
<ide> changes:
<ide> * Returns: {integer}
<ide>
<ide> Reads a signed 32-bit integer from `buf` at the specified `offset` with
<del>the specified endian format (`readInt32BE()` returns big endian,
<del>`readInt32LE()` returns little endian).
<add>the specified [endianness][] (`readInt32BE()` reads as big endian,
<add>`readInt32LE()` reads as little endian).
<ide>
<ide> Integers read from a `Buffer` are interpreted as two's complement signed values.
<ide>
<ide> changes:
<ide> * Returns: {integer}
<ide>
<ide> Reads an unsigned 16-bit integer from `buf` at the specified `offset` with
<del>specified endian format (`readUInt16BE()` returns big endian, `readUInt16LE()`
<del>returns little endian).
<add>the specified [endianness][] (`readUInt16BE()` reads as big endian,
<add>`readUInt16LE()` reads as little endian).
<ide>
<ide> ```js
<ide> const buf = Buffer.from([0x12, 0x34, 0x56]);
<ide> changes:
<ide> * Returns: {integer}
<ide>
<ide> Reads an unsigned 32-bit integer from `buf` at the specified `offset` with
<del>specified endian format (`readUInt32BE()` returns big endian,
<del>`readUInt32LE()` returns little endian).
<add>the specified [endianness][] (`readUInt32BE()` reads as big endian,
<add>`readUInt32LE()` reads as little endian).
<ide>
<ide> ```js
<ide> const buf = Buffer.from([0x12, 0x34, 0x56, 0x78]);
<ide> offset and cropped by the `start` and `end` indices.
<ide> Specifying `end` greater than [`buf.length`][] will return the same result as
<ide> that of `end` equal to [`buf.length`][].
<ide>
<add>This method is inherited from [`TypedArray#subarray()`][].
<add>
<ide> Modifying the new `Buffer` slice will modify the memory in the original `Buffer`
<ide> because the allocated memory of the two objects overlap.
<ide>
<ide> buf2.swap64();
<ide> // Throws ERR_INVALID_BUFFER_SIZE.
<ide> ```
<ide>
<del>JavaScript cannot encode 64-bit integers. This method is intended
<del>for working with 64-bit floats.
<del>
<ide> ### `buf.toJSON()`
<ide> <!-- YAML
<ide> added: v0.9.2
<ide> added: v0.9.2
<ide> Returns a JSON representation of `buf`. [`JSON.stringify()`][] implicitly calls
<ide> this function when stringifying a `Buffer` instance.
<ide>
<add>`Buffer.from()` accepts objects in the format returned from this method.
<add>In particular, `Buffer.from(buf.toJSON())` works like `Buffer.from(buf)`.
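<add>
<add>For example, the following sketch shows this round trip directly:
<add>
<add>```js
<add>const buf = Buffer.from([0x01, 0x02, 0x03]);
<add>const copy = Buffer.from(buf.toJSON());
<add>
<add>console.log(copy);
<add>// Prints: <Buffer 01 02 03>
<add>```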
<add>
<ide> ```js
<ide> const buf = Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5]);
<ide> const json = JSON.stringify(buf);
<ide> console.log(json);
<ide>
<ide> const copy = JSON.parse(json, (key, value) => {
<ide> return value && value.type === 'Buffer' ?
<del> Buffer.from(value.data) :
<add> Buffer.from(value) :
<ide> value;
<ide> });
<ide>
<ide> added: v0.1.90
<ide>
<ide> Decodes `buf` to a string according to the specified character encoding in
<ide> `encoding`. `start` and `end` may be passed to decode only a subset of `buf`.
<del>If a byte sequence in the input is not valid in the given `encoding` then
<del>it is replaced with the replacement character `U+FFFD`.
<add>
<add>If `encoding` is `'utf8'` and a byte sequence in the input is not valid UTF-8,
<add>then each invalid byte is replaced with the replacement character `U+FFFD`.
<ide>
<ide> The maximum length of a string instance (in UTF-16 code units) is available
<ide> as [`buffer.constants.MAX_STRING_LENGTH`][].
<ide> for (let i = 0; i < 26; i++) {
<ide> buf1[i] = i + 97;
<ide> }
<ide>
<del>console.log(buf1.toString('ascii'));
<add>console.log(buf1.toString('utf8'));
<ide> // Prints: abcdefghijklmnopqrstuvwxyz
<del>console.log(buf1.toString('ascii', 0, 5));
<add>console.log(buf1.toString('utf8', 0, 5));
<ide> // Prints: abcde
<ide>
<ide> const buf2 = Buffer.from('tést');
<ide> added: v0.1.90
<ide> * `string` {string} String to write to `buf`.
<ide> * `offset` {integer} Number of bytes to skip before starting to write `string`.
<ide> **Default:** `0`.
<del>* `length` {integer} Number of bytes to write. **Default:**
<add>* `length` {integer} Maximum number of bytes to write. **Default:**
<ide> `buf.length - offset`.
<ide> * `encoding` {string} The character encoding of `string`. **Default:** `'utf8'`.
<ide> * Returns: {integer} Number of bytes written.
<ide> added: v12.0.0
<ide> satisfy: `0 <= offset <= buf.length - 8`. **Default:** `0`.
<ide> * Returns: {integer} `offset` plus the number of bytes written.
<ide>
<del>Writes `value` to `buf` at the specified `offset` with specified endian
<del>format (`writeBigInt64BE()` writes big endian, `writeBigInt64LE()` writes little
<del>endian).
<add>Writes `value` to `buf` at the specified `offset` with the specified
<add>[endianness][] (`writeBigInt64BE()` writes as big endian, `writeBigInt64LE()`
<add>writes as little endian).
<ide>
<ide> `value` is interpreted and written as a two's complement signed integer.
<ide>
<ide> added: v12.0.0
<ide> satisfy: `0 <= offset <= buf.length - 8`. **Default:** `0`.
<ide> * Returns: {integer} `offset` plus the number of bytes written.
<ide>
<del>Writes `value` to `buf` at the specified `offset` with specified endian
<del>format (`writeBigUInt64BE()` writes big endian, `writeBigUInt64LE()` writes
<add>Writes `value` to `buf` at the specified `offset` with specified [endianness][]
<add>(`writeBigUInt64BE()` writes as big endian, `writeBigUInt64LE()` writes as
<ide> little endian).
<ide>
<ide> ```js
<ide> changes:
<ide> satisfy `0 <= offset <= buf.length - 8`. **Default:** `0`.
<ide> * Returns: {integer} `offset` plus the number of bytes written.
<ide>
<del>Writes `value` to `buf` at the specified `offset` with specified endian
<del>format (`writeDoubleBE()` writes big endian, `writeDoubleLE()` writes little
<del>endian). `value` *should* be a valid 64-bit double. Behavior is undefined when
<del>`value` is anything other than a 64-bit double.
<add>Writes `value` to `buf` at the specified `offset` with the specified
<add>[endianness][] (`writeDoubleBE()` writes as big endian, `writeDoubleLE()` writes
<add>as little endian). `value` must be a JavaScript number. Behavior is undefined
<add>when `value` is anything other than a JavaScript number.
<ide>
<ide> ```js
<ide> const buf = Buffer.allocUnsafe(8);
<ide> changes:
<ide> satisfy `0 <= offset <= buf.length - 4`. **Default:** `0`.
<ide> * Returns: {integer} `offset` plus the number of bytes written.
<ide>
<del>Writes `value` to `buf` at the specified `offset` with specified endian
<del>format (`writeFloatBE()` writes big endian, `writeFloatLE()` writes little
<del>endian). `value` *should* be a valid 32-bit float. Behavior is undefined when
<del>`value` is anything other than a 32-bit float.
<add>Writes `value` to `buf` at the specified `offset` with specified [endianness][]
<add>(`writeFloatBE()` writes as big endian, `writeFloatLE()` writes as little
<add>endian). `value` must be a JavaScript number. Behavior is undefined when
<add>`value` is anything other than a JavaScript number.
<ide>
<ide> ```js
<ide> const buf = Buffer.allocUnsafe(4);
<ide> changes:
<ide> satisfy `0 <= offset <= buf.length - 1`. **Default:** `0`.
<ide> * Returns: {integer} `offset` plus the number of bytes written.
<ide>
<del>Writes `value` to `buf` at the specified `offset`. `value` *should* be a valid
<add>Writes `value` to `buf` at the specified `offset`. `value` must be a valid
<ide> signed 8-bit integer. Behavior is undefined when `value` is anything other than
<ide> a signed 8-bit integer.
<ide>
<ide> changes:
<ide> satisfy `0 <= offset <= buf.length - 2`. **Default:** `0`.
<ide> * Returns: {integer} `offset` plus the number of bytes written.
<ide>
<del>Writes `value` to `buf` at the specified `offset` with specified endian
<del>format (`writeInt16BE()` writes big endian, `writeInt16LE()` writes little
<del>endian). `value` *should* be a valid signed 16-bit integer. Behavior is
<add>Writes `value` to `buf` at the specified `offset` with the specified
<add>[endianness][] (`writeInt16BE()` writes as big endian, `writeInt16LE()` writes
<add>as little endian). `value` must be a valid signed 16-bit integer. Behavior is
<ide> undefined when `value` is anything other than a signed 16-bit integer.
<ide>
<ide> `value` is interpreted and written as a two's complement signed integer.
<ide> changes:
<ide> satisfy `0 <= offset <= buf.length - 4`. **Default:** `0`.
<ide> * Returns: {integer} `offset` plus the number of bytes written.
<ide>
<del>Writes `value` to `buf` at the specified `offset` with specified endian
<del>format (`writeInt32BE()` writes big endian, `writeInt32LE()` writes little
<del>endian). `value` *should* be a valid signed 32-bit integer. Behavior is
<add>Writes `value` to `buf` at the specified `offset` with the specified
<add>[endianness][] (`writeInt32BE()` writes as big endian, `writeInt32LE()` writes
<add>as little endian). `value` must be a valid signed 32-bit integer. Behavior is
<ide> undefined when `value` is anything other than a signed 32-bit integer.
<ide>
<ide> `value` is interpreted and written as a two's complement signed integer.
<ide> changes:
<ide> satisfy `0 <= offset <= buf.length - 1`. **Default:** `0`.
<ide> * Returns: {integer} `offset` plus the number of bytes written.
<ide>
<del>Writes `value` to `buf` at the specified `offset`. `value` *should* be a
<add>Writes `value` to `buf` at the specified `offset`. `value` must be a
<ide> valid unsigned 8-bit integer. Behavior is undefined when `value` is anything
<ide> other than an unsigned 8-bit integer.
<ide>
<ide> changes:
<ide> satisfy `0 <= offset <= buf.length - 2`. **Default:** `0`.
<ide> * Returns: {integer} `offset` plus the number of bytes written.
<ide>
<del>Writes `value` to `buf` at the specified `offset` with specified endian
<del>format (`writeUInt16BE()` writes big endian, `writeUInt16LE()` writes little
<del>endian). `value` should be a valid unsigned 16-bit integer. Behavior is
<add>Writes `value` to `buf` at the specified `offset` with the specified
<add>[endianness][] (`writeUInt16BE()` writes as big endian, `writeUInt16LE()` writes
<add>as little endian). `value` must be a valid unsigned 16-bit integer. Behavior is
<ide> undefined when `value` is anything other than an unsigned 16-bit integer.
<ide>
<ide> ```js
<ide> changes:
<ide> satisfy `0 <= offset <= buf.length - 4`. **Default:** `0`.
<ide> * Returns: {integer} `offset` plus the number of bytes written.
<ide>
<del>Writes `value` to `buf` at the specified `offset` with specified endian
<del>format (`writeUInt32BE()` writes big endian, `writeUInt32LE()` writes little
<del>endian). `value` should be a valid unsigned 32-bit integer. Behavior is
<add>Writes `value` to `buf` at the specified `offset` with the specified
<add>[endianness][] (`writeUInt32BE()` writes as big endian, `writeUInt32LE()` writes
<add>as little endian). `value` must be a valid unsigned 32-bit integer. Behavior is
<ide> undefined when `value` is anything other than an unsigned 32-bit integer.
<ide>
<ide> ```js
<ide> console.log(buf);
<ide> // Prints: <Buffer ab 90 78 56 34 12>
<ide> ```
<ide>
<add>### `new Buffer(array)`
<add><!-- YAML
<add>deprecated: v6.0.0
<add>changes:
<add> - version: v10.0.0
<add> pr-url: https://github.com/nodejs/node/pull/19524
<add> description: Calling this constructor emits a deprecation warning when
<add> run from code outside the `node_modules` directory.
<add> - version: v7.2.1
<add> pr-url: https://github.com/nodejs/node/pull/9529
<add> description: Calling this constructor no longer emits a deprecation warning.
<add> - version: v7.0.0
<add> pr-url: https://github.com/nodejs/node/pull/8169
<add> description: Calling this constructor emits a deprecation warning now.
<add>-->
<add>
<add>> Stability: 0 - Deprecated: Use [`Buffer.from(array)`][] instead.
<add>
<add>* `array` {integer[]} An array of bytes to copy from.
<add>
<add>See [`Buffer.from(array)`][].
<add>
<add>### `new Buffer(arrayBuffer[, byteOffset[, length]])`
<add><!-- YAML
<add>added: v3.0.0
<add>deprecated: v6.0.0
<add>changes:
<add> - version: v10.0.0
<add> pr-url: https://github.com/nodejs/node/pull/19524
<add> description: Calling this constructor emits a deprecation warning when
<add> run from code outside the `node_modules` directory.
<add> - version: v7.2.1
<add> pr-url: https://github.com/nodejs/node/pull/9529
<add> description: Calling this constructor no longer emits a deprecation warning.
<add> - version: v7.0.0
<add> pr-url: https://github.com/nodejs/node/pull/8169
<add> description: Calling this constructor emits a deprecation warning now.
<add> - version: v6.0.0
<add> pr-url: https://github.com/nodejs/node/pull/4682
<add> description: The `byteOffset` and `length` parameters are supported now.
<add>-->
<add>
<add>> Stability: 0 - Deprecated: Use
<add>> [`Buffer.from(arrayBuffer[, byteOffset[, length]])`][`Buffer.from(arrayBuf)`]
<add>> instead.
<add>
<add>* `arrayBuffer` {ArrayBuffer|SharedArrayBuffer} An [`ArrayBuffer`][],
<add> [`SharedArrayBuffer`][] or the `.buffer` property of a [`TypedArray`][].
<add>* `byteOffset` {integer} Index of first byte to expose. **Default:** `0`.
<add>* `length` {integer} Number of bytes to expose.
<add> **Default:** `arrayBuffer.byteLength - byteOffset`.
<add>
<add>See
<add>[`Buffer.from(arrayBuffer[, byteOffset[, length]])`][`Buffer.from(arrayBuf)`].
<add>
<add>### `new Buffer(buffer)`
<add><!-- YAML
<add>deprecated: v6.0.0
<add>changes:
<add> - version: v10.0.0
<add> pr-url: https://github.com/nodejs/node/pull/19524
<add> description: Calling this constructor emits a deprecation warning when
<add> run from code outside the `node_modules` directory.
<add> - version: v7.2.1
<add> pr-url: https://github.com/nodejs/node/pull/9529
<add> description: Calling this constructor no longer emits a deprecation warning.
<add> - version: v7.0.0
<add> pr-url: https://github.com/nodejs/node/pull/8169
<add> description: Calling this constructor emits a deprecation warning now.
<add>-->
<add>
<add>> Stability: 0 - Deprecated: Use [`Buffer.from(buffer)`][] instead.
<add>
<add>* `buffer` {Buffer|Uint8Array} An existing `Buffer` or [`Uint8Array`][] from
<add> which to copy data.
<add>
<add>See [`Buffer.from(buffer)`][].
<add>
<add>### `new Buffer(size)`
<add><!-- YAML
<add>deprecated: v6.0.0
<add>changes:
<add> - version: v10.0.0
<add> pr-url: https://github.com/nodejs/node/pull/19524
<add> description: Calling this constructor emits a deprecation warning when
<add> run from code outside the `node_modules` directory.
<add> - version: v8.0.0
<add> pr-url: https://github.com/nodejs/node/pull/12141
<add> description: The `new Buffer(size)` will return zero-filled memory by
<add> default.
<add> - version: v7.2.1
<add> pr-url: https://github.com/nodejs/node/pull/9529
<add> description: Calling this constructor no longer emits a deprecation warning.
<add> - version: v7.0.0
<add> pr-url: https://github.com/nodejs/node/pull/8169
<add> description: Calling this constructor emits a deprecation warning now.
<add>-->
<add>
<add>> Stability: 0 - Deprecated: Use [`Buffer.alloc()`][] instead (also see
<add>> [`Buffer.allocUnsafe()`][]).
<add>
<add>* `size` {integer} The desired length of the new `Buffer`.
<add>
<add>See [`Buffer.alloc()`][] and [`Buffer.allocUnsafe()`][]. This variant of the
<add>constructor is equivalent to [`Buffer.allocUnsafe()`][], although using
<add>[`Buffer.alloc()`][] is recommended in code paths that are not critical to
<add>performance.
<add>
<add>### `new Buffer(string[, encoding])`
<add><!-- YAML
<add>deprecated: v6.0.0
<add>changes:
<add> - version: v10.0.0
<add> pr-url: https://github.com/nodejs/node/pull/19524
<add> description: Calling this constructor emits a deprecation warning when
<add> run from code outside the `node_modules` directory.
<add> - version: v7.2.1
<add> pr-url: https://github.com/nodejs/node/pull/9529
<add> description: Calling this constructor no longer emits a deprecation warning.
<add> - version: v7.0.0
<add> pr-url: https://github.com/nodejs/node/pull/8169
<add> description: Calling this constructor emits a deprecation warning now.
<add>-->
<add>
<add>> Stability: 0 - Deprecated:
<add>> Use [`Buffer.from(string[, encoding])`][`Buffer.from(string)`] instead.
<add>
<add>* `string` {string} String to encode.
<add>* `encoding` {string} The encoding of `string`. **Default:** `'utf8'`.
<add>
<add>See [`Buffer.from(string[, encoding])`][`Buffer.from(string)`].
<add>
<ide> ## `buffer.INSPECT_MAX_BYTES`
<ide> <!-- YAML
<ide> added: v0.5.4
<ide> deprecated: v6.0.0
<ide>
<ide> > Stability: 0 - Deprecated: Use [`Buffer.allocUnsafeSlow()`][] instead.
<ide>
<del>Returns an un-pooled `Buffer`.
<del>
<del>In order to avoid the garbage collection overhead of creating many individually
<del>allocated `Buffer` instances, by default allocations under 4KB are sliced from a
<del>single larger allocated object.
<del>
<del>In the case where a developer may need to retain a small chunk of memory from a
<del>pool for an indeterminate amount of time, it may be appropriate to create an
<del>un-pooled `Buffer` instance using `SlowBuffer` then copy out the relevant bits.
<del>
<del>```js
<del>// Need to keep around a few small chunks of memory.
<del>const store = [];
<del>
<del>socket.on('readable', () => {
<del> let data;
<del> while (null !== (data = readable.read())) {
<del> // Allocate for retained data.
<del> const sb = SlowBuffer(10);
<del>
<del> // Copy the data into the new allocation.
<del> data.copy(sb, 0, 0, 10);
<del>
<del> store.push(sb);
<del> }
<del>});
<del>```
<del>
<del>Use of `SlowBuffer` should be used only as a last resort *after* a developer
<del>has observed undue memory retention in their applications.
<add>See [`Buffer.allocUnsafeSlow()`][]. This was never a class in the sense that
<add>the constructor always returned a `Buffer` instance, rather than a `SlowBuffer`
<add>instance.
<ide>
<ide> ### `new SlowBuffer(size)`
<ide> <!-- YAML
<ide> deprecated: v6.0.0
<ide>
<ide> * `size` {integer} The desired length of the new `SlowBuffer`.
<ide>
<del>Allocates a new `Buffer` of `size` bytes. If `size` is larger than
<del>[`buffer.constants.MAX_LENGTH`][] or smaller than 0, [`ERR_INVALID_OPT_VALUE`][]
<del>is thrown. A zero-length `Buffer` is created if `size` is 0.
<del>
<del>The underlying memory for `SlowBuffer` instances is *not initialized*. The
<del>contents of a newly created `SlowBuffer` are unknown and may contain sensitive
<del>data. Use [`buf.fill(0)`][`buf.fill()`] to initialize a `SlowBuffer` with
<del>zeroes.
<del>
<del>```js
<del>const { SlowBuffer } = require('buffer');
<del>
<del>const buf = new SlowBuffer(5);
<del>
<del>console.log(buf);
<del>// Prints: (contents may vary): <Buffer 78 e0 82 02 01>
<del>
<del>buf.fill(0);
<del>
<del>console.log(buf);
<del>// Prints: <Buffer 00 00 00 00 00>
<del>```
<add>See [`Buffer.allocUnsafeSlow()`][].
<ide>
<ide> ## Buffer Constants
<ide> <!-- YAML
<ide> added: v8.2.0
<ide>
<ide> * {integer} The largest size allowed for a single `Buffer` instance.
<ide>
<del>On 32-bit architectures, this value is `(2^30)-1` (~1GB).
<del>On 64-bit architectures, this value is `(2^31)-1` (~2GB).
<add>On 32-bit architectures, this value currently is `(2^30)-1` (~1GB).
<add>On 64-bit architectures, this value currently is `(2^31)-1` (~2GB).
<ide>
<ide> This value is also available as [`buffer.kMaxLength`][].
<ide>
<ide> in UTF-16 code units.
<ide>
<ide> This value may depend on the JS engine that is being used.
<ide>
<del>[RFC 1345]: https://tools.ietf.org/html/rfc1345
<add>## `Buffer.from()`, `Buffer.alloc()`, and `Buffer.allocUnsafe()`
<add>
<add>In versions of Node.js prior to 6.0.0, `Buffer` instances were created using the
<add>`Buffer` constructor function, which allocates the returned `Buffer`
<add>differently based on what arguments are provided:
<add>
<add>* Passing a number as the first argument to `Buffer()` (e.g. `new Buffer(10)`)
<add> allocates a new `Buffer` object of the specified size. Prior to Node.js 8.0.0,
<add> the memory allocated for such `Buffer` instances is *not* initialized and
<add> *can contain sensitive data*. Such `Buffer` instances *must* be subsequently
<add> initialized by using either [`buf.fill(0)`][`buf.fill()`] or by writing to the
<add> entire `Buffer` before reading data from the `Buffer`.
<add> While this behavior is *intentional* to improve performance,
<add> development experience has demonstrated that a more explicit distinction is
<add> required between creating a fast-but-uninitialized `Buffer` versus creating a
<add> slower-but-safer `Buffer`. Since Node.js 8.0.0, `Buffer(num)` and `new
<add> Buffer(num)` return a `Buffer` with initialized memory.
<add>* Passing a string, array, or `Buffer` as the first argument copies the
<add> passed object's data into the `Buffer`.
<add>* Passing an [`ArrayBuffer`][] or a [`SharedArrayBuffer`][] returns a `Buffer`
<add> that shares allocated memory with the given array buffer.
<add>
<add>Because the behavior of `new Buffer()` is different depending on the type of the
<add>first argument, security and reliability issues can be inadvertently introduced
<add>into applications when argument validation or `Buffer` initialization is not
<add>performed.
<add>
<add>For example, if an attacker can cause an application to receive a number where
<add>a string is expected, the application may call `new Buffer(100)`
<add>instead of `new Buffer("100")`, leading it to allocate a 100 byte buffer instead
<add>of allocating a 3 byte buffer with content `"100"`. This is commonly possible
<add>using JSON API calls. Since JSON distinguishes between numeric and string types,
<add>it allows injection of numbers where a naively written application that does not
<add>validate its input sufficiently might expect to always receive a string.
<add>Before Node.js 8.0.0, the 100 byte buffer might contain
<add>arbitrary pre-existing in-memory data, so may be used to expose in-memory
<add>secrets to a remote attacker. Since Node.js 8.0.0, exposure of memory cannot
<add>occur because the data is zero-filled. However, other attacks are still
<add>possible, such as causing very large buffers to be allocated by the server,
<add>leading to performance degradation or crashing on memory exhaustion.
<add>
<add>To make the creation of `Buffer` instances more reliable and less error-prone,
<add>the various forms of the `new Buffer()` constructor have been **deprecated**
<add>and replaced by separate `Buffer.from()`, [`Buffer.alloc()`][], and
<add>[`Buffer.allocUnsafe()`][] methods.
<add>
<add>*Developers should migrate all existing uses of the `new Buffer()` constructors
<add>to one of these new APIs.*
<add>
<add>* [`Buffer.from(array)`][] returns a new `Buffer` that *contains a copy* of the
<add> provided octets.
<add>* [`Buffer.from(arrayBuffer[, byteOffset[, length]])`][`Buffer.from(arrayBuf)`]
<add> returns a new `Buffer` that *shares the same allocated memory* as the given
<add> [`ArrayBuffer`][].
<add>* [`Buffer.from(buffer)`][] returns a new `Buffer` that *contains a copy* of the
<add> contents of the given `Buffer`.
<add>* [`Buffer.from(string[, encoding])`][`Buffer.from(string)`] returns a new
<add> `Buffer` that *contains a copy* of the provided string.
<add>* [`Buffer.alloc(size[, fill[, encoding]])`][`Buffer.alloc()`] returns a new
<add> initialized `Buffer` of the specified size. This method is slower than
<add> [`Buffer.allocUnsafe(size)`][`Buffer.allocUnsafe()`] but guarantees that newly
<add> created `Buffer` instances never contain old data that is potentially
<add> sensitive. A `TypeError` will be thrown if `size` is not a number.
<add>* [`Buffer.allocUnsafe(size)`][`Buffer.allocUnsafe()`] and
<add> [`Buffer.allocUnsafeSlow(size)`][`Buffer.allocUnsafeSlow()`] each return a
<add> new uninitialized `Buffer` of the specified `size`. Because the `Buffer` is
<add> uninitialized, the allocated segment of memory might contain old data that is
<add> potentially sensitive.
<add>
<add>`Buffer` instances returned by [`Buffer.allocUnsafe()`][] *may* be allocated off
<add>a shared internal memory pool if `size` is less than or equal to half
<add>[`Buffer.poolSize`][]. Instances returned by [`Buffer.allocUnsafeSlow()`][]
<add>*never* use the shared internal memory pool.
<add>
<add>### The `--zero-fill-buffers` command line option
<add><!-- YAML
<add>added: v5.10.0
<add>-->
<add>
<add>Node.js can be started using the `--zero-fill-buffers` command line option to
<add>cause all newly-allocated `Buffer` instances to be zero-filled upon creation by
<add>default. Without the option, buffers created with [`Buffer.allocUnsafe()`][],
<add>[`Buffer.allocUnsafeSlow()`][], and `new SlowBuffer(size)` are not zero-filled.
<add>Use of this flag can have a measurable negative impact on performance. Use the
<add>`--zero-fill-buffers` option only when necessary to enforce that newly allocated
<add>`Buffer` instances cannot contain old data that is potentially sensitive.
<add>
<add>```console
<add>$ node --zero-fill-buffers
<add>> Buffer.allocUnsafe(5);
<add><Buffer 00 00 00 00 00>
<add>```
<add>
<add>### What makes `Buffer.allocUnsafe()` and `Buffer.allocUnsafeSlow()` "unsafe"?
<add>
<add>When calling [`Buffer.allocUnsafe()`][] and [`Buffer.allocUnsafeSlow()`][], the
<add>segment of allocated memory is *uninitialized* (it is not zeroed-out). While
<add>this design makes the allocation of memory quite fast, the allocated segment of
<add>memory might contain old data that is potentially sensitive. Using a `Buffer`
<add>created by [`Buffer.allocUnsafe()`][] without *completely* overwriting the
<add>memory can allow this old data to be leaked when the `Buffer` memory is read.
<add>
<add>While there are clear performance advantages to using
<add>[`Buffer.allocUnsafe()`][], extra care *must* be taken in order to avoid
<add>introducing security vulnerabilities into an application.
<add>
<ide> [RFC 4648, Section 5]: https://tools.ietf.org/html/rfc4648#section-5
<ide> [WHATWG Encoding Standard]: https://encoding.spec.whatwg.org/
<del>[`ArrayBuffer#slice()`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer/slice
<ide> [`ArrayBuffer`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer
<ide> [`Buffer.alloc()`]: #buffer_class_method_buffer_alloc_size_fill_encoding
<ide> [`Buffer.allocUnsafe()`]: #buffer_class_method_buffer_allocunsafe_size
<ide> This value may depend on the JS engine that is being used.
<ide> [`String#lastIndexOf()`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/lastIndexOf
<ide> [`String.prototype.length`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/length
<ide> [`TypedArray.from()`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray/from
<add>[`TypedArray#set()`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray/set
<add>[`TypedArray#slice()`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray/slice
<add>[`TypedArray#subarray()`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray/subarray
<ide> [`TypedArray`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray
<ide> [`Uint32Array`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint32Array
<ide> [`Uint8Array`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array
<ide> This value may depend on the JS engine that is being used.
<ide> [`buf.keys()`]: #buffer_buf_keys
<ide> [`buf.length`]: #buffer_buf_length
<ide> [`buf.slice()`]: #buffer_buf_slice_start_end
<add>[`buf.toString()`]: #buffer_buf_tostring_encoding_start_end
<ide> [`buf.values()`]: #buffer_buf_values
<ide> [`buffer.constants.MAX_LENGTH`]: #buffer_buffer_constants_max_length
<ide> [`buffer.constants.MAX_STRING_LENGTH`]: #buffer_buffer_constants_max_string_length
<ide> [`buffer.kMaxLength`]: #buffer_buffer_kmaxlength
<ide> [`util.inspect()`]: util.html#util_util_inspect_object_options
<add>[ASCII]: https://en.wikipedia.org/wiki/ASCII
<add>[Base64]: https://en.wikipedia.org/wiki/Base64
<add>[ISO-8859-1]: https://en.wikipedia.org/wiki/ISO-8859-1
<add>[UTF-8]: https://en.wikipedia.org/wiki/UTF-8
<add>[UTF-16]: https://en.wikipedia.org/wiki/UTF-16
<add>[binary strings]: https://developer.mozilla.org/en-US/docs/Web/API/DOMString/Binary
<add>[endianness]: https://en.wikipedia.org/wiki/Endianness
<ide> [iterator]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols | 1 |
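The documentation patch above turns on the distinction between the `Buffer` allocation APIs it describes. A minimal sketch of that distinction in practice (standard Node.js `Buffer` methods; the sizes and values below are arbitrary):

```js
const { Buffer } = require('buffer');

// Zero-filled allocation: slower, but never exposes old memory.
const safe = Buffer.alloc(8);
console.log(safe); // <Buffer 00 00 00 00 00 00 00 00>

// Uninitialized allocation: fast, but contents are unspecified until overwritten.
const unsafe = Buffer.allocUnsafe(8);
unsafe.fill(0); // initialize before use, as the patched docs recommend

// Copying constructors: the argument type is explicit rather than overloaded.
const fromString = Buffer.from('100');              // 3 bytes: 0x31 0x30 0x30
const fromArray = Buffer.from([0x31, 0x30, 0x30]);  // copy of the given octets

// A bare number passed to Buffer.from() throws instead of allocating,
// which avoids the new Buffer(number) ambiguity the deprecation text warns about.
try {
  Buffer.from(100);
} catch (err) {
  console.log(err.name); // TypeError
}
```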
Python | Python | increase version for alembic due to | 14427019e84e246bc43994360efc3130cf4729df | <ide><path>setup.py
<ide> def run_tests(self):
<ide> zip_safe=False,
<ide> scripts=['airflow/bin/airflow'],
<ide> install_requires=[
<del> 'alembic>=0.8.0, <0.9',
<add> 'alembic>=0.8.3, <0.9',
<ide> 'chartkick>=0.4.2, < 0.5',
<ide> 'croniter>=0.3.8, <0.4',
<ide> 'dill>=0.2.2, <0.3', | 1 |
Javascript | Javascript | fix objectpath formatting | 19dbe70f46da8bd9831da4de429b6768ac0fb92a | <ide><path>utils/packLDrawModel.js
<ide> function parseObject( fileName, isRoot ) {
<ide>
<ide> }
<ide>
<del> var objectPath = path.join( prefix, fileName );
<add> var objectPath = path.join( prefix, fileName ).trim().replace( /\\/g, '/' );
<ide>
<ide> if ( ! objectContent ) {
<ide> | 1 |
Go | Go | tls minversion too low for now | 4004a39d53d1922a764eddcf2326c660aec308c3 | <ide><path>daemon/logger/splunk/splunk.go
<ide> func New(info logger.Info) (logger.Logger, error) {
<ide> return nil, fmt.Errorf("%s: %s is expected", driverName, splunkTokenKey)
<ide> }
<ide>
<del> tlsConfig := &tls.Config{}
<add> // FIXME set minimum TLS version for splunk (see https://github.com/moby/moby/issues/42443)
<add> tlsConfig := &tls.Config{} //nolint: gosec // G402: TLS MinVersion too low.
<ide>
<ide> // Splunk is using autogenerated certificates by default,
<ide> // allow users to trust them with skipping verification | 1 |
PHP | PHP | fix doc blocks and isautolayoutenabled() method | 6407b26d69a0f38a40956d87a0763ca41a2b46ab | <ide><path>src/View/ViewBuilder.php
<ide> class ViewBuilder implements JsonSerializable, Serializable
<ide> /**
<ide> * The subdirectory to the template.
<ide> *
<del> * @var string
<add> * @var string|null
<ide> */
<ide> protected $_templatePath;
<ide>
<ide> /**
<ide> * The template file to render.
<ide> *
<del> * @var string
<add> * @var string|null
<ide> */
<ide> protected $_template;
<ide>
<ide> class ViewBuilder implements JsonSerializable, Serializable
<ide> /**
<ide> * The layout name to render.
<ide> *
<del> * @var string
<add> * @var string|null|false
<ide> */
<ide> protected $_layout;
<ide>
<ide> /**
<ide> * Whether or not autoLayout should be enabled.
<ide> *
<del> * @var bool
<add> * @var bool|null
<ide> */
<ide> protected $_autoLayout;
<ide>
<ide> /**
<ide> * The layout path to build the view with.
<ide> *
<del> * @var string
<add> * @var string|null
<ide> */
<ide> protected $_layoutPath;
<ide>
<ide> /**
<ide> * The view variables to use
<ide> *
<del> * @var string
<add> * @var string|null
<ide> */
<ide> protected $_name;
<ide>
<ide> class ViewBuilder implements JsonSerializable, Serializable
<ide> * Can either use plugin notation, a short name
<ide> * or a fully namespaced classname.
<ide> *
<del> * @var string
<add> * @var string|null
<ide> */
<ide> protected $_className;
<ide>
<ide> public function enableAutoLayout($enable = true)
<ide> */
<ide> public function isAutoLayoutEnabled()
<ide> {
<del> return $this->_autoLayout;
<add> return $this->_autoLayout !== null ? $this->_autoLayout : true;
<ide> }
<ide>
<ide> /** | 1 |
Ruby | Ruby | recommit env changes | d7a61617512a120454fd3a7edd7126e050cc4633 | <ide><path>Library/Homebrew/extend/ENV/std.rb
<ide> def gcc_4_0_1
<ide> self.cxx = "#{MacOS.dev_tools_path}/g++-4.0"
<ide> replace_in_cflags '-O4', '-O3'
<ide> set_cpu_cflags '-march=nocona -mssse3'
<del> @compiler = :gcc
<add> @compiler = :gcc_4_0
<ide> end
<ide> alias_method :gcc_4_0, :gcc_4_0_1
<ide>
<ide> def gcc
<ide> self.cc = MacOS.locate("gcc-4.2")
<ide> self.cxx = MacOS.locate("g++-4.2")
<ide>
<del> unless cc
<add> if cc.empty?
<ide> self.cc = "#{HOMEBREW_PREFIX}/bin/gcc-4.2"
<ide> self.cxx = "#{HOMEBREW_PREFIX}/bin/g++-4.2"
<ide> raise "GCC could not be found" unless File.exist? cc
<ide><path>Library/Homebrew/os/mac.rb
<ide> def sdk_path(v = version)
<ide>
<ide> def default_cc
<ide> cc = locate 'cc'
<del> Pathname.new(cc).realpath.basename.to_s rescue nil
<add> cc.realpath.basename.to_s rescue nil
<ide> end
<ide>
<ide> def default_compiler
<ide> case default_cc
<add> when /^gcc-4.0/ then :gcc_4_0
<ide> when /^gcc/ then :gcc
<ide> when /^llvm/ then :llvm
<ide> when "clang" then :clang | 2 |
Javascript | Javascript | add test case for use strict inserting | 78e9d83cfd7a35f97668b9d085d260a97303e3d1 | <ide><path>test/configCases/code-generation/use-strict/harmony-with-strict.js
<add>"use strict";
<add>export default "a";
<ide><path>test/configCases/code-generation/use-strict/harmony-with-strict2.js
<add>"use strict";
<add>import * as a from "./harmony-without-strict2";
<add>export default a;
<add>import "./harmony-with-strict3"
<ide><path>test/configCases/code-generation/use-strict/harmony-with-strict3.js
<add>"use strict";
<add>export default "a";
<ide><path>test/configCases/code-generation/use-strict/harmony-without-strict.js
<add>export default "a";
<ide><path>test/configCases/code-generation/use-strict/harmony-without-strict2.js
<add>export default "a";
<ide><path>test/configCases/code-generation/use-strict/index.js
<add>"use strict";
<add>it("should include only one use strict per module", function() {
<add> require("./harmony-with-strict");
<add> require("./harmony-without-strict");
<add> require("./harmony-with-strict2");
<add>
<add> var fs = require("fs");
<add> var source = fs.readFileSync(__filename, "utf-8");
<add>
<add> var regExp = /\"use strict\";?\s*(.*)/g
<add> var match = regExp.exec(source);
<add> var matches = [];
<add> while(match) {
<add> matches.push(match[1]);
<add> match = regExp.exec(source);
<add> }
<add>
<add> matches.should.be.eql([
<add> "Object.defineProperty(exports, \"__esModule\", { value: true });",
<add> "Object.defineProperty(exports, \"__esModule\", { value: true });",
<add> "Object.defineProperty(exports, \"__esModule\", { value: true });",
<add> "/* unused harmony default export */ var _unused_webpack_default_export = \"a\";",
<add> "Object.defineProperty(exports, \"__esModule\", { value: true });",
<add> "it(\"should include only one use strict per module\", function() {"
<add> ]);
<add>});
<ide><path>test/configCases/code-generation/use-strict/webpack.config.js
<add>module.exports = {
<add> node: {
<add> __dirname: false,
<add> __filename: false
<add> }
<add>}; | 7 |
Javascript | Javascript | fix a typo in the example | 7e5e66fa3c5055b5e191ef0cd2c24eb433a280fc | <ide><path>src/ng/directive/input.js
<ide> var inputType = {
<ide> }]);
<ide> </script>
<ide> <form name="myForm" ng-controller="DateController as dateCtrl">
<del> <label for="exampleInput">Pick a between 8am and 5pm:</label>
<add> <label for="exampleInput">Pick a time between 8am and 5pm:</label>
<ide> <input type="time" id="exampleInput" name="input" ng-model="example.value"
<ide> placeholder="HH:mm:ss" min="08:00:00" max="17:00:00" required />
<ide> <div role="alert"> | 1 |
Ruby | Ruby | silence more warnings | 5e0a8668fb623aab76670adf3f277f8d1c676d96 | <ide><path>Library/Homebrew/extend/ENV/super.rb
<ide> def self.extended(base)
<ide> # done on the singleton class, because in MRI all ENV methods are defined
<ide> # on its singleton class, precluding the use of extend.
<ide> class << base
<add> alias_method :"old_[]", :[]
<ide> def [] key
<ide> if has_key? key
<ide> fetch(key)
<ide><path>Library/Homebrew/software_spec.rb
<ide> def initialize
<ide> @revision = 0
<ide> @prefix = '/usr/local'
<ide> @cellar = '/usr/local/Cellar'
<add> @root_url = nil
<ide> end
<ide>
<ide> # Checksum methods in the DSL's bottle block optionally take | 2 |
Javascript | Javascript | remove linter warning | 5703d81cb7f4faa29d201753c34f9464434fd9ce | <ide><path>src/extras/Earcut.js
<ide> function earcutLinked( ear, triangles, dim, minX, minY, invSize, pass ) {
<ide> ear = cureLocalIntersections( ear, triangles, dim );
<ide> earcutLinked( ear, triangles, dim, minX, minY, invSize, 2 );
<ide>
<del> // as a last resort, try splitting the remaining polygon into two
<add> // as a last resort, try splitting the remaining polygon into two
<ide>
<ide> } else if ( pass === 2 ) {
<ide> | 1 |
Go | Go | serialize dynamic network creation | ff59f1baaba50f32674b6b2d07730cd44bde8267 | <ide><path>libnetwork/controller.go
<ide> import (
<ide>
<ide> log "github.com/Sirupsen/logrus"
<ide> "github.com/docker/docker/pkg/discovery"
<add> "github.com/docker/docker/pkg/locker"
<ide> "github.com/docker/docker/pkg/plugins"
<ide> "github.com/docker/docker/pkg/stringid"
<ide> "github.com/docker/libnetwork/cluster"
<ide> type controller struct {
<ide> ingressSandbox *sandbox
<ide> sboxOnce sync.Once
<ide> agent *agent
<add> networkLocker *locker.Locker
<ide> agentInitDone chan struct{}
<ide> keys []*types.EncryptionKey
<ide> clusterConfigAvailable bool
<ide> func New(cfgOptions ...config.Option) (NetworkController, error) {
<ide> svcRecords: make(map[string]svcInfo),
<ide> serviceBindings: make(map[serviceKey]*service),
<ide> agentInitDone: make(chan struct{}),
<add> networkLocker: locker.New(),
<ide> }
<ide>
<ide> if err := c.initStores(); err != nil {
<ide> func (c *controller) RegisterDriver(networkType string, driver driverapi.Driver,
<ide> // NewNetwork creates a new network of the specified network type. The options
<ide> // are network specific and modeled in a generic way.
<ide> func (c *controller) NewNetwork(networkType, name string, id string, options ...NetworkOption) (Network, error) {
<add> if id != "" {
<add> c.networkLocker.Lock(id)
<add> defer c.networkLocker.Unlock(id)
<add>
<add> if _, err := c.NetworkByID(id); err == nil {
<add> return nil, NetworkNameError(id)
<add> }
<add> }
<add>
<ide> if !config.IsValidName(name) {
<ide> return nil, ErrInvalidName(name)
<ide> } | 1 |
Java | Java | fix y-coord on touches | fa884ee5e656b8cb03d000d49f1b1456a7b21784 | <ide><path>ReactAndroid/src/main/java/com/facebook/react/uimanager/events/TouchesHelper.java
<ide> * given {@param event} instance. This method use {@param reactTarget} parameter to set as a
<ide> * target view id associated with current gesture.
<ide> */
<del> private static WritableArray createsPointersArray(int reactTarget, TouchEvent event) {
<add> private static WritableArray createsPointersArray(int reactTarget, TouchEvent touchEvent) {
<add> MotionEvent event = touchEvent.getMotionEvent();
<add>
<ide> WritableArray touches = Arguments.createArray();
<del> MotionEvent motionEvent = event.getMotionEvent();
<del>
<del> // Calculate the coordinates for the target view.
<del> // The MotionEvent contains the X,Y of the touch in the coordinate space of the root view
<del> // The TouchEvent contains the X,Y of the touch in the coordinate space of the target view
<del> // Subtracting them allows us to get the coordinates of the target view's top left corner
<del> // We then use this when computing the view specific touches below
<del> // Since only one view is actually handling even multiple touches, the values are all relative
<del> // to this one target view.
<del> float targetViewCoordinateX = motionEvent.getX() - event.getViewX();
<del> float targetViewCoordinateY = motionEvent.getY() - event.getViewY();
<del>
<del> for (int index = 0; index < motionEvent.getPointerCount(); index++) {
<add>
<add> // Calculate raw-to-relative offset as getRawX() and getRawY() can only return values for the
<add> // pointer at index 0. We use those value to calculate "raw" coordinates for other pointers
<add> float offsetX = event.getRawX() - event.getX();
<add> float offsetY = event.getRawY() - event.getY();
<add>
<add> for (int index = 0; index < event.getPointerCount(); index++) {
<ide> WritableMap touch = Arguments.createMap();
<del> // pageX,Y values are relative to the RootReactView
<del> // the motionEvent already contains coordinates in that view
<del> touch.putDouble(PAGE_X_KEY, PixelUtil.toDIPFromPixel(motionEvent.getX(index)));
<del> touch.putDouble(PAGE_Y_KEY, PixelUtil.toDIPFromPixel(motionEvent.getY(index)));
<del> // locationX,Y values are relative to the target view
<del> // To compute the values for the view, we subtract that views location from the event X,Y
<del> float locationX = motionEvent.getX(index) - targetViewCoordinateX;
<del> float locationY = motionEvent.getY(index) - targetViewCoordinateY;
<del> touch.putDouble(LOCATION_X_KEY, PixelUtil.toDIPFromPixel(locationX));
<del> touch.putDouble(LOCATION_Y_KEY, PixelUtil.toDIPFromPixel(locationY));
<add> touch.putDouble(PAGE_X_KEY, PixelUtil.toDIPFromPixel(event.getX(index) + offsetX));
<add> touch.putDouble(PAGE_Y_KEY, PixelUtil.toDIPFromPixel(event.getY(index) + offsetY));
<add> touch.putDouble(LOCATION_X_KEY, PixelUtil.toDIPFromPixel(event.getX(index)));
<add> touch.putDouble(LOCATION_Y_KEY, PixelUtil.toDIPFromPixel(event.getY(index)));
<ide> touch.putInt(TARGET_KEY, reactTarget);
<del> touch.putDouble(TIMESTAMP_KEY, motionEvent.getEventTime());
<del> touch.putDouble(POINTER_IDENTIFIER_KEY, motionEvent.getPointerId(index));
<add> touch.putDouble(TIMESTAMP_KEY, event.getEventTime());
<add> touch.putDouble(POINTER_IDENTIFIER_KEY, event.getPointerId(index));
<ide> touches.pushMap(touch);
<ide> }
<ide> | 1 |
Text | Text | add fork to instructions | b236e5db97f09b685fc001abbe8131d6211851f0 | <ide><path>CONTRIBUTING.md
<ide> recipes, that does provide some argument for bringing it "in house".
<ide>
<ide> ### Getting started
<ide>
<del>To make changes to spaCy's code base, you need to clone the GitHub repository
<add>To make changes to spaCy's code base, you need to fork then clone the GitHub repository
<ide> and build spaCy from source. You'll need to make sure that you have a
<ide> development environment consisting of a Python distribution including header
<ide> files, a compiler, [pip](https://pip.pypa.io/en/latest/installing/), | 1 |
Text | Text | remove outdated todo's | 518a1f97f32ca63615567a50d6716f4ba5885acc | <ide><path>website/docs/api/data-formats.md
<ide> $ python -m spacy train config.cfg --paths.train ./corpus/train.spacy
<ide> This section defines settings and controls for the training and evaluation
<ide> process that are used when you run [`spacy train`](/api/cli#train).
<ide>
<del><!-- TODO: complete -->
<del>
<ide> | Name | Description |
<ide> | --------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
<ide> | `accumulate_gradient` | Whether to divide the batch up into substeps. Defaults to `1`. ~~int~~ |
<ide><path>website/docs/usage/training.md
<ide> workflows, from data preprocessing to training and packaging your model.
<ide>
<ide> ## Training config {#config}
<ide>
<del><!-- > #### Migration from spaCy v2.x
<del>>
<del>> TODO: once we have an answer for how to update the training command
<del>> (`spacy migrate`?), add details here -->
<del>
<ide> Training config files include all **settings and hyperparameters** for training
<ide> your model. Instead of providing lots of arguments on the command line, you only
<ide> need to pass your `config.cfg` file to [`spacy train`](/api/cli#train). Under | 2 |
Text | Text | add copy about how to curl sha256.txt | 59d23ad63dc75c68631480b2bdd98cbb67265d9d | <ide><path>README.md
<ide> documentation of the latest stable version.
<ide>
<ide> Stable, LTS and Nightly download directories all contain a *SHASUM256.txt*
<ide> file that lists the SHA checksums for each file available for
<del>download. To check that a downloaded file matches the checksum, run
<add>download.
<add>
<add>The *SHASUM256.txt* can be downloaded using curl.
<add>
<add>```
<add>$ curl -O https://nodejs.org/dist/vx.y.z/SHASUMS256.txt
<add>```
<add>
<add>To check that a downloaded file matches the checksum, run
<ide> it through `sha256sum` with a command such as:
<ide>
<ide> ``` | 1 |
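The added README snippet fetches `SHASUMS256.txt`; verifying a download then comes down to hashing the file and comparing against the listed digest. The README points to `sha256sum`, but the same check can be sketched in Node.js (the tarball name and the parsing of the checksum list are illustrative):

```js
const crypto = require('crypto');
const fs = require('fs');

// Compute the SHA-256 digest of a downloaded file.
function sha256OfFile(filePath) {
  return crypto.createHash('sha256').update(fs.readFileSync(filePath)).digest('hex');
}

// Lines in SHASUMS256.txt look like "<hex digest>  <file name>".
function expectedDigest(shasumsText, fileName) {
  const line = shasumsText.split('\n').find((l) => l.trim().endsWith(fileName));
  return line ? line.trim().split(/\s+/)[0] : null;
}

const tarball = 'node-vx.y.z.tar.gz'; // placeholder name
const shasums = fs.readFileSync('SHASUMS256.txt', 'utf8');
const ok = sha256OfFile(tarball) === expectedDigest(shasums, tarball);
console.log(ok ? 'checksum OK' : 'checksum mismatch');
```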
Text | Text | fix spelling mistakes | 92addc2c922aa834e7d2a3329123b5d3cf78da4f | <ide><path>guide/spanish/blockchain/features/index.md
<add>
<ide> ---
<ide> title: Features of BlockTech
<ide> localeTitle: Características de BlockTech
<ide> ---
<ide> ## Características de la tecnología Blockchain
<ide>
<del>Blockchain se usa casi siempre en lugar de Bitcoin y cryptocurrency. Sin embargo, hay muchos otros lugares donde se puede utilizar esta tecnología. Y apenas estamos empezando a arañar la superficie. Sabemos que con la tecnología Blockchain (BlockTech) va a ser enorme. ¿Pero qué lo hace único?
<add>Blockchain se usa mayormente en Bitcoin y otras criptomonedas, sin embargo, hay muchos otros lugares donde se puede utilizar esta tecnología y apenas estamos empezando a arañar la superficie. La tecnología Blockchain (BlockTech) tiene un futuro prometedor. ¿Pero qué la hace única?
<ide>
<ide> En este artículo, vamos a explorar las características clave de BlockTech.
<ide>
<ide> #### _\* Sistema descentralizado_
<ide>
<del>> Blockchain es una tecnología descentralizada, por diseño.
<add>> Blockchain es una tecnología descentralizada por diseño.
<ide>
<del>Cuando algo está controlado por una autoridad central, donde el poder para tomar una decisión está en manos del ápice de la administración, dicho sistema se denomina Sistema Centralizado. Los bancos, por ejemplo, son un sistema centralizado, donde el Gobernador es responsable de tomar decisiones.
<add>Cuando algo está controlado por una autoridad central, donde el poder para tomar una decisión está en manos del ápice de la administración, dicho sistema se denomina Sistema Centralizado. Los bancos, por ejemplo, son un sistema centralizado, donde el director es el responsable de tomar las decisiones.
<ide>
<ide> Por el contrario, cuando el poder está en manos de las personas o los usuarios, se dice que dicho sistema es un sistema descentralizado. La red de igual a igual, Torrent, por ejemplo, es un sistema descentralizado, donde el usuario tiene el control completo.
<ide>
<ide> 
<ide>
<del>#### _\* Libro mayor distribuido_
<add>#### _\* Registros distribuidos
<ide>
<del>> Las cadenas de bloques utilizan la tecnología de libro mayor distribuida (DLT) para almacenar y acceder a los datos.
<add>> Las cadenas de bloques utilizan la tecnología de registro distribuido (DLT) para almacenar y acceder a los datos.
<ide>
<del>Cuando algo se almacena en un Libro mayor distribuido, se hacen varias copias de él a través de la red al mismo tiempo. A diferencia de las bases de datos tradicionales, el libro mayor distribuido no tiene una base de datos central o una funcionalidad de administración.
<add>Cuando algo se almacena en un registro distribuido, se hacen varias copias de él a través de la red al mismo tiempo. A diferencia de las bases de datos tradicionales, el registro distribuido no tiene una base de datos central o una funcionalidad de administración.
<ide>
<ide> 
<ide>
<del>Cuando se aplica en un sistema descentralizado como Blockchain, cada usuario tiene una copia del libro mayor y participa en la verificación de la transacción. Esto le da a Blockchain la propiedad de la inmutabilidad y garantiza la seguridad. Dado que, los datos se distribuyen, no hay una versión centralizada de los datos para que los piratas informáticos se corrompan. Los datos y los registros son públicos y fácilmente verificables. Esto también elimina el punto único de falla.
<add>Cuando se aplica en un sistema descentralizado como Blockchain, cada usuario tiene una copia del registro y participa en la verificación de la transacción. Esto le da a Blockchain la propiedad de la inmutabilidad y garantiza la seguridad. Dado que, los datos se distribuyen, no hay una versión centralizada de los datos que pueda ser corrompida por un pirata informático. Los datos y los registros son públicos y fácilmente verificables. Esto también elimina el punto único de falla.
<ide>
<ide> #### _\* Ecosistema seguro (Hash criptográfico)_
<ide>
<ide> BlockTech utiliza conceptos como la prueba de trabajo y el cifrado Hash para gar
<ide>
<ide> #### _\* Minería_
<ide>
<del>Torrent es una red descentralizada de igual a igual utilizada para compartir archivos. BlockTech utiliza tecnología similar. Lo que diferencia a los usuarios es que, en Torrent, el sistema se basa en el código de honor de los usuarios para sembrar los archivos. Mientras que, en blockchain, los usuarios que participan en la transacción tienen incentivos económicos. Estos usuarios se llaman "mineros". Los mineros gastan sus recursos informáticos para resolver los hash criptográficos y garantizar la inmutabilidad y confiabilidad de la transacción. Cada solución exitosa (descifrado) asegura algún beneficio económico.
<add>Torrent es una red descentralizada de igual a igual utilizada para compartir archivos. BlockTech utiliza tecnología similar. Lo que diferencia a los usuarios es que, en Torrent, el sistema se basa en el código de honor de los usuarios para sembrar los archivos. Mientras que, en blockchain, los usuarios que participan en la transacción tienen incentivos económicos. Estos usuarios se llaman "mineros". Los mineros usan sus recursos computacionales para resolver los hash criptográficos y garantizar la inmutabilidad y confiabilidad de la transacción. Cada solución exitosa (descifrado) asegura algún beneficio económico.
<ide>
<ide> 
<ide>
<del>#### _\* Cronológico y tiempo sellado._
<add>#### _\* Cronológico y con marca de tiempo._
<ide>
<del>Las cadenas de bloques, idealmente, son solo listas enlazadas muy sofisticadas donde cada bloque es un repositorio que almacena información perteneciente a una transacción y también enlaces al bloque anterior en la misma transacción. Estos bloques se organizan en un pedido y se sellan en el tiempo durante la creación para garantizar un registro justo.
<add>Las cadenas de bloques, idealmente, son solo listas enlazadas muy sofisticadas donde cada bloque es un repositorio que almacena información perteneciente a una transacción y también enlaces al bloque anterior en la misma transacción. Estos bloques se organizan en un pedido y se marcan en el tiempo de la creación para garantizar un registro coherente.
<ide>
<ide> #### _\* Basado en el consenso_
<ide>
<ide> Consensus Based es un enfoque para la toma de decisiones. Es una forma creativa y dinámica de llegar a un acuerdo entre todos los miembros de un grupo. Una transacción en Blockchain solo se puede ejecutar si todas las partes en la red lo aprueban por unanimidad. Sin embargo, está sujeto a alteraciones para adaptarse a diversas circunstancias.
<ide>
<ide> ### Fuentes
<ide>
<del>1. [Libro Mayor Distribuido](https://searchcio.techtarget.com/definition/distributed-ledger)
<add>1. [Registro Distribuido](https://searchcio.techtarget.com/definition/distributed-ledger)
<ide> 2. [Que es sembrar](http://help.utorrent.com/customer/portal/articles/164656)
<ide> 3. [Mecanismo de consenso](https://www.seedsforchange.org.uk/consensus)
<ide> 4. [Características principales de Blockchain](https://cryptocurry.com/news/top-4-major-features-blockchain/)
<del>5. [Aplicación y características de blockchain](https://arxiv.org/pdf/1806.03693.pdf)
<ide>\ No newline at end of file
<add>5. [Aplicación y características de Blockchain](https://arxiv.org/pdf/1806.03693.pdf) | 1 |
PHP | PHP | fix failing tests | 0cdef75060d4eee0f28cddb2cf3260ac8cc8a907 | <ide><path>lib/Cake/Test/Case/Controller/Component/SessionComponentTest.php
<ide> public function testSessionReadWrite() {
<ide> $this->assertEquals($Session->read('Test'), $array);
<ide> $Session->delete('Test');
<ide>
<del> $this->assertFalse($Session->write(array('Test'), 'some value'));
<add> $this->assertTrue($Session->write(array('Test'), 'some value'));
<ide> $this->assertTrue($Session->write(array('Test' => 'some value')));
<ide> $this->assertEquals('some value', $Session->read('Test'));
<ide> $Session->delete('Test');
<ide><path>lib/Cake/Test/Case/Utility/HashTest.php
<ide> public function testGet() {
<ide> $result = Hash::get($data, '0');
<ide> $this->assertEquals('abc', $result);
<ide>
<add> $result = Hash::get($data, 0);
<add> $this->assertEquals('abc', $result);
<add>
<ide> $result = Hash::get($data, '1');
<ide> $this->assertEquals('def', $result);
<ide>
<ide><path>lib/Cake/Utility/Hash.php
<ide> public static function get(array $data, $path) {
<ide> if (empty($data)) {
<ide> return null;
<ide> }
<del> if (is_string($path)) {
<add> if (is_string($path) || is_numeric($path)) {
<ide> $parts = explode('.', $path);
<ide> } else {
<ide> $parts = $path; | 3 |
PHP | PHP | apply fixes from styleci | ddc6883f4c62821ec48d9670037019175864bf93 | <ide><path>src/Illuminate/Database/Query/Builder.php
<ide> public function where($column, $operator = null, $value = null, $boolean = 'and'
<ide> }
<ide>
<ide> // If the column is a Closure instance and there is an operator value, we will
<del> // assume the developer wants to run a subquery and then compare the result
<del> // of that subquery with the given value that was provided to the method.
<add> // assume the developer wants to run a subquery and then compare the result
<add> // of that subquery with the given value that was provided to the method.
<ide> if ($this->isQueryable($column) && ! is_null($operator)) {
<ide> [$sub, $bindings] = $this->createSub($column);
<ide> | 1 |
Ruby | Ruby | use the actual class name when unloading | e9a0f24ccbd74e9494e0641037d27b23631e22cf | <ide><path>Library/Homebrew/cmd/versions.rb
<ide> def version_for_sha sha
<ide> mktemp do
<ide> path = Pathname.new(Pathname.pwd+"#{name}.rb")
<ide> path.write text_from_sha(sha)
<del> # FIXME: shouldn't have to do this?
<del> Object.send(:remove_const, "#{name.capitalize}")
<add>
<add> # Unload the class so Formula#version returns the correct value.
<add> # Note that this means that the command will error out after it
<add> # encounters a formula that won't import. This doesn't matter
<add> # for most formulae, but e.g. Bash at revision aae084c9db has a
<add> # syntax error and so `versions` isn't able to walk very far back
<add> # through the history.
<add> # FIXME shouldn't have to do this?
<add> Object.send(:remove_const, "#{Formula.class_s(name)}")
<ide> Formula.factory(path).version
<ide> end rescue nil
<ide> end | 1 |
Javascript | Javascript | fix memory leak in .maintainlanguagemode | 4810d13094f388ea2240555a8774ee6093e407a7 | <ide><path>spec/grammar-registry-spec.js
<ide> describe('GrammarRegistry', () => {
<ide> })
<ide> })
<ide>
<del> describe('.maintainLanguageMode', () => {
<add> describe('.maintainLanguageMode(buffer)', () => {
<ide> it('assigns a grammar to the buffer based on its path', async () => {
<ide> const buffer = new TextBuffer()
<ide>
<ide> describe('GrammarRegistry', () => {
<ide> expect(retainedBufferCount(grammarRegistry)).toBe(0)
<ide> })
<ide>
<del> describe('when called twice with a given buffer', () => {
<del> it('does nothing the second time', async () => {
<del> const buffer = new TextBuffer()
<del> grammarRegistry.loadGrammarSync(require.resolve('language-javascript/grammars/javascript.cson'))
<del> const disposable1 = grammarRegistry.maintainLanguageMode(buffer)
<del> const disposable2 = grammarRegistry.maintainLanguageMode(buffer)
<add> it('doesn\'t do anything when called a second time with the same buffer', async () => {
<add> const buffer = new TextBuffer()
<add> grammarRegistry.loadGrammarSync(require.resolve('language-javascript/grammars/javascript.cson'))
<add> const disposable1 = grammarRegistry.maintainLanguageMode(buffer)
<add> const disposable2 = grammarRegistry.maintainLanguageMode(buffer)
<ide>
<del> buffer.setPath('test.js')
<del> expect(buffer.getLanguageMode().getLanguageName()).toBe('JavaScript')
<add> buffer.setPath('test.js')
<add> expect(buffer.getLanguageMode().getLanguageName()).toBe('JavaScript')
<ide>
<del> disposable2.dispose()
<del> buffer.setPath('test.txt')
<del> expect(buffer.getLanguageMode().getLanguageName()).toBe('Null Grammar')
<add> disposable2.dispose()
<add> buffer.setPath('test.txt')
<add> expect(buffer.getLanguageMode().getLanguageName()).toBe('Null Grammar')
<ide>
<del> disposable1.dispose()
<del> buffer.setPath('test.js')
<del> expect(buffer.getLanguageMode().getLanguageName()).toBe('Null Grammar')
<del> })
<add> disposable1.dispose()
<add> buffer.setPath('test.js')
<add> expect(buffer.getLanguageMode().getLanguageName()).toBe('Null Grammar')
<add> })
<add>
<add> it('does not retain the buffer after the buffer is destroyed', () => {
<add> const buffer = new TextBuffer()
<add> grammarRegistry.loadGrammarSync(require.resolve('language-javascript/grammars/javascript.cson'))
<add>
<add> const disposable = grammarRegistry.maintainLanguageMode(buffer)
<add> expect(retainedBufferCount(grammarRegistry)).toBe(1)
<add> expect(subscriptionCount(grammarRegistry)).toBe(2)
<add>
<add> buffer.destroy()
<add> expect(retainedBufferCount(grammarRegistry)).toBe(0)
<add> expect(subscriptionCount(grammarRegistry)).toBe(0)
<add> expect(buffer.emitter.getTotalListenerCount()).toBe(0)
<add>
<add> disposable.dispose()
<add> expect(retainedBufferCount(grammarRegistry)).toBe(0)
<add> expect(subscriptionCount(grammarRegistry)).toBe(0)
<add> })
<add>
<add> it('does not retain the buffer when the grammar registry is destroyed', () => {
<add> const buffer = new TextBuffer()
<add> grammarRegistry.loadGrammarSync(require.resolve('language-javascript/grammars/javascript.cson'))
<add>
<add> const disposable = grammarRegistry.maintainLanguageMode(buffer)
<add> expect(retainedBufferCount(grammarRegistry)).toBe(1)
<add> expect(subscriptionCount(grammarRegistry)).toBe(2)
<add>
<add> grammarRegistry.clear()
<add>
<add> expect(retainedBufferCount(grammarRegistry)).toBe(0)
<add> expect(subscriptionCount(grammarRegistry)).toBe(0)
<add> expect(buffer.emitter.getTotalListenerCount()).toBe(0)
<ide> })
<ide> })
<ide>
<ide> describe('GrammarRegistry', () => {
<ide> function retainedBufferCount (grammarRegistry) {
<ide> return grammarRegistry.grammarScoresByBuffer.size
<ide> }
<add>
<add>function subscriptionCount (grammarRegistry) {
<add> return grammarRegistry.subscriptions.disposables.size
<add>}
<ide><path>src/grammar-registry.js
<ide> class GrammarRegistry extends FirstMate.GrammarRegistry {
<ide> }
<ide> })
<ide>
<del> this.subscriptions.add(pathChangeSubscription)
<add> const destroySubscription = buffer.onDidDestroy(() => {
<add> this.grammarScoresByBuffer.delete(buffer)
<add> this.languageNameOverridesByBufferId.delete(buffer.id)
<add> this.subscriptions.remove(destroySubscription)
<add> this.subscriptions.remove(pathChangeSubscription)
<add> })
<add>
<add> this.subscriptions.add(pathChangeSubscription, destroySubscription)
<ide>
<ide> return new Disposable(() => {
<add> destroySubscription.dispose()
<add> pathChangeSubscription.dispose()
<ide> this.subscriptions.remove(pathChangeSubscription)
<add> this.subscriptions.remove(destroySubscription)
<ide> this.grammarScoresByBuffer.delete(buffer)
<del> pathChangeSubscription.dispose()
<add> this.languageNameOverridesByBufferId.delete(buffer.id)
<ide> })
<ide> }
<ide> | 2 |
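The fix above also releases the registry's per-buffer state when the buffer itself is destroyed, not only when the caller disposes the returned subscription. A stripped-down sketch of that pattern, using Node's `EventEmitter` in place of Atom's actual buffer and `Disposable` types (the class, method, and event names below are illustrative, not Atom's API):

```js
const { EventEmitter } = require('events');

class Registry {
  constructor() {
    this.scoresByBuffer = new Map();
  }

  // Track a buffer until the caller unsubscribes *or* the buffer is destroyed.
  maintain(buffer) {
    this.scoresByBuffer.set(buffer, 0);

    const cleanup = () => {
      this.scoresByBuffer.delete(buffer);
      buffer.removeListener('destroy', cleanup);
    };
    buffer.on('destroy', cleanup); // without this hook, destroyed buffers stay retained

    return { dispose: cleanup };
  }
}

// Destroying the buffer releases it even if dispose() is never called.
const registry = new Registry();
const buffer = new EventEmitter();
registry.maintain(buffer);
buffer.emit('destroy');
console.log(registry.scoresByBuffer.size); // 0
```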
Javascript | Javascript | fix race condition in test-http-exceptions.js | eda21cccb4caf0ba8be325b79c6c37ecaf1845c3 | <ide><path>test/simple/test-http-exceptions.js
<ide> server = http.createServer(function (req, res) {
<ide> res.write(server_response);
<ide> res.end();
<ide> });
<del>server.listen(PORT);
<ide>
<ide> function check_reqs() {
<ide> var done_reqs = 0;
<ide> function add_client(num) {
<ide> return req;
<ide> }
<ide>
<del>for (req_num = 0; req_num < 4 ; req_num += 1) {
<del> client_requests.push(add_client(req_num));
<del>}
<add>server.listen(PORT, function () {
<add> for (req_num = 0; req_num < 4 ; req_num += 1) {
<add> client_requests.push(add_client(req_num));
<add> }
<add>
<add> timer = setTimeout(function () {
<add> process.removeListener("uncaughtException", exception_handler);
<add> server.close();
<add> assert.strictEqual(4, exception_count);
<add> process.exit(0);
<add> }, 300);
<add>});
<ide>
<ide> function exception_handler(err) {
<ide> sys.puts("Caught an exception: " + err);
<ide> function exception_handler(err) {
<ide> }
<ide> exception_count += 1;
<ide> }
<del>process.addListener("uncaughtException", exception_handler);
<ide>
<del>timer = setTimeout(function () {
<del> process.removeListener("uncaughtException", exception_handler);
<del> server.close();
<del> assert.strictEqual(4, exception_count);
<del> process.exit(0);
<del>}, 300);
<add>process.addListener("uncaughtException", exception_handler); | 1 |
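The rewritten test works because `server.listen()` completes asynchronously; issuing client requests before the listening callback runs is exactly the race the original code had. A minimal, self-contained illustration of the safe ordering (port `0` asks the OS for any free port):

```js
const http = require('http');
const assert = require('assert');

const server = http.createServer((req, res) => {
  res.end('ok');
});

// Only start making requests once the server is actually bound.
server.listen(0, () => {
  const { port } = server.address();
  http.get({ port, path: '/' }, (res) => {
    assert.strictEqual(res.statusCode, 200);
    res.resume();
    res.on('end', () => server.close());
  });
});
```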
Python | Python | add delete_object method to the s3 driver | e33f872cb3a0c51e3be8b27d9b13e20d6e302a84 | <ide><path>libcloud/storage/drivers/s3.py
<ide> def delete_container(self, container):
<ide>
<ide> return False
<ide>
<add> def delete_object(self, obj):
<add> # TODO: escape object and container name
<add> response = self.connection.request('/%s/%s' % (obj.container.name,
<add> obj.name),
<add> method='DELETE')
<add> if response.status == httplib.NO_CONTENT:
<add> return True
<add> elif response.status == httplib.NOT_FOUND:
<add> raise ObjectDoesNotExistError(value=None, driver=self,
<add> object_name=obj.name)
<add>
<add> return False
<add>
<ide> def _to_containers(self, obj, xpath):
<ide> return [ self._to_container(element) for element in \
<ide> obj.findall(fixxpath(xpath=xpath, namespace=NAMESPACE))] | 1 |
PHP | PHP | add middleware registration to resources | 677563189476bfcfa019295be0a7da6b33950127 | <ide><path>src/Illuminate/Routing/ControllerDispatcher.php
<ide> public function dispatch(Route $route, $controller, $method)
<ide> return $controller->callAction($method, $parameters);
<ide> }
<ide>
<del> return call_user_func_array([$controller, $method], $parameters);
<add> return $controller->{$method}(...array_values($parameters));
<ide> }
<ide>
<ide> /**
<ide><path>src/Illuminate/Routing/ResourceRegistrar.php
<ide> protected function getResourceAction($resource, $controller, $method, $options)
<ide> {
<ide> $name = $this->getResourceName($resource, $method, $options);
<ide>
<del> return ['as' => $name, 'uses' => $controller.'@'.$method];
<add> $action = ['as' => $name, 'uses' => $controller.'@'.$method];
<add>
<add> if (isset($options['middleware'])) {
<add> $action['middleware'] = $options['middleware'];
<add> }
<add>
<add> return $action;
<ide> }
<ide>
<ide> /**
<ide><path>src/Illuminate/Routing/RouteRegistrar.php
<ide> public function attribute($key, $value)
<ide> return $this;
<ide> }
<ide>
<add> /**
<add> * Route a resource to a controller.
<add> *
<add> * @param string $name
<add> * @param string $controller
<add> * @param array $options
<add> * @return void
<add> */
<add> public function resource($name, $controller, array $options = [])
<add> {
<add> $this->router->resource($name, $controller, $this->attributes + $options);
<add> }
<add>
<ide> /**
<ide> * Create a route group with shared attributes.
<ide> *
<ide><path>tests/Routing/RouteRegistrarTest.php
<ide> public function testCanRegisterGroupWithMiddleware()
<ide> $this->seeMiddleware('group-middleware');
<ide> }
<ide>
<add>
<ide> public function testCanRegisterGroupWithNamespace()
<ide> {
<ide> $this->router->namespace('App\Http\Controllers')->group(function ($router) {
<ide> public function index()
<ide> {
<ide> return 'controller';
<ide> }
<add>
<add> public function destroy()
<add> {
<add> return 'deleted';
<add> }
<ide> } | 4 |
Python | Python | fix iterations for decoder | 8afaaa26f5754948f4ddf8f31d70d0293488a897 | <ide><path>src/transformers/models/t5/convert_t5x_checkpoint_to_flax.py
<ide> def convert_t5x_checkpoint_to_flax(t5x_checkpoint_path, config_name, flax_dump_f
<ide> flax_model.params["encoder"]["final_layer_norm"]["weight"] = t5x_encoder_norm
<ide>
<ide> # Decoder
<del> for layer_index in range(config.num_layers):
<add> for layer_index in range(config.num_decoder_layers):
<ide> layer_name = f"layers_{str(layer_index)}"
<ide>
<ide> # Self-Attention | 1 |
Java | Java | re-obtain port on every websocket integration test | d86e4cf2038ea67c51e0da4286034331225c16a6 | <ide><path>spring-websocket/src/test/java/org/springframework/web/socket/AbstractWebSocketIntegrationTests.java
<ide> public void setup() throws Exception {
<ide> ((Lifecycle) this.webSocketClient).start();
<ide> }
<ide>
<add> this.server.setup();
<ide> this.server.deployConfig(this.wac);
<ide> this.server.start();
<ide> }
<ide> public void teardown() throws Exception {
<ide> catch (Throwable t) {
<ide> logger.error("Failed to stop WebSocket client", t);
<ide> }
<del>
<ide> try {
<ide> this.server.undeployConfig();
<ide> }
<ide> catch (Throwable t) {
<ide> logger.error("Failed to undeploy application config", t);
<ide> }
<del>
<ide> try {
<ide> this.server.stop();
<ide> }
<ide><path>spring-websocket/src/test/java/org/springframework/web/socket/JettyWebSocketTestServer.java
<ide> /*
<del> * Copyright 2002-2013 the original author or authors.
<add> * Copyright 2002-2014 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide> import org.eclipse.jetty.servlet.FilterHolder;
<ide> import org.eclipse.jetty.servlet.ServletContextHandler;
<ide> import org.eclipse.jetty.servlet.ServletHolder;
<add>import org.springframework.util.Assert;
<ide> import org.springframework.util.SocketUtils;
<ide> import org.springframework.web.context.WebApplicationContext;
<ide> import org.springframework.web.servlet.DispatcherServlet;
<ide> */
<ide> public class JettyWebSocketTestServer implements WebSocketTestServer {
<ide>
<del> private final Server jettyServer;
<add> private Server jettyServer;
<ide>
<del> private final int port;
<add> private int port = -1;
<ide>
<ide>
<del> public JettyWebSocketTestServer() {
<add> @Override
<add> public void setup() {
<ide> this.port = SocketUtils.findAvailableTcpPort();
<ide> this.jettyServer = new Server(this.port);
<ide> }
<ide> public int getPort() {
<ide>
<ide> @Override
<ide> public void deployConfig(WebApplicationContext cxt, Filter... filters) {
<add> Assert.state(this.port != -1, "setup() was never called.");
<ide> ServletContextHandler contextHandler = new ServletContextHandler();
<ide> ServletHolder servletHolder = new ServletHolder(new DispatcherServlet(cxt));
<ide> contextHandler.addServlet(servletHolder, "/");
<ide><path>spring-websocket/src/test/java/org/springframework/web/socket/TomcatWebSocketTestServer.java
<ide> import org.apache.tomcat.util.descriptor.web.FilterMap;
<ide> import org.apache.tomcat.websocket.server.WsContextListener;
<ide>
<add>import org.springframework.util.Assert;
<ide> import org.springframework.util.SocketUtils;
<ide> import org.springframework.web.context.WebApplicationContext;
<ide> import org.springframework.web.servlet.DispatcherServlet;
<ide> */
<ide> public class TomcatWebSocketTestServer implements WebSocketTestServer {
<ide>
<del> private final Tomcat tomcatServer;
<add> private Tomcat tomcatServer;
<ide>
<del> private final int port;
<add> private int port = -1;
<ide>
<ide> private Context context;
<ide>
<ide>
<del> public TomcatWebSocketTestServer() {
<add> @Override
<add> public void setup() {
<ide> this.port = SocketUtils.findAvailableTcpPort();
<ide>
<ide> Connector connector = new Connector(Http11NioProtocol.class.getName());
<del> connector.setPort(this.port);
<add> connector.setPort(this.port);
<ide>
<del> File baseDir = createTempDir("tomcat");
<del> String baseDirPath = baseDir.getAbsolutePath();
<add> File baseDir = createTempDir("tomcat");
<add> String baseDirPath = baseDir.getAbsolutePath();
<ide>
<ide> this.tomcatServer = new Tomcat();
<ide> this.tomcatServer.setBaseDir(baseDirPath);
<ide> this.tomcatServer.setPort(this.port);
<del> this.tomcatServer.getService().addConnector(connector);
<del> this.tomcatServer.setConnector(connector);
<add> this.tomcatServer.getService().addConnector(connector);
<add> this.tomcatServer.setConnector(connector);
<ide> }
<ide>
<ide> private File createTempDir(String prefix) {
<ide> public int getPort() {
<ide>
<ide> @Override
<ide> public void deployConfig(WebApplicationContext wac, Filter... filters) {
<del> this.context = this.tomcatServer.addContext("", System.getProperty("java.io.tmpdir"));
<add> Assert.state(this.port != -1, "setup() was never called.");
<add> this.context = this.tomcatServer.addContext("", System.getProperty("java.io.tmpdir"));
<ide> this.context.addApplicationListener(WsContextListener.class.getName());
<ide> Tomcat.addServlet(this.context, "dispatcherServlet", new DispatcherServlet(wac)).setAsyncSupported(true);
<ide> this.context.addServletMapping("/", "dispatcherServlet");
<ide><path>spring-websocket/src/test/java/org/springframework/web/socket/UndertowTestServer.java
<ide> import javax.servlet.ServletException;
<ide>
<ide> import io.undertow.Undertow;
<add>import io.undertow.server.HttpHandler;
<ide> import io.undertow.servlet.api.DeploymentInfo;
<ide> import io.undertow.servlet.api.DeploymentManager;
<ide> import io.undertow.servlet.api.FilterInfo;
<ide> import io.undertow.servlet.api.InstanceFactory;
<ide> import io.undertow.servlet.api.InstanceHandle;
<ide> import io.undertow.websockets.jsr.WebSocketDeploymentInfo;
<ide>
<add>import org.springframework.util.Assert;
<ide> import org.springframework.util.SocketUtils;
<ide> import org.springframework.web.context.WebApplicationContext;
<ide> import org.springframework.web.servlet.DispatcherServlet;
<ide> */
<ide> public class UndertowTestServer implements WebSocketTestServer {
<ide>
<add> private int port = -1;
<add>
<ide> private Undertow server;
<ide>
<ide> private DeploymentManager manager;
<ide>
<del> private final int port;
<del>
<ide>
<del> public UndertowTestServer() {
<add> @Override
<add> public void setup() {
<ide> this.port = SocketUtils.findAvailableTcpPort();
<ide> }
<ide>
<ide> public int getPort() {
<ide>
<ide> @Override
<ide> public void deployConfig(WebApplicationContext cxt, Filter... filters) {
<add> Assert.state(this.port != -1, "setup() was never called");
<ide> DispatcherServletInstanceFactory servletFactory = new DispatcherServletInstanceFactory(cxt);
<del>
<ide> DeploymentInfo servletBuilder = deployment()
<ide> .setClassLoader(UndertowTestServer.class.getClassLoader())
<ide> .setDeploymentName("undertow-websocket-test")
<ide> .setContextPath("/")
<ide> .addServlet(servlet("DispatcherServlet", DispatcherServlet.class, servletFactory).addMapping("/"))
<ide> .addServletContextAttribute(WebSocketDeploymentInfo.ATTRIBUTE_NAME, new WebSocketDeploymentInfo());
<del>
<ide> for (final Filter filter : filters) {
<ide> String filterName = filter.getClass().getName();
<ide> servletBuilder.addFilter(new FilterInfo(filterName, filter.getClass(), new FilterInstanceFactory(filter)));
<ide> for (DispatcherType type : DispatcherType.values()) {
<ide> servletBuilder.addFilterUrlMapping(filterName, "/*", type);
<ide> }
<ide> }
<del> this.manager = defaultContainer().addDeployment(servletBuilder);
<del> this.manager.deploy();
<del>
<ide> try {
<del> this.server = Undertow.builder()
<del> .addHttpListener(this.port, "localhost")
<del> .setHandler(this.manager.start()).build();
<add> this.manager = defaultContainer().addDeployment(servletBuilder);
<add> this.manager.deploy();
<add> HttpHandler httpHandler = this.manager.start();
<add> this.server = Undertow.builder().addHttpListener(this.port, "localhost").setHandler(httpHandler).build();
<ide> }
<ide> catch (ServletException ex) {
<ide> throw new IllegalStateException(ex);
<ide><path>spring-websocket/src/test/java/org/springframework/web/socket/WebSocketTestServer.java
<ide> public interface WebSocketTestServer {
<ide>
<ide> int getPort();
<ide>
<add> void setup();
<add>
<ide> void deployConfig(WebApplicationContext cxt, Filter... filters);
<ide>
<ide> void undeployConfig(); | 5 |
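Illustrative aside, not part of the Spring patch above: the idea of asking the OS for a fresh ephemeral port every time a test server is set up (what SocketUtils.findAvailableTcpPort() does on the JVM) can be sketched in Node.js as below; findAvailableTcpPort is just a placeholder helper name for this sketch.

const net = require('net');

// Ask the OS for an unused ephemeral port by listening on port 0, then release it.
// Each test run gets a fresh port instead of reusing one chosen at construction time.
function findAvailableTcpPort(callback) {
  const probe = net.createServer();
  probe.listen(0, function () {
    const port = probe.address().port;
    probe.close(function () { callback(port); });
  });
}

findAvailableTcpPort(function (port) {
  console.log('test server will bind to port', port);
});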
Javascript | Javascript | add more unit tests for the api | a984fe5b55b3e7a0fad47badf8af2f5ef8d6de3a | <ide><path>src/core/obj.js
<ide> var Catalog = (function CatalogClosure() {
<ide> },
<ide> get attachments() {
<ide> var xref = this.xref;
<del> var attachments, nameTreeRef;
<add> var attachments = null, nameTreeRef;
<ide> var obj = this.catDict.get('Names');
<ide> if (obj) {
<ide> nameTreeRef = obj.getRaw('EmbeddedFiles');
<ide><path>test/unit/api_spec.js
<ide> describe('api', function() {
<ide> 0, 841.89, null] });
<ide> });
<ide> });
<add> it('gets attachments', function() {
<add> var promise = doc.getAttachments();
<add> waitsForPromise(promise, function (data) {
<add> expect(data).toEqual(null);
<add> });
<add> });
<add> it('gets javascript', function() {
<add> var promise = doc.getJavaScript();
<add> waitsForPromise(promise, function (data) {
<add> expect(data).toEqual([]);
<add> });
<add> });
<ide> it('gets outline', function() {
<ide> var promise = doc.getOutline();
<ide> waitsForPromise(promise, function(outline) {
<ide> describe('api', function() {
<ide> expect(true).toEqual(true);
<ide> });
<ide> });
<add> it('gets filesize in bytes', function() {
<add> var promise = doc.getDownloadInfo();
<add> waitsForPromise(promise, function (data) {
<add> expect(data.length).toEqual(105779);
<add> });
<add> });
<ide> });
<ide> describe('Page', function() {
<ide> var resolvePromise; | 2 |
PHP | PHP | add method to generate secure random strings | 41a63c767e460fd71ae569a8017cfa27725981d9 | <ide><path>src/Utility/Security.php
<ide> public static function randomBytes($length)
<ide> return $bytes;
<ide> }
<ide>
<add> /**
<add> * Creates a secure random string.
<add> *
<add> * @param int $length String length
<add> * @return string
<add> * @since 3.6.0
<add> */
<add> public static function randomString($length)
<add> {
<add> return bin2hex(Security::randomBytes($length / 2));
<add> }
<add>
<ide> /**
<ide> * Like randomBytes() above, but not cryptographically secure.
<ide> *
<ide><path>tests/TestCase/Utility/SecurityTest.php
<ide> public function testRandomBytes()
<ide> $this->assertRegExp('/[^0-9a-f]/', $value, 'should return a binary string');
<ide> }
<ide>
<add> /**
<add> * Test the randomString method.
<add> *
<add> * @return void
<add> */
<add> public function testRandomString()
<add> {
<add> $value = Security::randomString(16);
<add> $this->assertSame(16, strlen($value));
<add>
<add> $value = Security::randomString(64);
<add> $this->assertSame(64, strlen($value));
<add>
<add> $this->assertRegExp('/^[0-9a-f]+$/', $value, 'should return a ASCII string');
<add> }
<add>
<ide> /**
<ide> * Test the insecureRandomBytes method
<ide> * | 2 |
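The construction behind the new Security::randomString(), hex-encoding length / 2 cryptographically secure bytes to obtain a length-character string, reads the same way in a Node.js sketch; this is a cross-language illustration only, not CakePHP code.

const crypto = require('crypto');

function randomString(length) {
  // hex encoding doubles the byte count, so request length / 2 secure random bytes
  return crypto.randomBytes(length / 2).toString('hex');
}

console.log(randomString(16)); // 16 hex characters, e.g. "9f86d081884c7d65"
console.log(randomString(64)); // 64 hex characters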
Javascript | Javascript | fix typo that caused errors on ie8 | ee5a5352fd4b94cedee6ef20d4bf2d43ce77e00b | <ide><path>src/scenario/output/Xml.js
<ide> angular.scenario.output('xml', function(context, runner, model) {
<ide> stepContext.attr('status', step.status);
<ide> it.append(stepContext);
<ide> if (step.error) {
<del> var error = $('<error></error');
<add> var error = $('<error></error>');
<ide> stepContext.append(error);
<ide> error.text(formatException(stepContext.error));
<ide> } | 1 |
Go | Go | replace old version tests | 813d2e082a094d11bda0d0f5a708ce30805ab28e | <ide><path>integration-cli/docker_api_version_test.go
<del>package main
<del>
<del>import (
<del> "github.com/docker/docker/client"
<del> "github.com/docker/docker/dockerversion"
<del> "github.com/docker/docker/integration-cli/checker"
<del> "github.com/go-check/check"
<del> "golang.org/x/net/context"
<del>)
<del>
<del>func (s *DockerSuite) TestGetVersion(c *check.C) {
<del> cli, err := client.NewEnvClient()
<del> c.Assert(err, checker.IsNil)
<del> defer cli.Close()
<del>
<del> v, err := cli.ServerVersion(context.Background())
<del> c.Assert(v.Version, checker.Equals, dockerversion.Version, check.Commentf("Version mismatch"))
<del>}
<ide><path>integration-cli/docker_cli_experimental_test.go
<del>package main
<del>
<del>import (
<del> "strings"
<del>
<del> "github.com/docker/docker/integration-cli/checker"
<del> "github.com/go-check/check"
<del>)
<del>
<del>func (s *DockerSuite) TestExperimentalVersionTrue(c *check.C) {
<del> testExperimentalInVersion(c, ExperimentalDaemon, "*true")
<del>}
<del>
<del>func (s *DockerSuite) TestExperimentalVersionFalse(c *check.C) {
<del> testExperimentalInVersion(c, NotExperimentalDaemon, "*false")
<del>}
<del>
<del>func testExperimentalInVersion(c *check.C, requirement func() bool, expectedValue string) {
<del> testRequires(c, requirement)
<del> out, _ := dockerCmd(c, "version")
<del> for _, line := range strings.Split(out, "\n") {
<del> if strings.HasPrefix(strings.TrimSpace(line), "Experimental:") {
<del> c.Assert(line, checker.Matches, expectedValue)
<del> return
<del> }
<del> }
<del>
<del> c.Fatal(`"Experimental" not found in version output`)
<del>}
<ide><path>integration-cli/docker_cli_version_test.go
<del>package main
<del>
<del>import (
<del> "strings"
<del>
<del> "github.com/docker/docker/integration-cli/checker"
<del> "github.com/go-check/check"
<del>)
<del>
<del>// ensure docker version works
<del>func (s *DockerSuite) TestVersionEnsureSucceeds(c *check.C) {
<del> out, _ := dockerCmd(c, "version")
<del> stringsToCheck := map[string]int{
<del> "Client:": 1,
<del> "Server:": 1,
<del> " Version:": 2,
<del> " API version:": 2,
<del> " Go version:": 2,
<del> " Git commit:": 2,
<del> " OS/Arch:": 2,
<del> " Built:": 2,
<del> }
<del>
<del> for k, v := range stringsToCheck {
<del> c.Assert(strings.Count(out, k), checker.Equals, v, check.Commentf("The count of %v in %s does not match excepted", k, out))
<del> }
<del>}
<del>
<del>// ensure the Windows daemon return the correct platform string
<del>func (s *DockerSuite) TestVersionPlatform_w(c *check.C) {
<del> testRequires(c, DaemonIsWindows)
<del> testVersionPlatform(c, "windows/amd64")
<del>}
<del>
<del>// ensure the Linux daemon return the correct platform string
<del>func (s *DockerSuite) TestVersionPlatform_l(c *check.C) {
<del> testRequires(c, DaemonIsLinux)
<del> testVersionPlatform(c, "linux")
<del>}
<del>
<del>func testVersionPlatform(c *check.C, platform string) {
<del> out, _ := dockerCmd(c, "version")
<del> expected := "OS/Arch: " + platform
<del>
<del> split := strings.Split(out, "\n")
<del> c.Assert(len(split) >= 14, checker.Equals, true, check.Commentf("got %d lines from version", len(split)))
<del>
<del> // Verify the second 'OS/Arch' matches the platform. Experimental has
<del> // more lines of output than 'regular'
<del> bFound := false
<del> for i := 14; i < len(split); i++ {
<del> if strings.Contains(split[i], expected) {
<del> bFound = true
<del> break
<del> }
<del> }
<del> c.Assert(bFound, checker.Equals, true, check.Commentf("Could not find server '%s' in '%s'", expected, out))
<del>}
<ide><path>integration/system/main_test.go
<add>package system
<add>
<add>import (
<add> "fmt"
<add> "os"
<add> "testing"
<add>
<add> "github.com/docker/docker/internal/test/environment"
<add>)
<add>
<add>var testEnv *environment.Execution
<add>
<add>func TestMain(m *testing.M) {
<add> var err error
<add> testEnv, err = environment.New()
<add> if err != nil {
<add> fmt.Println(err)
<add> os.Exit(1)
<add> }
<add>
<add> testEnv.Print()
<add> os.Exit(m.Run())
<add>}
<add>
<add>func setupTest(t *testing.T) func() {
<add> environment.ProtectImages(t, testEnv)
<add> return func() { testEnv.Clean(t) }
<add>}
<ide><path>integration/system/version_test.go
<add>package system
<add>
<add>import (
<add> "testing"
<add>
<add> "github.com/docker/docker/integration-cli/request"
<add> "github.com/stretchr/testify/assert"
<add> "github.com/stretchr/testify/require"
<add> "golang.org/x/net/context"
<add>)
<add>
<add>func TestVersion(t *testing.T) {
<add> client, err := request.NewClient()
<add> require.NoError(t, err)
<add>
<add> version, err := client.ServerVersion(context.Background())
<add> require.NoError(t, err)
<add>
<add> assert.NotNil(t, version.APIVersion)
<add> assert.NotNil(t, version.Version)
<add> assert.NotNil(t, version.MinAPIVersion)
<add> assert.Equal(t, testEnv.DaemonInfo.ExperimentalBuild, version.Experimental)
<add> assert.Equal(t, testEnv.DaemonInfo.OSType, version.Os)
<add>} | 5 |
Python | Python | drop unused import | 4e14b26fa9727a79f8ae7c7ef25d1339500fa26c | <ide><path>rest_framework/views.py
<ide> from django.utils.safestring import mark_safe
<ide> from django.views.decorators.csrf import csrf_exempt
<ide> from rest_framework import status, exceptions
<del>from rest_framework.compat import View, apply_markdown, smart_text
<add>from rest_framework.compat import View, apply_markdown
<ide> from rest_framework.response import Response
<ide> from rest_framework.request import Request
<ide> from rest_framework.settings import api_settings | 1 |
Ruby | Ruby | remove deprecation comment | 5b6b400c5837b3bc1777ed6ea6740496d6317f2e | <ide><path>Library/Homebrew/cmd/tap.rb
<ide> def tap_args
<ide> switch "--full",
<ide> description: "Convert a shallow clone to a full clone without untapping. Taps are only cloned as "\
<ide> "shallow clones if `--shallow` was originally passed."
<del> # odeprecated "brew tap --shallow"
<ide> switch "--shallow",
<ide> description: "Fetch tap as a shallow clone rather than a full clone. Useful for continuous integration."
<ide> switch "--force-auto-update", | 1 |
Javascript | Javascript | improve eachcomputedproperty implementation | 810c2c6a82ce797a6a9f3e397f4796c890e28b6b | <ide><path>packages/ember-metal/lib/descriptor.js
<ide> class Descriptor extends EmberDescriptor {
<ide> constructor(desc) {
<ide> super();
<ide> this.desc = desc;
<add> this.enumerable = desc.enumerable;
<ide> }
<ide>
<ide> setup(obj, key) {
<ide><path>packages/ember-metal/lib/index.js
<ide> export {
<ide> } from './property_events';
<ide> export {
<ide> defineProperty,
<del> Descriptor,
<del> _hasCachedComputedProperties
<add> Descriptor
<ide> } from './properties';
<ide> export {
<ide> watchKey,
<ide><path>packages/ember-metal/lib/meta.js
<ide> if (EMBER_METAL_ES5_GETTERS) {
<ide> Meta.prototype.removeDescriptors = function(subkey) {
<ide> this.writeDescriptors(subkey, UNDEFINED);
<ide> };
<add>
<add> Meta.prototype.forEachDescriptors = function(fn) {
<add> let pointer = this;
<add> let seen;
<add> while (pointer !== undefined) {
<add> let map = pointer._descriptors;
<add> if (map !== undefined) {
<add> for (let key in map) {
<add> seen = seen === undefined ? new Set() : seen;
<add> if (!seen.has(key)) {
<add> seen.add(key);
<add> let value = map[key];
<add> if (value !== UNDEFINED) {
<add> fn(key, value);
<add> }
<add> }
<add> }
<add> }
<add> pointer = pointer.parent;
<add> }
<add> };
<ide> }
<ide>
<ide> const getPrototypeOf = Object.getPrototypeOf;
<ide><path>packages/ember-metal/lib/properties.js
<ide> import { HAS_NATIVE_PROXY } from 'ember-utils';
<ide> import { descriptorFor, meta as metaFor, peekMeta, DESCRIPTOR, UNDEFINED } from './meta';
<ide> import { overrideChains } from './property_events';
<ide> import { DESCRIPTOR_TRAP, EMBER_METAL_ES5_GETTERS, MANDATORY_SETTER } from 'ember/features';
<del>import { peekCacheFor } from './computed';
<ide> // ..........................................................
<ide> // DESCRIPTOR
<ide> //
<ide> import { peekCacheFor } from './computed';
<ide> export class Descriptor {
<ide> constructor() {
<ide> this.isDescriptor = true;
<add> this.enumerable = true;
<ide> }
<ide> }
<ide>
<ide> export function defineProperty(obj, keyName, desc, data, meta) {
<ide> meta.writeDescriptors(keyName, value);
<ide> }
<ide>
<del> didDefineComputedProperty(obj.constructor);
<del>
<ide> if (typeof desc.setup === 'function') { desc.setup(obj, keyName); }
<ide> } else if (desc === undefined || desc === null) {
<ide> value = data;
<ide> export function defineProperty(obj, keyName, desc, data, meta) {
<ide>
<ide> return this;
<ide> }
<del>
<del>let hasCachedComputedProperties = false;
<del>export function _hasCachedComputedProperties() {
<del> hasCachedComputedProperties = true;
<del>}
<del>
<del>function didDefineComputedProperty(constructor) {
<del> if (hasCachedComputedProperties === false) { return; }
<del>
<del> let cache = peekCacheFor(constructor);
<del> if (cache !== undefined) {
<del> cache.delete('_computedProperties');
<del> }
<del>}
<ide><path>packages/ember-runtime/lib/system/core_object.js
<ide> import {
<ide> import {
<ide> PROXY_CONTENT,
<ide> descriptorFor,
<del> get,
<ide> meta,
<ide> peekMeta,
<ide> finishChains,
<ide> import {
<ide> REQUIRED,
<ide> defineProperty,
<ide> ComputedProperty,
<del> computed,
<ide> InjectedProperty,
<ide> run,
<ide> deleteMeta,
<del> descriptor,
<del> _hasCachedComputedProperties
<add> descriptor
<ide> } from 'ember-metal';
<ide> import ActionHandler from '../mixins/action_handler';
<ide> import { validatePropertyInjections } from '../inject';
<ide> let ClassMixinProps = {
<ide> @private
<ide> */
<ide> metaForProperty(key) {
<del> let proto = this.proto();
<add> let proto = this.proto(); // ensure prototype is initialized
<ide> let possibleDesc = descriptorFor(proto, key);
<ide>
<ide> assert(
<ide> `metaForProperty() could not find a computed property with key '${key}'.`,
<ide> possibleDesc !== undefined
<ide> );
<add>
<ide> return possibleDesc._meta || {};
<ide> },
<ide>
<del> _computedProperties: computed(function() {
<del> _hasCachedComputedProperties();
<del> let proto = this.proto();
<del> let possibleDesc;
<del> let properties = [];
<del>
<del> for (let name in proto) {
<del> possibleDesc = descriptorFor(proto, name);
<del>
<del> if (possibleDesc !== undefined) {
<del> properties.push({
<del> name,
<del> meta: possibleDesc._meta
<del> });
<del> }
<del> }
<del> return properties;
<del> }).readOnly(),
<del>
<ide> /**
<ide> Iterate over each computed property for the class, passing its name
<ide> and any associated metadata (see `metaForProperty`) to the callback.
<ide> let ClassMixinProps = {
<ide> @param {Object} binding
<ide> @private
<ide> */
<del> eachComputedProperty(callback, binding) {
<del> let property;
<add> eachComputedProperty(callback, binding = this) {
<add> this.proto(); // ensure prototype is initialized
<ide> let empty = {};
<ide>
<del> let properties = get(this, '_computedProperties');
<del>
<del> for (let i = 0; i < properties.length; i++) {
<del> property = properties[i];
<del> callback.call(binding || this, property.name, property.meta || empty);
<del> }
<add> meta(this.prototype).forEachDescriptors((name, descriptor) => {
<add> if (descriptor.enumerable) {
<add> let meta = descriptor._meta || empty;
<add> callback.call(binding, name, meta);
<add> }
<add> });
<ide> }
<ide> };
<ide>
<ide><path>packages/ember-runtime/tests/system/object/computed_test.js
<ide> import {
<ide> alias,
<ide> computed,
<ide> get as emberGet,
<del> observer
<add> observer,
<add> defineProperty
<ide> } from 'ember-metal';
<ide> import { testWithDefault } from 'internal-test-helpers';
<ide> import EmberObject from '../../../system/object';
<ide> QUnit.test('can retrieve metadata for a computed property', function(assert) {
<ide> }, 'metaForProperty() could not find a computed property with key \'staticProperty\'.');
<ide> });
<ide>
<add>QUnit.test('overriding a computed property with null removes it from eachComputedProperty iteration', function(assert) {
<add> let MyClass = EmberObject.extend({
<add> foo: computed(function() {}),
<add>
<add> fooDidChange: observer('foo', function() {}),
<add>
<add> bar: computed(function() {}),
<add> });
<add>
<add> let SubClass = MyClass.extend({
<add> foo: null
<add> });
<add>
<add> let list = [];
<add>
<add> SubClass.eachComputedProperty(name => list.push(name));
<add>
<add> assert.deepEqual(list.sort(), ['bar'], 'overridding with null removes from eachComputedProperty listing');
<add>});
<add>
<ide> QUnit.test('can iterate over a list of computed properties for a class', function(assert) {
<ide> let MyClass = EmberObject.extend({
<ide> foo: computed(function() {}),
<ide> QUnit.test('list of properties updates when an additional property is added (suc
<ide> });
<ide>
<ide> assert.deepEqual(list.sort(), ['bar', 'foo', 'baz'].sort(), 'expected three computed properties');
<add>
<add> defineProperty(MyClass.prototype, 'qux', computed(K));
<add>
<add> list = [];
<add>
<add> MyClass.eachComputedProperty(function(name) {
<add> list.push(name);
<add> });
<add>
<add> assert.deepEqual(list.sort(), ['bar', 'foo', 'baz', 'qux'].sort(), 'expected four computed properties');
<ide> });
<ide>
<ide> QUnit.test('Calling _super in call outside the immediate function of a CP getter works', function(assert) { | 6 |
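A condensed usage sketch mirroring the new unit test above: the reworked eachComputedProperty walks the prototype meta's descriptor maps (deduplicating keys with a Set along the parent chain and honouring descriptor.enumerable), so a computed property overridden with null in a subclass simply stops being reported.

const MyClass = EmberObject.extend({
  foo: computed(function () {}),
  bar: computed(function () {})
});

const SubClass = MyClass.extend({
  foo: null // shadows the computed property; it will not be iterated
});

const names = [];
SubClass.eachComputedProperty(function (name, meta) {
  names.push(name);
});
// names === ['bar']: 'foo' is skipped because the subclass overrode it with null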
Python | Python | add space in error message | be6503a8a81873109cf830bef225bf06a13edda2 | <ide><path>keras/engine/training.py
<ide> def _standardize_input_data(data, names, shapes=None,
<ide> if len(names) > 1:
<ide> # Case: model expects multiple inputs but only received
<ide> # a single Numpy array.
<del> raise ValueError('The model expects ' + str(len(names)) +
<add> raise ValueError('The model expects ' + str(len(names)) + ' ' +
<ide> exception_prefix +
<ide> ' arrays, but only received one array. '
<ide> 'Found: array with shape ' + str(data.shape)) | 1 |
Javascript | Javascript | change the polling system of the api in the webui | f415f8e5114d5be09f384cf017bb90f9df24104a | <ide><path>glances/outputs/static/js/components/glances/controller.js
<ide> 'use strict';
<ide>
<del>function GlancesController($interval, GlancesStats, REFRESH_TIME, Hotkeys) {
<add>function GlancesController($timeout, GlancesStats, REFRESH_TIME, Hotkeys) {
<ide> var vm = this;
<ide>
<ide> vm.dataLoaded = false;
<ide> vm.stats = {};
<add>
<add> var refreshDataSuccess = function (data) {
<add> data.isBsd = data.stats['system']['os_name'] === 'FreeBSD';
<add> data.isLinux = data.stats['system']['os_name'] === 'Linux';
<add> data.isMac = data.stats['system']['os_name'] === 'Darwin';
<add> data.isWindows = data.stats['system']['os_name'] === 'Windows';
<add>
<add> vm.stats = data;
<add> vm.is_disconnected = false;
<add> vm.dataLoaded = true;
<add>
<add> data = undefined;
<add> nextLoad();
<add> };
<add>
<add> var refreshDataError = function() {
<add> vm.is_disconnected = true;
<add> nextLoad();
<add> };
<add>
<ide> vm.refreshData = function () {
<del> GlancesStats.getData().then(function (data) {
<del>
<del> data.isBsd = data.stats['system']['os_name'] === 'FreeBSD';
<del> data.isLinux = data.stats['system']['os_name'] === 'Linux';
<del> data.isMac = data.stats['system']['os_name'] === 'Darwin';
<del> data.isWindows = data.stats['system']['os_name'] === 'Windows';
<del>
<del> vm.stats = data;
<del> vm.is_disconnected = false;
<del> vm.dataLoaded = true;
<del> }, function() {
<del> vm.is_disconnected = true;
<del> });
<add> GlancesStats.getData().then(refreshDataSuccess, refreshDataError);
<add> };
<add>
<add> var loadPromise;
<add> var cancelNextLoad = function() {
<add> $timeout.cancel(loadPromise);
<add> };
<add>
<add> var nextLoad = function() {
<add> cancelNextLoad();
<add> loadPromise = $timeout(vm.refreshData, REFRESH_TIME * 1000); // in milliseconds
<ide> };
<ide>
<ide> vm.refreshData();
<del> $interval(function () {
<del> vm.refreshData();
<del> }, REFRESH_TIME * 1000); // in milliseconds
<ide>
<ide> Hotkeys.registerHotkey(Hotkeys.createHotkey({
<ide> key: 'm',
<ide> callback: function () {
<ide> console.log('Sort processes by MEM%');
<ide> }
<ide> }));
<del>
<del> Hotkeys.registerHotkey( hotkey);
<ide> }
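The scheduling change above, reduced to a sketch before the generated bundles that follow: each refresh re-arms a single $timeout only after the previous request settles, so a slow or failed API call can never stack requests the way a fixed $interval tick could. The names startPolling and fetchStats are generic placeholders, not the actual Glances controller.

function startPolling($timeout, fetchStats, refreshSeconds) {
  let loadPromise;

  function nextLoad() {
    $timeout.cancel(loadPromise);                         // never keep two timers armed
    loadPromise = $timeout(refresh, refreshSeconds * 1000);
  }

  function refresh() {
    fetchStats().then(
      function (data) { /* update the view model */ nextLoad(); },
      function ()     { /* mark as disconnected  */ nextLoad(); }
    );
  }

  refresh(); // first load right away, then self-reschedule after each response
}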
<ide><path>glances/outputs/static/public/js/main.min.js
<ide> glancesApp.component('glances', {
<ide>
<ide> 'use strict';
<ide>
<del>function GlancesController($interval, GlancesStats, REFRESH_TIME, Hotkeys) {
<add>function GlancesController($timeout, GlancesStats, REFRESH_TIME, Hotkeys) {
<ide> var vm = this;
<ide>
<ide> vm.dataLoaded = false;
<ide> vm.stats = {};
<add>
<add> var refreshDataSuccess = function (data) {
<add> data.isBsd = data.stats['system']['os_name'] === 'FreeBSD';
<add> data.isLinux = data.stats['system']['os_name'] === 'Linux';
<add> data.isMac = data.stats['system']['os_name'] === 'Darwin';
<add> data.isWindows = data.stats['system']['os_name'] === 'Windows';
<add>
<add> vm.stats = data;
<add> vm.is_disconnected = false;
<add> vm.dataLoaded = true;
<add>
<add> data = undefined;
<add> nextLoad();
<add> };
<add>
<add> var refreshDataError = function() {
<add> vm.is_disconnected = true;
<add> nextLoad();
<add> };
<add>
<ide> vm.refreshData = function () {
<del> GlancesStats.getData().then(function (data) {
<del>
<del> data.isBsd = data.stats['system']['os_name'] === 'FreeBSD';
<del> data.isLinux = data.stats['system']['os_name'] === 'Linux';
<del> data.isMac = data.stats['system']['os_name'] === 'Darwin';
<del> data.isWindows = data.stats['system']['os_name'] === 'Windows';
<del>
<del> vm.stats = data;
<del> vm.is_disconnected = false;
<del> vm.dataLoaded = true;
<del> }, function() {
<del> vm.is_disconnected = true;
<del> });
<add> GlancesStats.getData().then(refreshDataSuccess, refreshDataError);
<add> };
<add>
<add> var loadPromise;
<add> var cancelNextLoad = function() {
<add> $timeout.cancel(loadPromise);
<add> };
<add>
<add> var nextLoad = function() {
<add> cancelNextLoad();
<add> loadPromise = $timeout(vm.refreshData, REFRESH_TIME * 1000); // in milliseconds
<ide> };
<ide>
<ide> vm.refreshData();
<del> $interval(function () {
<del> vm.refreshData();
<del> }, REFRESH_TIME * 1000); // in milliseconds
<ide>
<ide> Hotkeys.registerHotkey(Hotkeys.createHotkey({
<ide> key: 'm',
<ide> callback: function () {
<ide> console.log('Sort processes by MEM%');
<ide> }
<ide> }));
<add>}
<add>
<add>'use strict';
<add>
<add>glancesApp.component('glancesHelp', {
<add> controller: GlancesHelpController,
<add> controllerAs: 'vm',
<add> bindings: {
<add> },
<add> templateUrl: 'components/help/view.html'
<add>});
<add>
<add>'use strict';
<ide>
<del> Hotkeys.registerHotkey( hotkey);
<add>function GlancesHelpController(GlancesStats) {
<add> var vm = this;
<add>
<add> GlancesStats.getHelp().then(function(help) {
<add> vm.help = help;
<add> });
<ide> }
<ide>
<ide> 'use strict';
<ide> function GlancesPluginAlertController(favicoService) {
<ide>
<ide> 'use strict';
<ide>
<del>glancesApp.component('glancesHelp', {
<del> controller: GlancesHelpController,
<add>glancesApp.component('glancesPluginCloud', {
<add> controller: GlancesPluginCloudController,
<ide> controllerAs: 'vm',
<ide> bindings: {
<add> stats: '<'
<ide> },
<del> templateUrl: 'components/help/view.html'
<add> templateUrl: 'components/plugin-cloud/view.html'
<ide> });
<ide>
<ide> 'use strict';
<ide>
<del>function GlancesHelpController(GlancesStats) {
<add>function GlancesPluginCloudController() {
<ide> var vm = this;
<ide>
<del> GlancesStats.getHelp().then(function(help) {
<del> vm.help = help;
<del> });
<add> vm.provider = null;
<add> vm.instance = null;
<add>
<add> vm.$onChanges = function (changes) {
<add> var stats = changes.stats.currentValue;
<add> if (stats === undefined || stats.stats === undefined) {
<add> return;
<add> }
<add>
<add> var data = stats.stats['cloud'];
<add>
<add> if (data['ami-id'] !== undefined) {
<add> vm.provider = 'AWS EC2';
<add> vm.instance = data['instance-type'] + ' instance ' + data['instance-id'] + ' (' + data['region'] + ')';
<add> }
<add>
<add> data = undefined;
<add> };
<ide> }
<ide>
<ide> 'use strict';
<ide> function GlancesPluginDiskioController($filter) {
<ide>
<ide> 'use strict';
<ide>
<del>glancesApp.component('glancesPluginCloud', {
<del> controller: GlancesPluginCloudController,
<del> controllerAs: 'vm',
<del> bindings: {
<del> stats: '<'
<del> },
<del> templateUrl: 'components/plugin-cloud/view.html'
<del>});
<del>
<del>'use strict';
<del>
<del>function GlancesPluginCloudController() {
<del> var vm = this;
<del>
<del> vm.provider = null;
<del> vm.instance = null;
<del>
<del> vm.$onChanges = function (changes) {
<del> var stats = changes.stats.currentValue;
<del> if (stats === undefined || stats.stats === undefined) {
<del> return;
<del> }
<del>
<del> var data = stats.stats['cloud'];
<del>
<del> if (data['ami-id'] !== undefined) {
<del> vm.provider = 'AWS EC2';
<del> vm.instance = data['instance-type'] + ' instance ' + data['instance-id'] + ' (' + data['region'] + ')';
<del> }
<del>
<del> data = undefined;
<del> };
<del>}
<del>
<del>'use strict';
<del>
<ide> glancesApp.component('glancesPluginFs', {
<ide> controller: GlancesPluginFsController,
<ide> controllerAs: 'vm',
<ide> function GlancesPluginMemMoreController() {
<ide>
<ide> 'use strict';
<ide>
<del>glancesApp.component('glancesPluginNetwork', {
<del> controller: GlancesPluginNetworkController,
<add>glancesApp.component('glancesPluginMemswap', {
<add> controller: GlancesPluginMemswapController,
<ide> controllerAs: 'vm',
<ide> bindings: {
<del> stats: '<',
<del> arguments: '<'
<add> stats: '<'
<ide> },
<del> templateUrl: 'components/plugin-network/view.html'
<add> templateUrl: 'components/plugin-memswap/view.html'
<ide> });
<ide>
<ide> 'use strict';
<ide>
<del>function GlancesPluginNetworkController($filter) {
<add>function GlancesPluginMemswapController() {
<ide> var vm = this;
<add> var _view = {};
<ide>
<del> vm.networks = [];
<add> vm.percent = null;
<add> vm.total = null;
<add> vm.used = null;
<add> vm.free = null;
<ide>
<ide> vm.$onChanges = function (changes) {
<ide> var stats = changes.stats.currentValue;
<ide> if (stats === undefined || stats.stats === undefined) {
<ide> return;
<ide> }
<ide>
<del> var data = stats.stats['network'];
<add> var data = stats.stats['memswap'];
<add> _view = stats.view['memswap'];
<ide>
<del> vm.networks = [];
<del> for (var i = 0; i < data.length; i++) {
<del> var networkData = data[i];
<add> vm.percent = data['percent'];
<add> vm.total = data['total'];
<add> vm.used = data['used'];
<add> vm.free = data['free'];
<ide>
<del> var network = {
<del> 'interfaceName': networkData['interface_name'],
<del> 'rx': networkData['rx'],
<del> 'tx': networkData['tx'],
<del> 'cx': networkData['cx'],
<del> 'time_since_update': networkData['time_since_update'],
<del> 'cumulativeRx': networkData['cumulative_rx'],
<del> 'cumulativeTx': networkData['cumulative_tx'],
<del> 'cumulativeCx': networkData['cumulative_cx']
<del> };
<add> data = undefined;
<add> };
<ide>
<del> vm.networks.push(network);
<add> this.getDecoration = function (value) {
<add> if (_view[value] === undefined) {
<add> return;
<ide> }
<ide>
<del> vm.networks = $filter('orderBy')(vm.networks, 'interfaceName');
<del>
<del> data = undefined;
<add> return _view[value].decoration.toLowerCase();
<ide> };
<ide> }
<ide>
<ide> 'use strict';
<ide>
<del>glancesApp.component('glancesPluginMemswap', {
<del> controller: GlancesPluginMemswapController,
<add>glancesApp.component('glancesPluginNetwork', {
<add> controller: GlancesPluginNetworkController,
<ide> controllerAs: 'vm',
<ide> bindings: {
<del> stats: '<'
<add> stats: '<',
<add> arguments: '<'
<ide> },
<del> templateUrl: 'components/plugin-memswap/view.html'
<add> templateUrl: 'components/plugin-network/view.html'
<ide> });
<ide>
<ide> 'use strict';
<ide>
<del>function GlancesPluginMemswapController() {
<add>function GlancesPluginNetworkController($filter) {
<ide> var vm = this;
<del> var _view = {};
<ide>
<del> vm.percent = null;
<del> vm.total = null;
<del> vm.used = null;
<del> vm.free = null;
<add> vm.networks = [];
<ide>
<ide> vm.$onChanges = function (changes) {
<ide> var stats = changes.stats.currentValue;
<ide> if (stats === undefined || stats.stats === undefined) {
<ide> return;
<ide> }
<ide>
<del> var data = stats.stats['memswap'];
<del> _view = stats.view['memswap'];
<add> var data = stats.stats['network'];
<ide>
<del> vm.percent = data['percent'];
<del> vm.total = data['total'];
<del> vm.used = data['used'];
<del> vm.free = data['free'];
<add> vm.networks = [];
<add> for (var i = 0; i < data.length; i++) {
<add> var networkData = data[i];
<ide>
<del> data = undefined;
<del> };
<add> var network = {
<add> 'interfaceName': networkData['interface_name'],
<add> 'rx': networkData['rx'],
<add> 'tx': networkData['tx'],
<add> 'cx': networkData['cx'],
<add> 'time_since_update': networkData['time_since_update'],
<add> 'cumulativeRx': networkData['cumulative_rx'],
<add> 'cumulativeTx': networkData['cumulative_tx'],
<add> 'cumulativeCx': networkData['cumulative_cx']
<add> };
<ide>
<del> this.getDecoration = function (value) {
<del> if (_view[value] === undefined) {
<del> return;
<add> vm.networks.push(network);
<ide> }
<ide>
<del> return _view[value].decoration.toLowerCase();
<add> vm.networks = $filter('orderBy')(vm.networks, 'interfaceName');
<add>
<add> data = undefined;
<ide> };
<ide> }
<ide>
<ide> function GlancesPluginPortsController() {
<ide>
<ide> 'use strict';
<ide>
<add>glancesApp.component('glancesPluginProcess', {
<add> controller: GlancesPluginProcessController,
<add> controllerAs: 'vm',
<add> bindings: {
<add> stats: '<',
<add> arguments: '<'
<add> },
<add> templateUrl: 'components/plugin-process/view.html'
<add>});
<add>
<add>'use strict';
<add>
<add>function GlancesPluginProcessController() {
<add> var vm = this;
<add>
<add> vm.sorter = {
<add> column: "cpu_percent",
<add> auto: true,
<add> isReverseColumn: function (column) {
<add> return !(column === 'username' || column === 'name');
<add> },
<add> getColumnLabel: function (column) {
<add> if (_.isEqual(column, ['io_read', 'io_write'])) {
<add> return 'io_counters';
<add> } else {
<add> return column;
<add> }
<add> }
<add> };
<add>}
<add>
<add>'use strict';
<add>
<ide> glancesApp.component('glancesPluginProcesscount', {
<ide> controller: GlancesPluginProcesscountController,
<ide> controllerAs: 'vm',
<ide> function GlancesPluginProcesscountController() {
<ide>
<ide> 'use strict';
<ide>
<del>glancesApp.component('glancesPluginProcess', {
<del> controller: GlancesPluginProcessController,
<del> controllerAs: 'vm',
<del> bindings: {
<del> stats: '<',
<del> arguments: '<'
<del> },
<del> templateUrl: 'components/plugin-process/view.html'
<del>});
<del>
<del>'use strict';
<del>
<del>function GlancesPluginProcessController() {
<del> var vm = this;
<del>
<del> vm.sorter = {
<del> column: "cpu_percent",
<del> auto: true,
<del> isReverseColumn: function (column) {
<del> return !(column === 'username' || column === 'name');
<del> },
<del> getColumnLabel: function (column) {
<del> if (_.isEqual(column, ['io_read', 'io_write'])) {
<del> return 'io_counters';
<del> } else {
<del> return column;
<del> }
<del> }
<del> };
<del>}
<del>
<del>'use strict';
<del>
<ide> glancesApp.component('glancesPluginProcesslist', {
<ide> controller: GlancesPluginProcesslistController,
<ide> controllerAs: 'vm',
<ide> function GlancesPluginQuicklookController() {
<ide>
<ide> 'use strict';
<ide>
<del>glancesApp.component('glancesPluginUptime', {
<del> controller: GlancesPluginUptimeController,
<del> controllerAs: 'vm',
<del> bindings: {
<del> stats: '<'
<del> },
<del> templateUrl: 'components/plugin-uptime/view.html'
<del>});
<del>
<del>'use strict';
<del>
<del>function GlancesPluginUptimeController() {
<del> var vm = this;
<del>
<del> vm.value = null;
<del>
<del> vm.$onChanges = function (changes) {
<del> var stats = changes.stats.currentValue;
<del> if (stats === undefined || stats.stats === undefined) {
<del> return;
<del> }
<del>
<del> vm.value = stats.stats['uptime'];
<del> };
<del>}
<del>
<del>'use strict';
<del>
<ide> glancesApp.component('glancesPluginSystem', {
<ide> controller: GlancesPluginSystemController,
<ide> controllerAs: 'vm',
<ide> function GlancesPluginSystemController() {
<ide>
<ide> 'use strict';
<ide>
<add>glancesApp.component('glancesPluginUptime', {
<add> controller: GlancesPluginUptimeController,
<add> controllerAs: 'vm',
<add> bindings: {
<add> stats: '<'
<add> },
<add> templateUrl: 'components/plugin-uptime/view.html'
<add>});
<add>
<add>'use strict';
<add>
<add>function GlancesPluginUptimeController() {
<add> var vm = this;
<add>
<add> vm.value = null;
<add>
<add> vm.$onChanges = function (changes) {
<add> var stats = changes.stats.currentValue;
<add> if (stats === undefined || stats.stats === undefined) {
<add> return;
<add> }
<add>
<add> vm.value = stats.stats['uptime'];
<add> };
<add>}
<add>
<add>'use strict';
<add>
<ide> glancesApp.component('glancesPluginWifi', {
<ide> controller: GlancesPluginWifiController,
<ide> controllerAs: 'vm',
<ide><path>glances/outputs/static/public/js/templates.min.js
<ide> angular.module('glancesApp').run(['$templateCache', function($templateCache) {$templateCache.put('components/glances/view.html','<div ng-keydown="vm.onKeyDown($event)" tabindex="0">\n <div ng-if="!vm.dataLoaded" class="container-fluid" id="loading-page">\n <div class="glances-logo"></div>\n <div class="loader">Loading...</div>\n </div>\n\n <glances-help ng-if="vm.arguments.help_tag"></glances-help>\n\n <div ng-if="vm.dataLoaded && !vm.arguments.help_tag" class="container-fluid">\n <div class="top-plugin">\n <div class="row">\n <div class="col-sm-24">\n <div class="pull-left">\n <glances-plugin-system stats="vm.stats"></glances-plugin-system>\n </div>\n <div class="pull-left">\n <glances-plugin-ip stats="vm.stats" arguments="vm.arguments"></glances-plugin-ip>\n </div>\n <div class="pull-right">\n <glances-plugin-uptime stats="vm.stats"></glances-plugin-uptime>\n </div>\n </div>\n <div class="row">\n <div class="col-sm-24">\n <div class="pull-left">\n <glances-plugin-cloud stats="vm.stats"></glances-plugin-cloud>\n </div>\n </div>\n </div>\n </div>\n </div>\n\n <div class="row">\n <div class="hidden-xs hidden-sm hidden-md col-lg-6" ng-if="!vm.arguments.disable_quicklook">\n <glances-plugin-quicklook stats="vm.stats" arguments="vm.arguments"></glances-plugin-quicklook>\n </div>\n <div class="col-sm-6 col-md-8 col-lg-6" ng-if="!vm.arguments.disable_cpu && !vm.arguments.percpu">\n <glances-plugin-cpu stats="vm.stats"></glances-plugin-cpu>\n </div>\n <div class="col-sm-12 col-md-8 col-lg-6" ng-if="!vm.arguments.disable_cpu && vm.arguments.percpu">\n <glances-plugin-percpu stats="vm.stats"></glances-plugin-percpu>\n </div>\n <div class="hidden-xs hidden-sm col-md-4 col-lg-3" ng-if="!vm.arguments.disable_gpu && statsGpu.gpus.length > 0">\n <glances-plugin-gpu stats="vm.stats"></glances-plugin-gpu>\n </div>\n <div class="col-sm-6 col-md-4 col-lg-3" ng-if="!vm.argumentsdisable_mem">\n <glances-plugin-mem stats="vm.stats"></glances-plugin-mem>\n </div>\n <div class="hidden-xs hidden-sm col-md-4 col-lg-3"\n ng-if="!vm.arguments.disable_mem && !(!vm.arguments.disable_gpu && statsGpu.gpus.length > 0)">\n <glances-plugin-mem-more stats="vm.stats"></glances-plugin-mem-more>\n </div>\n <div class="col-sm-6 col-md-4 col-lg-3" ng-if="!vm.arguments.disable_memswap">\n <glances-plugin-memswap stats="vm.stats"></glances-plugin-memswap>\n </div>\n <div class="col-sm-6 col-md-4 col-lg-3" ng-if="!vm.arguments.disable_load">\n <glances-plugin-load stats="vm.stats"></glances-plugin-load>\n </div>\n </div>\n <div class="row">\n <div class="col-sm-6 sidebar" ng-if="!vm.arguments.disable_left_sidebar">\n <div class="table">\n <glances-plugin-network id="plugin-network" class="plugin table-row-group" stats="vm.stats" arguments="vm.arguments" ng-if="!vm.arguments.disable_network"></glances-plugin-network>\n <glances-plugin-wifi id="plugin-wifi" class="plugin table-row-group" stats="vm.stats" ng-if="!vm.arguments.disable_wifi"></glances-plugin-wifi>\n <glances-plugin-ports id="plugin-ports" class="plugin table-row-group" stats="vm.stats" ng-if="!vm.arguments.disable_ports"></glances-plugin-ports>\n <glances-plugin-diskio id="plugin-diskio" class="plugin table-row-group" stats="vm.stats" arguments="vm.arguments" ng-if="!vm.arguments.disable_diskio"></glances-plugin-diskio>\n <glances-plugin-fs id="plugin-fs" class="plugin table-row-group" stats="vm.stats" arguments="vm.arguments" ng-if="!vm.arguments.disable_fs"></glances-plugin-fs>\n <glances-plugin-irq stats="vm.stats" arguments="vm.arguments" 
ng-if="vm.arguments.enable_irq"></glances-plugin-irq>\n <glances-plugin-folders stats="vm.stats" arguments="vm.arguments" ng-if="!vm.arguments.disable_fs"></glances-plugin-folders>\n <glances-plugin-raid stats="vm.stats" arguments="vm.arguments"></glances-plugin-raid>\n <glances-plugin-sensors stats="vm.stats" arguments="vm.arguments" ng-if="!vm.arguments.disable_sensors"></glances-plugin-sensors>\n </div>\n </div>\n <div class="col-sm-18">\n <glances-plugin-docker stats="vm.stats" arguments="vm.arguments" ng-if="!vm.arguments.disable_docker"></glances-plugin-docker>\n <glances-plugin-alert stats="vm.stats" ng-if="!vm.arguments.disable_alert"></glances-plugin-alert>\n <glances-plugin-process stats="vm.stats" arguments="vm.arguments"></glances-plugin-process>\n </div>\n </div>\n </div>\n </div>\n</div>\n');
<del>$templateCache.put('components/plugin-alert/view.html','<section id="alerts">\n <span class="title" ng-if="!vm.hasAlerts()">No warning or critical alert detected</span>\n <span class="title" ng-if="vm.hasAlerts()">Warning or critical alerts (lasts {{vm.count()}} entries)</span>\n</section>\n<section id="alert" class="plugin">\n <div class="table">\n <div class="table-row" ng-repeat="alert in vm.getAlerts()">\n <div class="table-cell text-left">\n {{alert.begin | date : \'yyyy-MM-dd H:mm:ss\'}} ({{ alert.ongoing ? \'ongoing\' : alert.duration }}) - <span ng-hide="alert.ongoing">{{alert.level}} on</span> <span class="{{ alert.level | lowercase }}">{{alert.name}}</span> ({{alert.max}})\n </div>\n </div>\n </div>\n</section>\n');
<ide> $templateCache.put('components/help/view.html',' <div class="container-fluid">\n <div class="row">\n <div class="col-sm-12 col-lg-24">{{vm.help.version}} {{vm.help.psutil_version}}</div>\n </div>\n <div class="row"> </div>\n <div class="row">\n <div class="col-sm-12 col-lg-24">{{vm.help.configuration_file}}</div>\n </div>\n <div class="row"> </div>\n <div class="row">\n <div class="col-sm-12 col-lg-6">{{vm.help.sort_auto}}</div>\n <div class="col-sm-12 col-lg-6">{{vm.help.sort_network}}</div>\n </div>\n <div class="row">\n <div class="col-sm-12 col-lg-6">{{vm.help.sort_cpu}}</div>\n <div class="col-sm-12 col-lg-6">{{vm.help.show_hide_alert}}</div>\n </div>\n <div class="row">\n <div class="col-sm-12 col-lg-6">{{vm.help.sort_mem}}</div>\n <div class="col-sm-12 col-lg-6">{{vm.help.percpu}}</div>\n </div>\n <div class="row">\n <div class="col-sm-12 col-lg-6">{{vm.help.sort_user}}</div>\n <div class="col-sm-12 col-lg-6">{{vm.help.show_hide_ip}}</div>\n </div>\n <div class="row">\n <div class="col-sm-12 col-lg-6">{{vm.help.sort_proc}}</div>\n <div class="col-sm-12 col-lg-6">{{vm.help.enable_disable_docker}}</div>\n </div>\n <div class="row">\n <div class="col-sm-12 col-lg-6">{{vm.help.sort_io}}</div>\n <div class="col-sm-12 col-lg-6">{{vm.help.view_network_io_combination}}</div>\n </div>\n <div class="row">\n <div class="col-sm-12 col-lg-6">{{vm.help.sort_cpu_times}}</div>\n <div class="col-sm-12 col-lg-6">{{vm.help.view_cumulative_network}}</div>\n </div>\n <div class="row">\n <div class="col-sm-12 col-lg-6">{{vm.help.show_hide_diskio}}</div>\n <div class="col-sm-12 col-lg-6">{{vm.help.show_hide_filesytem_freespace}}</div>\n </div>\n <div class="row">\n <div class="col-sm-12 col-lg-6">{{vm.help.show_hide_filesystem}}</div>\n <div class="col-sm-12 col-lg-6">{{vm.help.show_hide_vm.help}}</div>\n </div>\n <div class="row">\n <div class="col-sm-12 col-lg-6">{{vm.help.show_hide_network}}</div>\n <div class="col-sm-12 col-lg-6">{{vm.help.diskio_iops}}</div>\n </div>\n <div class="row">\n <div class="col-sm-12 col-lg-6">{{vm.help.show_hide_sensors}}</div>\n <div class="col-sm-12 col-lg-6">{{vm.help.show_hide_top_menu}}</div>\n </div>\n <div class="row">\n <div class="col-sm-12 col-lg-6">{{vm.help.show_hide_left_sidebar}}</div>\n <div class="col-sm-12 col-lg-6">{{vm.help.show_hide_amp}}</div>\n </div>\n <div class="row">\n <div class="col-sm-12 col-lg-6">{{vm.help.enable_disable_process_stats}}</div>\n <div class="col-sm-12 col-lg-6">{{vm.help.show_hide_irq}}</div>\n </div>\n <div class="row">\n <div class="col-sm-12 col-lg-6">{{vm.help.enable_disable_gpu}}</div>\n <div class="col-sm-12 col-lg-6">{{vm.help.enable_disable_mean_gpu}}</div>\n </div>\n <div class="row">\n <div class="col-sm-12 col-lg-6">{{vm.help.enable_disable_quick_look}}</div>\n <div class="col-sm-12 col-lg-6"></div>\n </div>\n <div class="row">\n <div class="col-sm-12 col-lg-6">{{vm.help.enable_disable_short_processname}}</div>\n <div class="col-sm-12 col-lg-6"></div>\n </div>\n <div class="row">\n <div class="col-sm-12 col-lg-6">{{vm.help.enable_disable_ports}}</div>\n <div class="col-sm-12 col-lg-6"></div>\n </div>\n\n </div>\n');
<add>$templateCache.put('components/plugin-alert/view.html','<section id="alerts">\n <span class="title" ng-if="!vm.hasAlerts()">No warning or critical alert detected</span>\n <span class="title" ng-if="vm.hasAlerts()">Warning or critical alerts (lasts {{vm.count()}} entries)</span>\n</section>\n<section id="alert" class="plugin">\n <div class="table">\n <div class="table-row" ng-repeat="alert in vm.getAlerts()">\n <div class="table-cell text-left">\n {{alert.begin | date : \'yyyy-MM-dd H:mm:ss\'}} ({{ alert.ongoing ? \'ongoing\' : alert.duration }}) - <span ng-hide="alert.ongoing">{{alert.level}} on</span> <span class="{{ alert.level | lowercase }}">{{alert.name}}</span> ({{alert.max}})\n </div>\n </div>\n </div>\n</section>\n');
<add>$templateCache.put('components/plugin-cloud/view.html','<section id="cloud">\n <span class="title">{{ vm.provider }}</span> {{ vm.instance }}\n</section>\n');
<ide> $templateCache.put('components/plugin-cpu/view.html','<section id="cpu" class="plugin">\n <div class="row">\n <div class="col-sm-24 col-md-12 col-lg-8">\n <div class="table">\n <div class="table-row">\n <div class="table-cell text-left title">CPU</div>\n <div class="table-cell">{{ vm.total }}%</div>\n </div>\n <div class="table-row">\n <div class="table-cell text-left">user:</div>\n <div class="table-cell" ng-class="vm.getDecoration(\'user\')">\n {{ vm.user }}%\n </div>\n </div>\n <div class="table-row">\n <div class="table-cell text-left">system:</div>\n <div class="table-cell" ng-class="vm.getDecoration(\'system\')">\n {{ vm.system }}%\n </div>\n </div>\n <div class="table-row">\n <div class="table-cell text-left">idle:</div>\n <div class="table-cell">{{ vm.idle }}%</div>\n </div>\n </div>\n </div>\n <div class="hidden-xs hidden-sm col-md-12 col-lg-8">\n <div class="table">\n <div class="table-row">\n <div class="table-cell text-left">nice:</div>\n <div class="table-cell">\n {{ vm.nice }}%\n </div>\n </div>\n <div class="table-row" ng-show="vm.irq != undefined">\n <div class="table-cell text-left">irq:</div>\n <div class="table-cell">\n {{ vm.irq }}%\n </div>\n </div>\n <div class="table-row" ng-show="vm.iowait != undefined">\n <div class="table-cell text-left">iowait:</div>\n <div class="table-cell" ng-class="vm.getDecoration(\'iowait\')">\n {{ vm.iowait }}%\n </div>\n </div>\n <div class="table-row" ng-show="vm.steal != undefined">\n <div class="table-cell text-left">steal:</div>\n <div class="table-cell" ng-class="vm.getDecoration(\'steal\')">\n {{ vm.steal }}%\n </div>\n </div>\n </div>\n </div>\n <div class="hidden-xs hidden-sm hidden-md col-lg-8">\n <div class="table">\n <div class="table-row" ng-if="vm.ctx_switches">\n <div class="table-cell text-left">ctx_sw:</div>\n <div class="table-cell" ng-class="vm.getDecoration(\'ctx_switches\')">\n {{ vm.ctx_switches }}\n </div>\n </div>\n <div class="table-row" ng-if="vm.interrupts">\n <div class="table-cell text-left">inter:</div>\n <div class="table-cell">\n {{ vm.interrupts }}\n </div>\n </div>\n <div class="table-row" ng-if="vm.soft_interrupts">\n <div class="table-cell text-left">sw_int:</div>\n <div class="table-cell">\n {{ vm.soft_interrupts }}\n </div>\n </div>\n <div class="table-row" ng-if="!statsSystem.isLinux() && vm.syscalls">\n <div class="table-cell text-left">syscal:</div>\n <div class="table-cell">\n {{ vm.syscalls }}\n </div>\n </div>\n </div>\n </div>\n </div>\n</section>\n');
<ide> $templateCache.put('components/plugin-diskio/view.html','<div class="table-row" ng-show="vm.disks.length > 0">\n <div class="table-cell text-left title">DISK I/O</div>\n <div class="table-cell" ng-show="!vm.arguments.diskio_iops">R/s</div>\n <div class="table-cell" ng-show="!vm.arguments.diskio_iops">W/s</div>\n\n <div class="table-cell" ng-show="vm.arguments.diskio_iops">IOR/s</div>\n <div class="table-cell" ng-show="vm.arguments.diskio_iops">IOW/s</div>\n</div>\n<div class="table-row" ng-repeat="disk in vm.disks">\n <div class="table-cell text-left">{{(disk.alias ? disk.alias : disk.name) | min_size}}</div>\n <div class="table-cell" ng-show="!vm.arguments.diskio_iops">{{disk.bitrate.txps }}</div>\n <div class="table-cell" ng-show="!vm.arguments.diskio_iops">{{disk.bitrate.rxps }}</div>\n\n <div class="table-cell" ng-show="vm.arguments.diskio_iops">{{disk.count.txps }}</div>\n <div class="table-cell" ng-show="vm.arguments.diskio_iops">{{disk.count.rxps }}</div>\n</div>');
<del>$templateCache.put('components/plugin-cloud/view.html','<section id="cloud">\n <span class="title">{{ vm.provider }}</span> {{ vm.instance }}\n</section>\n');
<ide> $templateCache.put('components/plugin-fs/view.html','<div class="table-row">\n <div class="table-cell text-left title">FILE SYS</div>\n <div class="table-cell">\n <span ng-show="!vm.arguments.fs_free_space">Used</span>\n <span ng-show="vm.arguments.fs_free_space">Free</span>\n </div>\n <div class="table-cell">Total</div>\n</div>\n<div class="table-row" ng-repeat="fs in vm.fileSystems">\n <div class="table-cell text-left">{{ fs.shortMountPoint }} <span class="visible-lg-inline" ng-show="fs.name.length <= 20">({{ fs.name }})<span></div>\n <div class="table-cell" ng-class="vm.getDecoration(fs.mountPoint, \'used\')">\n <span ng-show="!vm.arguments.fs_free_space">{{ fs.used | bytes }}</span>\n <span ng-show="vm.arguments.fs_free_space">{{ fs.free | bytes }}</span>\n </div>\n <div class="table-cell">{{ fs.size | bytes }}</div>\n</div>');
<ide> $templateCache.put('components/plugin-ip/view.html','<section id="ip" ng-if="vm.address != undefined && !vm.arguments.disable_ip">\n - <span class="title">IP</span> <span>{{ vm.address }}/{{ vm.maskCidr }}</span> <span ng-if="vm.publicAddress" class="title">Pub</span> <span>{{ vm.publicAddress }}</span>\n</section>\n');
<ide> $templateCache.put('components/plugin-load/view.html','<section id="load" class="plugin" ng-if="vm.cpucore != undefined">\n <div class="table">\n <div class="table-row">\n <div class="table-cell text-left title">LOAD</div>\n <div class="table-cell">{{ vm.cpucore }}-core</div>\n </div>\n <div class="table-row">\n <div class="table-cell text-left">1 min:</div>\n <div class="table-cell">\n {{ vm.min1 | number : 2}}\n </div>\n </div>\n <div class="table-row">\n <div class="table-cell text-left">5 min:</div>\n <div class="table-cell" ng-class="vm.getDecoration(\'min5\')">\n {{ vm.min5 | number : 2}}\n </div>\n </div>\n <div class="table-row">\n <div class="table-cell text-left">15 min:</div>\n <div class="table-cell" ng-class="vm.getDecoration(\'min15\')">\n {{ vm.min15 | number : 2}}\n </div>\n </div>\n </div>\n</section>\n');
<ide> $templateCache.put('components/plugin-mem/view.html','<section id="mem" class="plugin">\n <div class="table">\n <div class="table-row">\n <div class="table-cell text-left title">MEM</div>\n <div class="table-cell">{{ vm.percent }}%</div>\n </div>\n <div class="table-row">\n <div class="table-cell text-left">total:</div>\n <div class="table-cell">{{ vm.total | bytes }}</div>\n </div>\n <div class="table-row">\n <div class="table-cell text-left">used:</div>\n <div class="table-cell" ng-class="vm.getDecoration(\'used\')">\n {{ vm.used | bytes:2 }}\n </div>\n </div>\n <div class="table-row">\n <div class="table-cell text-left">free:</div>\n <div class="table-cell">{{ vm.free | bytes }}</div>\n </div>\n </div>\n</section>\n');
<ide> $templateCache.put('components/plugin-mem-more/view.html','<section id="mem-more" class="plugin">\n <div class="table">\n <div class="table-row">\n <div class="table-cell text-left">active:</div>\n <div class="table-cell">{{ vm.active | bytes }}</div>\n </div>\n <div class="table-row">\n <div class="table-cell text-left">inactive:</div>\n <div class="table-cell">{{ vm.inactive | bytes }}</div>\n </div>\n <div class="table-row" ng-show="vm.buffers != undefined">\n <div class="table-cell text-left">buffers:</div>\n <div class="table-cell">{{ vm.buffers | bytes }}</div>\n </div>\n <div class="table-row" ng-show="vm.cached != undefined">\n <div class="table-cell text-left">cached:</div>\n <div class="table-cell">{{ vm.cached | bytes }}</div>\n </div>\n </div>\n</section>\n');
<del>$templateCache.put('components/plugin-network/view.html','<div class="table-row">\n <div class="table-cell text-left title">NETWORK</div>\n <div class="table-cell" ng-show="!vm.arguments.network_cumul && !vm.arguments.network_sum">Rx/s</div>\n <div class="table-cell" ng-show="!vm.arguments.network_cumul && !vm.arguments.network_sum">Tx/s</div>\n\n <div class="table-cell" ng-show="!vm.arguments.network_cumul && vm.arguments.network_sum"></div>\n <div class="table-cell" ng-show="!vm.arguments.network_cumul && vm.arguments.network_sum">Rx+Tx/s</div>\n\n <div class="table-cell" ng-show="vm.arguments.network_cumul && !vm.arguments.network_sum">Rx</div>\n <div class="table-cell" ng-show="vm.arguments.network_cumul && !vm.arguments.network_sum">Tx</div>\n\n <div class="table-cell" ng-show="vm.arguments.network_cumul && vm.arguments.network_sum"></div>\n <div class="table-cell" ng-show="vm.arguments.network_cumul && vm.arguments.network_sum">Rx+Tx</div>\n</div>\n<div class="table-row" ng-repeat="network in vm.networks">\n <div class="table-cell text-left">{{ network.interfaceName | min_size }}</div>\n <div class="table-cell" ng-show="!vm.arguments.network_cumul && !vm.arguments.network_sum">{{ vm.arguments.byte ? (network.rx / network.time_since_update | bytes) : (network.rx / network.time_since_update | bits) }}</div>\n <div class="table-cell" ng-show="!vm.arguments.network_cumul && !vm.arguments.network_sum">{{ vm.arguments.byte ? (network.tx / network.time_since_update | bytes) : (network.tx / network.time_since_update | bits) }}</div>\n\n <div class="table-cell" ng-show="!vm.arguments.network_cumul && vm.arguments.network_sum"></div>\n <div class="table-cell" ng-show="!vm.arguments.network_cumul && vm.arguments.network_sum">{{ vm.arguments.byte ? (network.cx / network.time_since_update | bytes) : (network.cx / network.time_since_update | bits) }}</div>\n\n <div class="table-cell" ng-show="vm.arguments.network_cumul && !vm.arguments.network_sum">{{ vm.arguments.byte ? (network.cumulativeRx | bytes) : (network.cumulativeRx | bits) }}</div>\n <div class="table-cell" ng-show="vm.arguments.network_cumul && !vm.arguments.network_sum">{{ vm.arguments.byte ? (network.cumulativeTx | bytes) : (network.cumulativeTx | bits) }}</div>\n\n <div class="table-cell" ng-show="vm.arguments.network_cumul && vm.arguments.network_sum"></div>\n <div class="table-cell" ng-show="vm.arguments.network_cumul && vm.arguments.network_sum">{{ vm.arguments.byte ? (network.cumulativeCx | bytes) : (network.cumulativeCx | bits) }}</div>\n</div>');
<ide> $templateCache.put('components/plugin-memswap/view.html','<section id="memswap" class="plugin">\n <div class="table">\n <div class="table-row">\n <div class="table-cell text-left title">SWAP</div>\n <div class="table-cell">{{ vm.percent }}%</div>\n </div>\n <div class="table-row">\n <div class="table-cell text-left">total:</div>\n <div class="table-cell">{{ vm.total | bytes }}</div>\n </div>\n <div class="table-row">\n <div class="table-cell text-left">used:</div>\n <div class="table-cell" ng-class="vm.getDecoration(\'used\')">\n {{ vm.used | bytes }}\n </div>\n </div>\n <div class="table-row">\n <div class="table-cell text-left">free:</div>\n <div class="table-cell">{{ vm.free | bytes }}</div>\n </div>\n </div>\n</section>\n');
<add>$templateCache.put('components/plugin-network/view.html','<div class="table-row">\n <div class="table-cell text-left title">NETWORK</div>\n <div class="table-cell" ng-show="!vm.arguments.network_cumul && !vm.arguments.network_sum">Rx/s</div>\n <div class="table-cell" ng-show="!vm.arguments.network_cumul && !vm.arguments.network_sum">Tx/s</div>\n\n <div class="table-cell" ng-show="!vm.arguments.network_cumul && vm.arguments.network_sum"></div>\n <div class="table-cell" ng-show="!vm.arguments.network_cumul && vm.arguments.network_sum">Rx+Tx/s</div>\n\n <div class="table-cell" ng-show="vm.arguments.network_cumul && !vm.arguments.network_sum">Rx</div>\n <div class="table-cell" ng-show="vm.arguments.network_cumul && !vm.arguments.network_sum">Tx</div>\n\n <div class="table-cell" ng-show="vm.arguments.network_cumul && vm.arguments.network_sum"></div>\n <div class="table-cell" ng-show="vm.arguments.network_cumul && vm.arguments.network_sum">Rx+Tx</div>\n</div>\n<div class="table-row" ng-repeat="network in vm.networks">\n <div class="table-cell text-left">{{ network.interfaceName | min_size }}</div>\n <div class="table-cell" ng-show="!vm.arguments.network_cumul && !vm.arguments.network_sum">{{ vm.arguments.byte ? (network.rx / network.time_since_update | bytes) : (network.rx / network.time_since_update | bits) }}</div>\n <div class="table-cell" ng-show="!vm.arguments.network_cumul && !vm.arguments.network_sum">{{ vm.arguments.byte ? (network.tx / network.time_since_update | bytes) : (network.tx / network.time_since_update | bits) }}</div>\n\n <div class="table-cell" ng-show="!vm.arguments.network_cumul && vm.arguments.network_sum"></div>\n <div class="table-cell" ng-show="!vm.arguments.network_cumul && vm.arguments.network_sum">{{ vm.arguments.byte ? (network.cx / network.time_since_update | bytes) : (network.cx / network.time_since_update | bits) }}</div>\n\n <div class="table-cell" ng-show="vm.arguments.network_cumul && !vm.arguments.network_sum">{{ vm.arguments.byte ? (network.cumulativeRx | bytes) : (network.cumulativeRx | bits) }}</div>\n <div class="table-cell" ng-show="vm.arguments.network_cumul && !vm.arguments.network_sum">{{ vm.arguments.byte ? (network.cumulativeTx | bytes) : (network.cumulativeTx | bits) }}</div>\n\n <div class="table-cell" ng-show="vm.arguments.network_cumul && vm.arguments.network_sum"></div>\n <div class="table-cell" ng-show="vm.arguments.network_cumul && vm.arguments.network_sum">{{ vm.arguments.byte ? (network.cumulativeCx | bytes) : (network.cumulativeCx | bits) }}</div>\n</div>');
<ide> $templateCache.put('components/plugin-percpu/view.html','<section id="percpu" class="plugin">\n <div class="table">\n <div class="table-row">\n <div class="table-cell text-left title">PER CPU</div>\n <div class="table-cell" ng-repeat="percpu in vm.cpus">{{ percpu.total }}%</div>\n </div>\n <div class="table-row">\n <div class="table-cell text-left">user:</div>\n <div class="table-cell" ng-repeat="percpu in vm.cpus" ng-class="vm.getUserAlert(percpu)">\n {{ percpu.user }}%\n </div>\n </div>\n <div class="table-row">\n <div class="table-cell text-left">system:</div>\n <div class="table-cell" ng-repeat="percpu in vm.cpus" ng-class="vm.getSystemAlert(percpu)">\n {{ percpu.system }}%\n </div>\n </div>\n <div class="table-row">\n <div class="table-cell text-left">idle:</div>\n <div class="table-cell" ng-repeat="percpu in vm.cpus">{{ percpu.idle }}%</div>\n </div>\n <div class="table-row" ng-show="vm.cpus[0].iowait">\n <div class="table-cell text-left">iowait:</div>\n <div class="table-cell" ng-repeat="percpu in vm.cpus" ng-class="vm.getSystemAlert(percpu)">\n {{ percpu.iowait }}%\n </div>\n </div>\n <div class="table-row" ng-show="vm.cpus[0].steal">\n <div class="table-cell text-left">steal:</div>\n <div class="table-cell" ng-repeat="percpu in vm.cpus" ng-class="vm.getSystemAlert(percpu)">\n {{ percpu.steal }}%\n </div>\n </div>\n </div>\n</section>\n');
<ide> $templateCache.put('components/plugin-ports/view.html','<div class="table-row" ng-repeat="port in vm.ports">\n <div class="table-cell text-left">{{(port.description ? port.description : port.host + \' \' + port.port) | min_size: 20}}</div>\n <div class="table-cell"></div>\n <div ng-switch="port.status" ng-class="vm.getDecoration(port)" class="table-cell">\n <span ng-switch-when="null">Scanning</span>\n <span ng-switch-when="false">Timeout</span>\n <span ng-switch-when="true">Open</span>\n <span ng-switch-default>{{port.status * 1000.0 | number:0}}ms</span>\n </div>\n</div>');
<del>$templateCache.put('components/plugin-processcount/view.html','<section id="processcount" class="plugin">\n <span class="title">TASKS</span>\n <span>{{ vm.total }} ({{ vm.thread }} thr),</span>\n <span>{{ vm.running }} run,</span>\n <span>{{ vm.sleeping }} slp,</span>\n <span>{{ vm.stopped }} oth</span>\n <span> sorted {{ vm.sorter.auto ? \'automatically\' : \'\' }} by {{ vm.sorter.getColumnLabel(vm.sorter.column) }}, flat view</span>\n</section>');
<ide> $templateCache.put('components/plugin-process/view.html','<div ng-show="!vm.arguments.disable_process">\n <glances-plugin-processcount stats="vm.stats" sorter="vm.sorter"></glances-plugin-processcount>\n <glances-plugin-amps stats="vm.stats" arguments="vm.arguments" ng-if="!vm.arguments.disable_amps"></glances-plugin-amps>\n <glances-plugin-processlist stats="vm.stats" arguments="vm.arguments" sorter="vm.sorter"></glances-plugin-processlist>\n</div>\n<div ng-show="vm.arguments.disable_process">PROCESSES DISABLED (press \'z\' to display)</div>');
<add>$templateCache.put('components/plugin-processcount/view.html','<section id="processcount" class="plugin">\n <span class="title">TASKS</span>\n <span>{{ vm.total }} ({{ vm.thread }} thr),</span>\n <span>{{ vm.running }} run,</span>\n <span>{{ vm.sleeping }} slp,</span>\n <span>{{ vm.stopped }} oth</span>\n <span> sorted {{ vm.sorter.auto ? \'automatically\' : \'\' }} by {{ vm.sorter.getColumnLabel(vm.sorter.column) }}, flat view</span>\n</section>');
<ide> $templateCache.put('components/plugin-processlist/view.html','<section id="processlist-plugin" class="plugin">\n <div class="table">\n <div class="table-row">\n <div sortable-th sorter="vm.sorter" column="cpu_percent" class="table-cell">CPU%</div>\n <div sortable-th sorter="vm.sorter" column="memory_percent" class="table-cell">MEM%</div>\n <div class="table-cell hidden-xs hidden-sm">VIRT</div>\n <div class="table-cell hidden-xs hidden-sm">RES</div>\n <div class="table-cell">PID</div>\n <div sortable-th sorter="vm.sorter" column="username" class="table-cell text-left">USER</div>\n <div class="table-cell">NI</div>\n <div class="table-cell">S</div>\n <div sortable-th sorter="vm.sorter" column="timemillis" class="table-cell hidden-xs hidden-sm">TIME+</div>\n <div sortable-th sorter="vm.sorter" column="io_read" class="table-cell hidden-xs hidden-sm" ng-show="vm.ioReadWritePresent">IOR/s</div>\n <div sortable-th sorter="vm.sorter" column="io_write" class="table-cell hidden-xs hidden-sm" ng-show="vm.ioReadWritePresent">IOW/s</div>\n <div sortable-th sorter="vm.sorter" column="name" class="table-cell text-left">Command</div>\n </div>\n <div class="table-row" ng-repeat="process in vm.processes | orderBy:vm.sorter.column:vm.sorter.isReverseColumn(vm.sorter.column) | limitTo: vm.getLimit()">\n <div class="table-cell" ng-class="vm.getCpuPercentAlert(process)">{{process.cpu_percent | number:1}}</div>\n <div class="table-cell" ng-class="vm.getMemoryPercentAlert(process)">{{process.memory_percent | number:1}}</div>\n <div class="table-cell hidden-xs hidden-sm">{{process.memvirt | bytes}}</div>\n <div class="table-cell hidden-xs hidden-sm">{{process.memres | bytes}}</div>\n <div class="table-cell">{{process.pid}}</div>\n <div class="table-cell text-left">{{process.username}}</div>\n <div class="table-cell" ng-class="{nice: process.isNice}">{{process.nice | exclamation}}</div>\n <div class="table-cell" ng-class="{status: process.status == \'R\'}">{{process.status}}</div>\n <div class="table-cell hidden-xs hidden-sm">\n <span ng-show="process.timeplus.hours > 0" class="highlight">{{ process.timeplus.hours }}h</span>{{ process.timeplus.minutes | leftPad:2:\'0\' }}:{{ process.timeplus.seconds | leftPad:2:\'0\' }}<span ng-show="process.timeplus.hours <= 0">.{{ process.timeplus.milliseconds | leftPad:2:\'0\' }}</span>\n </div>\n <div class="table-cell hidden-xs hidden-sm" ng-show="vm.ioReadWritePresent">{{process.ioRead}}</div>\n <div class="table-cell hidden-xs hidden-sm" ng-show="vm.ioReadWritePresent">{{process.ioWrite}}</div>\n <div class="table-cell text-left" ng-show="vm.arguments.process_short_name">{{process.name}}</div>\n <div class="table-cell text-left" ng-show="!vm.arguments.process_short_name">{{process.cmdline}}</div>\n </div>\n </div>\n</section>\n');
<ide> $templateCache.put('components/plugin-quicklook/view.html','<section id="quicklook-plugin" class="plugin">\n <div class="cpu-name">\n {{ vm.cpu_name }}\n </div>\n <div class="table">\n <div class="table-row" ng-show="!vm.arguments.percpu">\n <div class="table-cell text-left">CPU</div>\n <div class="table-cell">\n <div class="progress">\n <div class="progress-bar progress-bar-{{ vm.getDecoration(\'cpu\') }}" role="progressbar" aria-valuenow="{{ vm.cpu }}" aria-valuemin="0" aria-valuemax="100" style="width: {{ vm.cpu }}%;">\n \n </div>\n </div>\n </div>\n <div class="table-cell">\n {{ vm.cpu }}%\n </div>\n </div>\n <div class="table-row" ng-show="vm.arguments.percpu" ng-repeat="percpu in vm.percpus">\n <div class="table-cell text-left">CPU{{ percpu.number }}</div>\n <div class="table-cell">\n <div class="progress">\n <div class="progress-bar progress-bar-{{ vm.getDecoration(\'cpu\') }}" role="progressbar" aria-valuenow="{{ percpu.total }}" aria-valuemin="0" aria-valuemax="100" style="width: {{ percpu.total }}%;">\n \n </div>\n </div>\n </div>\n <div class="table-cell">\n {{ percpu.total }}%\n </div>\n </div>\n <div class="table-row">\n <div class="table-cell text-left">MEM</div>\n <div class="table-cell">\n <div class="progress">\n <div class="progress-bar progress-bar-{{ vm.getDecoration(\'mem\') }}" role="progressbar" aria-valuenow="{{ vm.mem }}" aria-valuemin="0" aria-valuemax="100" style="width: {{ vm.mem }}%;">\n \n </div>\n </div>\n </div>\n <div class="table-cell">\n {{ vm.mem }}%\n </div>\n </div>\n <div class="table-row">\n <div class="table-cell text-left">SWAP</div>\n <div class="table-cell">\n <div class="progress">\n <div class="progress-bar progress-bar-{{ vm.getDecoration(\'swap\') }}" role="progressbar" aria-valuenow="{{ vm.swap }}" aria-valuemin="0" aria-valuemax="100" style="width: {{ vm.swap }}%;">\n \n </div>\n </div>\n </div>\n <div class="table-cell">\n {{ vm.swap }}%\n </div>\n </div>\n </div>\n</section>\n');
<del>$templateCache.put('components/plugin-uptime/view.html','<section id="uptime">\n <span>Uptime: {{ vm.value }}</span>\n</section>\n');
<ide> $templateCache.put('components/plugin-system/view.html','<section id="system">\n <span ng-if="vm.isDisconnected" class="critical">Disconnected from</span>\n <span class="title">{{ vm.hostname }}</span>\n <span ng-show="vm.stats.isLinux" class="hidden-xs hidden-sm">({{ vm.humanReadableName }} / {{ vm.os.name }} {{ vm.os.version }})</span>\n <span ng-show="!vm.stats.isLinux" class="hidden-xs hidden-sm">({{ vm.os.name }} {{ vm.os.version }} {{ vm.platform }})</span>\n</section>\n');
<add>$templateCache.put('components/plugin-uptime/view.html','<section id="uptime">\n <span>Uptime: {{ vm.value }}</span>\n</section>\n');
<ide> $templateCache.put('components/plugin-wifi/view.html','<section id="wifi" class="plugin table-row-group" ng-if="vm.hotspots.length > 0">\n <div class="table-row">\n <div class="table-cell text-left title">WIFI</div>\n <div class="table-cell"></div>\n <div class="table-cell">dBm</div>\n </div>\n <div class="table-row" ng-repeat="hotspot in vm.hotspots">\n <div class="table-cell text-left">{{ hotspot.ssid|limitTo:20 }} <span ng-if="hotspot.encrypted">{{ hotspot.encryption_type }}</span></div>\n <div class="table-cell"></div>\n <div class="table-cell" ng-class="vm.getDecoration(hotspot, \'signal\')">{{ hotspot.signal }}</div>\n </div>\n</section>');}]);
<ide>\ No newline at end of file
<ide><path>glances/outputs/static/public/js/vendor.min.js
<ide> function $BrowserProvider() {
<ide> <input ng-model="newCacheValue" placeholder="Value">
<ide> <button ng-click="put(newCacheKey, newCacheValue)">Cache</button>
<ide>
<del> <p ng-if="keys.length">Cached Values</p>
<add> <p ng-show="keys.length">Cached Values</p>
<ide> <div ng-repeat="key in keys">
<ide> <span ng-bind="key"></span>
<ide> <span>: </span>
<ide> forEach(
<ide> * element is added to the DOM tree.
<ide> *
<ide> * @example
<del> <example module="ngAnimate" deps="angular-animate.js" animations="true" name="ng-if">
<add> <example module="ngAnimate" deps="angular-animate.js" animations="true" name="ng-show">
<ide> <file name="index.html">
<ide> <label>Click me: <input type="checkbox" ng-model="checked" ng-init="checked=true" /></label><br/>
<ide> Show when checked:
<del> <span ng-if="checked" class="animate-if">
<add> <span ng-show="checked" class="animate-if">
<ide> This is removed when the checkbox is unchecked.
<ide> </span>
<ide> </file>
<ide> var ngPluralizeDirective = ['$locale', '$interpolate', '$log', function($locale,
<ide> <li class="animate-repeat" ng-repeat="friend in friends | filter:q as results">
<ide> [{{$index + 1}}] {{friend.name}} who is {{friend.age}} years old.
<ide> </li>
<del> <li class="animate-repeat" ng-if="results.length === 0">
<add> <li class="animate-repeat" ng-show="results.length === 0">
<ide> <strong>No results found...</strong>
<ide> </li>
<ide> </ul> | 4 |
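The next row records an Airflow commit that merges implicitly concatenated adjacent string literals into single literals (and, where interpolation is involved, f-strings). As a brief, hedged illustration of why that pattern is worth cleaning up — generic messages invented for the example, not taken from the recorded patch — adjacent literals are joined with no separator, so a missing space is easy to overlook:

# Minimal sketch (hypothetical strings, not from the Airflow source below):
# adjacent string literals are concatenated implicitly, which silently drops
# any separator the author forgot to include.
joined = "Some parameters are missing." "Please set them."
assert joined == "Some parameters are missing.Please set them."  # note: no space

# The simplified form the commit prefers: one literal, spacing visible at a glance.
fixed = "Some parameters are missing. Please set them."

# For interpolated messages, a single f-string replaces .format() over split literals.
task_id, dag_id = "example_task", "example_dag"
log_url = f"/log?task_id={task_id}&dag_id={dag_id}"
print(fixed, log_url)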
Python | Python | simplify string expressions | 41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9 | <ide><path>airflow/contrib/operators/gcs_to_gdrive_operator.py
<ide> from airflow.providers.google.suite.transfers.gcs_to_gdrive import GCSToGoogleDriveOperator # noqa
<ide>
<ide> warnings.warn(
<del> "This module is deprecated. " "Please use `airflow.providers.google.suite.transfers.gcs_to_gdrive.",
<add> "This module is deprecated. Please use `airflow.providers.google.suite.transfers.gcs_to_gdrive.",
<ide> DeprecationWarning,
<ide> stacklevel=2,
<ide> )
<ide><path>airflow/executors/kubernetes_executor.py
<ide> def run(self) -> None:
<ide> )
<ide> except ReadTimeoutError:
<ide> self.log.warning(
<del> "There was a timeout error accessing the Kube API. " "Retrying request.", exc_info=True
<add> "There was a timeout error accessing the Kube API. Retrying request.", exc_info=True
<ide> )
<ide> time.sleep(1)
<ide> except Exception:
<ide> def adopt_launched_task(self, kube_client, pod, pod_ids: dict):
<ide> pod_id = create_pod_id(dag_id=dag_id, task_id=task_id)
<ide> if pod_id not in pod_ids:
<ide> self.log.error(
<del> "attempting to adopt task %s in dag %s" " which was not specified by database",
<add> "attempting to adopt task %s in dag %s which was not specified by database",
<ide> task_id,
<ide> dag_id,
<ide> )
<ide><path>airflow/jobs/scheduler_job.py
<ide> def manage_slas(self, dag: DAG, session: Session = None) -> None:
<ide> notification_sent = True
<ide> except Exception: # pylint: disable=broad-except
<ide> Stats.incr('sla_email_notification_failure')
<del> self.log.exception("Could not send SLA Miss email notification for" " DAG %s", dag.dag_id)
<add> self.log.exception("Could not send SLA Miss email notification for DAG %s", dag.dag_id)
<ide> # If we sent any notification, update the sla_miss table
<ide> if notification_sent:
<ide> for sla in slas:
<ide><path>airflow/kubernetes/pod_generator.py
<ide> def __init__( # pylint: disable=too-many-arguments,too-many-locals
<ide> ):
<ide> if not pod_template_file and not pod:
<ide> raise AirflowConfigException(
<del> "Podgenerator requires either a " "`pod` or a `pod_template_file` argument"
<add> "Podgenerator requires either a `pod` or a `pod_template_file` argument"
<ide> )
<ide> if pod_template_file and pod:
<del> raise AirflowConfigException("Cannot pass both `pod` " "and `pod_template_file` arguments")
<add> raise AirflowConfigException("Cannot pass both `pod` and `pod_template_file` arguments")
<ide>
<ide> if pod_template_file:
<ide> self.ud_pod = self.deserialize_model_file(pod_template_file)
<ide> def from_obj(obj) -> Optional[Union[dict, k8s.V1Pod]]:
<ide> return PodGenerator.from_legacy_obj(obj)
<ide> else:
<ide> raise TypeError(
<del> 'Cannot convert a non-kubernetes.client.models.V1Pod' 'object into a KubernetesExecutorConfig'
<add> 'Cannot convert a non-kubernetes.client.models.V1Pod object into a KubernetesExecutorConfig'
<ide> )
<ide>
<ide> @staticmethod
<ide><path>airflow/kubernetes/pod_launcher.py
<ide> def run_pod_async(self, pod: V1Pod, **kwargs):
<ide> )
<ide> self.log.debug('Pod Creation Response: %s', resp)
<ide> except Exception as e:
<del> self.log.exception('Exception when attempting ' 'to create Namespaced Pod: %s', json_pod)
<add> self.log.exception('Exception when attempting to create Namespaced Pod: %s', json_pod)
<ide> raise e
<ide> return resp
<ide>
<ide><path>airflow/models/connection.py
<ide> def log_info(self):
<ide> DeprecationWarning,
<ide> stacklevel=2,
<ide> )
<del> return "id: {}. Host: {}, Port: {}, Schema: {}, " "Login: {}, Password: {}, extra: {}".format(
<add> return "id: {}. Host: {}, Port: {}, Schema: {}, Login: {}, Password: {}, extra: {}".format(
<ide> self.conn_id,
<ide> self.host,
<ide> self.port,
<ide> def debug_info(self):
<ide> DeprecationWarning,
<ide> stacklevel=2,
<ide> )
<del> return "id: {}. Host: {}, Port: {}, Schema: {}, " "Login: {}, Password: {}, extra: {}".format(
<add> return "id: {}. Host: {}, Port: {}, Schema: {}, Login: {}, Password: {}, extra: {}".format(
<ide> self.conn_id,
<ide> self.host,
<ide> self.port,
<ide><path>airflow/models/dag.py
<ide> def clear(
<ide> if confirm_prompt:
<ide> ti_list = "\n".join([str(t) for t in tis])
<ide> question = (
<del> "You are about to delete these {count} tasks:\n" "{ti_list}\n\n" "Are you sure? (yes/no): "
<add> "You are about to delete these {count} tasks:\n{ti_list}\n\nAre you sure? (yes/no): "
<ide> ).format(count=count, ti_list=ti_list)
<ide> do_it = utils.helpers.ask_yesno(question)
<ide>
<ide> def clear_dags(
<ide> return 0
<ide> if confirm_prompt:
<ide> ti_list = "\n".join([str(t) for t in all_tis])
<del> question = (
<del> "You are about to delete these {} tasks:\n" "{}\n\n" "Are you sure? (yes/no): "
<del> ).format(count, ti_list)
<add> question = f"You are about to delete these {count} tasks:\n{ti_list}\n\nAre you sure? (yes/no): "
<ide> do_it = utils.helpers.ask_yesno(question)
<ide>
<ide> if do_it:
<ide><path>airflow/models/dagbag.py
<ide> def size(self) -> int:
<ide> def store_serialized_dags(self) -> bool:
<ide> """Whether or not to read dags from DB"""
<ide> warnings.warn(
<del> "The store_serialized_dags property has been deprecated. " "Use read_dags_from_db instead.",
<add> "The store_serialized_dags property has been deprecated. Use read_dags_from_db instead.",
<ide> DeprecationWarning,
<ide> stacklevel=2,
<ide> )
<ide><path>airflow/models/dagrun.py
<ide> def verify_integrity(self, session: Session = None):
<ide> if ti.state == State.REMOVED:
<ide> pass # ti has already been removed, just ignore it
<ide> elif self.state is not State.RUNNING and not dag.partial:
<del> self.log.warning(
<del> "Failed to get task '%s' for dag '%s'. " "Marking it as removed.", ti, dag
<del> )
<add> self.log.warning("Failed to get task '%s' for dag '%s'. Marking it as removed.", ti, dag)
<ide> Stats.incr(f"task_removed_from_dag.{dag.dag_id}", 1, 1)
<ide> ti.state = State.REMOVED
<ide>
<ide> should_restore_task = (task is not None) and ti.state == State.REMOVED
<ide> if should_restore_task:
<del> self.log.info("Restoring task '%s' which was previously " "removed from DAG '%s'", ti, dag)
<add> self.log.info("Restoring task '%s' which was previously removed from DAG '%s'", ti, dag)
<ide> Stats.incr(f"task_restored_to_dag.{dag.dag_id}", 1, 1)
<ide> ti.state = State.NONE
<ide> session.merge(ti)
<ide><path>airflow/models/serialized_dag.py
<ide> def read_all_dags(cls, session: Session = None) -> Dict[str, 'SerializedDAG']:
<ide> dags[row.dag_id] = dag
<ide> else:
<ide> log.warning(
<del> "dag_id Mismatch in DB: Row with dag_id '%s' has Serialised DAG " "with '%s' dag_id",
<add> "dag_id Mismatch in DB: Row with dag_id '%s' has Serialised DAG with '%s' dag_id",
<ide> row.dag_id,
<ide> dag.dag_id,
<ide> )
<ide> def remove_deleted_dags(cls, alive_dag_filelocs: List[str], session=None):
<ide> alive_fileloc_hashes = [DagCode.dag_fileloc_hash(fileloc) for fileloc in alive_dag_filelocs]
<ide>
<ide> log.debug(
<del> "Deleting Serialized DAGs (for which DAG files are deleted) " "from %s table ", cls.__tablename__
<add> "Deleting Serialized DAGs (for which DAG files are deleted) from %s table ", cls.__tablename__
<ide> )
<ide>
<ide> # pylint: disable=no-member
<ide><path>airflow/models/taskinstance.py
<ide> def __init__(self, task, execution_date: datetime, state: Optional[str] = None):
<ide> # make sure we have a localized execution_date stored in UTC
<ide> if execution_date and not timezone.is_localized(execution_date):
<ide> self.log.warning(
<del> "execution date %s has no timezone information. Using " "default from dag or system",
<add> "execution date %s has no timezone information. Using default from dag or system",
<ide> execution_date,
<ide> )
<ide> if self.task.has_dag():
<ide> def log_url(self):
<ide> """Log URL for TaskInstance"""
<ide> iso = quote(self.execution_date.isoformat())
<ide> base_url = conf.get('webserver', 'BASE_URL')
<del> return base_url + ( # noqa
<del> "/log?" "execution_date={iso}" "&task_id={task_id}" "&dag_id={dag_id}"
<del> ).format(iso=iso, task_id=self.task_id, dag_id=self.dag_id)
<add> return base_url + f"/log?execution_date={iso}&task_id={self.task_id}&dag_id={self.dag_id}"
<ide>
<ide> @property
<ide> def mark_success_url(self):
<ide> """URL to mark TI success"""
<ide> iso = quote(self.execution_date.isoformat())
<ide> base_url = conf.get('webserver', 'BASE_URL')
<del> return base_url + ( # noqa
<add> return base_url + (
<ide> "/success"
<del> "?task_id={task_id}"
<del> "&dag_id={dag_id}"
<del> "&execution_date={iso}"
<add> f"?task_id={self.task_id}"
<add> f"&dag_id={self.dag_id}"
<add> f"&execution_date={iso}"
<ide> "&upstream=false"
<ide> "&downstream=false"
<del> ).format(task_id=self.task_id, dag_id=self.dag_id, iso=iso)
<add> )
<ide>
<ide> @provide_session
<ide> def current_state(self, session=None) -> str:
<ide> def get_failed_dep_statuses(self, dep_context=None, session=None):
<ide> yield dep_status
<ide>
<ide> def __repr__(self):
<del> return ( # noqa
<del> "<TaskInstance: {ti.dag_id}.{ti.task_id} " "{ti.execution_date} [{ti.state}]>"
<del> ).format(ti=self)
<add> return f"<TaskInstance: {self.dag_id}.{self.task_id} {self.execution_date} [{self.state}]>"
<ide>
<ide> def next_retry_datetime(self):
<ide> """
<ide> def signal_handler(signum, frame): # pylint: disable=unused-argument
<ide> registered = task_copy.register_in_sensor_service(self, context)
<ide> except Exception as e:
<ide> self.log.warning(
<del> "Failed to register in sensor service." "Continue to run task in non smart sensor mode."
<add> "Failed to register in sensor service.Continue to run task in non smart sensor mode."
<ide> )
<ide> self.log.exception(e, exc_info=True)
<ide>
<ide><path>airflow/operators/google_api_to_s3_transfer.py
<ide> from airflow.providers.amazon.aws.transfers.google_api_to_s3 import GoogleApiToS3Operator
<ide>
<ide> warnings.warn(
<del> "This module is deprecated. " "Please use `airflow.providers.amazon.aws.transfers.google_api_to_s3`.",
<add> "This module is deprecated. Please use `airflow.providers.amazon.aws.transfers.google_api_to_s3`.",
<ide> DeprecationWarning,
<ide> stacklevel=2,
<ide> )
<ide><path>airflow/providers/amazon/aws/hooks/base_aws.py
<ide> def _fetch_saml_assertion_using_http_spegno_auth(self, saml_config: Dict[str, An
<ide> log_idp_response = 'log_idp_response' in saml_config and saml_config['log_idp_response']
<ide> if log_idp_response:
<ide> self.log.warning(
<del> 'The IDP response contains sensitive information,' ' but log_idp_response is ON (%s).',
<add> 'The IDP response contains sensitive information, but log_idp_response is ON (%s).',
<ide> log_idp_response,
<ide> )
<ide> self.log.info('idp_response.content= %s', idp_response.content)
<ide><path>airflow/providers/amazon/aws/hooks/glue.py
<ide> def get_or_create_glue_job(self) -> str:
<ide> except glue_client.exceptions.EntityNotFoundException:
<ide> self.log.info("Job doesnt exist. Now creating and running AWS Glue Job")
<ide> if self.s3_bucket is None:
<del> raise AirflowException(
<del> 'Could not initialize glue job, ' 'error: Specify Parameter `s3_bucket`'
<del> )
<add> raise AirflowException('Could not initialize glue job, error: Specify Parameter `s3_bucket`')
<ide> s3_log_path = f's3://{self.s3_bucket}/{self.s3_glue_logs}{self.job_name}'
<ide> execution_role = self.get_iam_execution_role()
<ide> try:
<ide><path>airflow/providers/amazon/aws/hooks/sagemaker.py
<ide> def check_status(
<ide> try:
<ide> response = describe_function(job_name)
<ide> status = response[key]
<del> self.log.info('Job still running for %s seconds... ' 'current status is %s', sec, status)
<add> self.log.info('Job still running for %s seconds... current status is %s', sec, status)
<ide> except KeyError:
<ide> raise AirflowException('Could not get status of the SageMaker job')
<ide> except ClientError:
<ide><path>airflow/providers/amazon/aws/operators/datasync.py
<ide> def _create_datasync_task(self) -> None:
<ide> )
<ide> if not self.source_location_arn:
<ide> raise AirflowException(
<del> "Unable to determine source LocationArn." " Does a suitable DataSync Location exist?"
<add> "Unable to determine source LocationArn. Does a suitable DataSync Location exist?"
<ide> )
<ide>
<ide> self.destination_location_arn = self.choose_location(self.candidate_destination_location_arns)
<ide> def _create_datasync_task(self) -> None:
<ide> )
<ide> if not self.destination_location_arn:
<ide> raise AirflowException(
<del> "Unable to determine destination LocationArn." " Does a suitable DataSync Location exist?"
<add> "Unable to determine destination LocationArn. Does a suitable DataSync Location exist?"
<ide> )
<ide>
<ide> self.log.info("Creating a Task.")
<ide><path>airflow/providers/amazon/aws/sensors/glue.py
<ide> def __init__(self, *, job_name: str, run_id: str, aws_conn_id: str = 'aws_defaul
<ide>
<ide> def poke(self, context):
<ide> hook = AwsGlueJobHook(aws_conn_id=self.aws_conn_id)
<del> self.log.info("Poking for job run status :" "for Glue Job %s and ID %s", self.job_name, self.run_id)
<add> self.log.info("Poking for job run status :for Glue Job %s and ID %s", self.job_name, self.run_id)
<ide> job_state = hook.get_job_state(job_name=self.job_name, run_id=self.run_id)
<ide> if job_state in self.success_states:
<ide> self.log.info("Exiting Job %s Run State: %s", self.run_id, job_state)
<ide><path>airflow/providers/apache/druid/hooks/druid.py
<ide> def submit_indexing_job(self, json_index_spec: Dict[str, Any]) -> None:
<ide> elif status == 'SUCCESS':
<ide> running = False # Great success!
<ide> elif status == 'FAILED':
<del> raise AirflowException('Druid indexing job failed, ' 'check console for more info')
<add> raise AirflowException('Druid indexing job failed, check console for more info')
<ide> else:
<ide> raise AirflowException(f'Could not get status of the job, got {status}')
<ide>
<ide><path>airflow/providers/apache/hdfs/hooks/hdfs.py
<ide> def get_conn(self) -> Any:
<ide> hdfs_namenode_principal=hdfs_namenode_principal,
<ide> )
<ide> else:
<del> raise HDFSHookException(
<del> "conn_id doesn't exist in the repository " "and autoconfig is not specified"
<del> )
<add> raise HDFSHookException("conn_id doesn't exist in the repository and autoconfig is not specified")
<ide>
<ide> return client
<ide><path>airflow/providers/apache/hive/hooks/hive.py
<ide> def _get_max_partition_from_part_specs(
<ide>
<ide> # Assuming all specs have the same keys.
<ide> if partition_key not in part_specs[0].keys():
<del> raise AirflowException("Provided partition_key {} " "is not in part_specs.".format(partition_key))
<add> raise AirflowException(f"Provided partition_key {partition_key} is not in part_specs.")
<ide> is_subset = None
<ide> if filter_map:
<ide> is_subset = set(filter_map.keys()).issubset(set(part_specs[0].keys()))
<ide> def max_partition(
<ide> if len(table.partitionKeys) == 1:
<ide> field = table.partitionKeys[0].name
<ide> elif not field:
<del> raise AirflowException("Please specify the field you want the max " "value for.")
<add> raise AirflowException("Please specify the field you want the max value for.")
<ide> elif field not in key_name_set:
<ide> raise AirflowException("Provided field is not a partition key.")
<ide>
<ide> if filter_map and not set(filter_map.keys()).issubset(key_name_set):
<del> raise AirflowException("Provided filter_map contains keys " "that are not partition key.")
<add> raise AirflowException("Provided filter_map contains keys that are not partition key.")
<ide>
<ide> part_names = client.get_partition_names(
<ide> schema, table_name, max_parts=HiveMetastoreHook.MAX_PART_COUNT
<ide> def get_conn(self, schema: Optional[str] = None) -> Any:
<ide> # pyhive uses GSSAPI instead of KERBEROS as a auth_mechanism identifier
<ide> if auth_mechanism == 'GSSAPI':
<ide> self.log.warning(
<del> "Detected deprecated 'GSSAPI' for authMechanism " "for %s. Please use 'KERBEROS' instead",
<add> "Detected deprecated 'GSSAPI' for authMechanism for %s. Please use 'KERBEROS' instead",
<ide> self.hiveserver2_conn_id, # type: ignore
<ide> )
<ide> auth_mechanism = 'KERBEROS'
<ide><path>airflow/providers/apache/pinot/hooks/pinot.py
<ide> def get_conn(self) -> Any:
<ide> path=conn.extra_dejson.get('endpoint', '/pql'),
<ide> scheme=conn.extra_dejson.get('schema', 'http'),
<ide> )
<del> self.log.info('Get the connection to pinot ' 'broker on %s', conn.host)
<add> self.log.info('Get the connection to pinot broker on %s', conn.host)
<ide> return pinot_broker_conn
<ide>
<ide> def get_uri(self) -> str:
<ide><path>airflow/providers/apache/sqoop/hooks/sqoop.py
<ide> def _get_export_format_argument(file_type: str = 'text') -> List[str]:
<ide> elif file_type == "text":
<ide> return ["--as-textfile"]
<ide> else:
<del> raise AirflowException("Argument file_type should be 'avro', " "'sequence', 'parquet' or 'text'.")
<add> raise AirflowException("Argument file_type should be 'avro', 'sequence', 'parquet' or 'text'.")
<ide>
<ide> def _import_cmd(
<ide> self,
<ide><path>airflow/providers/datadog/hooks/datadog.py
<ide> def __init__(self, datadog_conn_id: str = 'datadog_default') -> None:
<ide> self.host = conn.host
<ide>
<ide> if self.api_key is None:
<del> raise AirflowException("api_key must be specified in the " "Datadog connection details")
<add> raise AirflowException("api_key must be specified in the Datadog connection details")
<ide>
<ide> self.log.info("Setting up api keys for Datadog")
<ide> initialize(api_key=self.api_key, app_key=self.app_key)
<ide><path>airflow/providers/dingding/hooks/dingding.py
<ide> def _get_endpoint(self) -> str:
<ide> token = conn.password
<ide> if not token:
<ide> raise AirflowException(
<del> 'Dingding token is requests but get nothing, ' 'check you conn_id configuration.'
<add> 'Dingding token is requests but get nothing, check you conn_id configuration.'
<ide> )
<ide> return f'robot/send?access_token={token}'
<ide>
<ide><path>airflow/providers/discord/hooks/discord_webhook.py
<ide> def _get_webhook_endpoint(self, http_conn_id: Optional[str], webhook_endpoint: O
<ide> endpoint = extra.get('webhook_endpoint', '')
<ide> else:
<ide> raise AirflowException(
<del> 'Cannot get webhook endpoint: No valid Discord ' 'webhook endpoint or http_conn_id supplied.'
<add> 'Cannot get webhook endpoint: No valid Discord webhook endpoint or http_conn_id supplied.'
<ide> )
<ide>
<ide> # make sure endpoint matches the expected Discord webhook format
<ide> if not re.match('^webhooks/[0-9]+/[a-zA-Z0-9_-]+$', endpoint):
<ide> raise AirflowException(
<del> 'Expected Discord webhook endpoint in the form ' 'of "webhooks/{webhook.id}/{webhook.token}".'
<add> 'Expected Discord webhook endpoint in the form of "webhooks/{webhook.id}/{webhook.token}".'
<ide> )
<ide>
<ide> return endpoint
<ide> def _build_discord_payload(self) -> str:
<ide> if len(self.message) <= 2000:
<ide> payload['content'] = self.message
<ide> else:
<del> raise AirflowException('Discord message length must be 2000 or fewer ' 'characters.')
<add> raise AirflowException('Discord message length must be 2000 or fewer characters.')
<ide>
<ide> return json.dumps(payload)
<ide>
<ide><path>airflow/providers/ftp/backport_provider_setup.py
<ide> def do_setup(version_suffix_for_pypi=''):
<ide> """Perform the package apache-airflow-backport-providers-ftp setup."""
<ide> setup(
<ide> name='apache-airflow-backport-providers-ftp',
<del> description='Backport provider package ' 'apache-airflow-backport-providers-ftp for Apache Airflow',
<add> description='Backport provider package apache-airflow-backport-providers-ftp for Apache Airflow',
<ide> long_description=long_description,
<ide> long_description_content_type='text/markdown',
<ide> license='Apache License 2.0',
<ide><path>airflow/providers/google/cloud/operators/bigtable.py
<ide> def execute(self, context) -> None:
<ide> # Based on Instance.__eq__ instance with the same ID and client is
<ide> # considered as equal.
<ide> self.log.info(
<del> "The instance '%s' already exists in this project. " "Consider it as created",
<add> "The instance '%s' already exists in this project. Consider it as created",
<ide> self.instance_id,
<ide> )
<ide> return
<ide> def execute(self, context) -> None:
<ide> hook.delete_instance(project_id=self.project_id, instance_id=self.instance_id)
<ide> except google.api_core.exceptions.NotFound:
<ide> self.log.info(
<del> "The instance '%s' does not exist in project '%s'. " "Consider it as deleted",
<add> "The instance '%s' does not exist in project '%s'. Consider it as deleted",
<ide> self.instance_id,
<ide> self.project_id,
<ide> )
<ide><path>airflow/providers/google/cloud/operators/cloud_sql.py
<ide> def execute(self, context) -> None:
<ide> if not self._check_if_instance_exists(self.instance, hook):
<ide> hook.create_instance(project_id=self.project_id, body=self.body)
<ide> else:
<del> self.log.info("Cloud SQL instance with ID %s already exists. " "Aborting create.", self.instance)
<add> self.log.info("Cloud SQL instance with ID %s already exists. Aborting create.", self.instance)
<ide>
<ide> instance_resource = hook.get_instance(project_id=self.project_id, instance=self.instance)
<ide> service_account_email = instance_resource["serviceAccountEmailAddress"]
<ide> def execute(self, context) -> Optional[bool]:
<ide> )
<ide> if self._check_if_db_exists(database, hook):
<ide> self.log.info(
<del> "Cloud SQL instance with ID %s already contains database" " '%s'. Aborting database insert.",
<add> "Cloud SQL instance with ID %s already contains database '%s'. Aborting database insert.",
<ide> self.instance,
<ide> database,
<ide> )
<ide><path>airflow/providers/google/cloud/operators/dataproc.py
<ide> def _build_gce_cluster_config(self, cluster_data):
<ide>
<ide> if self.internal_ip_only:
<ide> if not self.subnetwork_uri:
<del> raise AirflowException("Set internal_ip_only to true only when" " you pass a subnetwork_uri.")
<add> raise AirflowException("Set internal_ip_only to true only when you pass a subnetwork_uri.")
<ide> cluster_data['gce_cluster_config']['internal_ip_only'] = True
<ide>
<ide> if self.tags:
<ide><path>airflow/providers/google/cloud/operators/mlengine.py
<ide> def __init__(
<ide> if not self._project_id:
<ide> raise AirflowException('Google Cloud project id is required.')
<ide> if not self._job_id:
<del> raise AirflowException('An unique job id is required for Google MLEngine prediction ' 'job.')
<add> raise AirflowException('An unique job id is required for Google MLEngine prediction job.')
<ide>
<ide> if self._uri:
<ide> if self._model_name or self._version_name:
<ide> raise AirflowException(
<del> 'Ambiguous model origin: Both uri and ' 'model/version name are provided.'
<add> 'Ambiguous model origin: Both uri and model/version name are provided.'
<ide> )
<ide>
<ide> if self._version_name and not self._model_name:
<ide> raise AirflowException(
<del> 'Missing model: Batch prediction expects ' 'a model name when a version name is provided.'
<add> 'Missing model: Batch prediction expects a model name when a version name is provided.'
<ide> )
<ide>
<ide> if not (self._uri or self._model_name):
<ide> def execute(self, context):
<ide>
<ide> if self._operation == 'create':
<ide> if not self._version:
<del> raise ValueError(
<del> "version attribute of {} could not " "be empty".format(self.__class__.__name__)
<del> )
<add> raise ValueError(f"version attribute of {self.__class__.__name__} could not be empty")
<ide> return hook.create_version(
<ide> project_id=self._project_id, model_name=self._model_name, version_spec=self._version
<ide> )
<ide> def __init__(
<ide> if not self._project_id:
<ide> raise AirflowException('Google Cloud project id is required.')
<ide> if not self._job_id:
<del> raise AirflowException('An unique job id is required for Google MLEngine training ' 'job.')
<add> raise AirflowException('An unique job id is required for Google MLEngine training job.')
<ide> if not package_uris:
<del> raise AirflowException('At least one python package is required for MLEngine ' 'Training job.')
<add> raise AirflowException('At least one python package is required for MLEngine Training job.')
<ide> if not training_python_module:
<ide> raise AirflowException(
<del> 'Python module name to run after installing required ' 'packages is required.'
<add> 'Python module name to run after installing required packages is required.'
<ide> )
<ide> if not self._region:
<ide> raise AirflowException('Google Compute Engine region is required.')
<ide><path>airflow/providers/google/cloud/operators/pubsub.py
<ide> def __init__(
<ide> # TODO: remove one day
<ide> if project:
<ide> warnings.warn(
<del> "The project parameter has been deprecated. You should pass " "the project_id parameter.",
<add> "The project parameter has been deprecated. You should pass the project_id parameter.",
<ide> DeprecationWarning,
<ide> stacklevel=2,
<ide> )
<ide> def __init__(
<ide> # TODO: remove one day
<ide> if project:
<ide> warnings.warn(
<del> "The project parameter has been deprecated. You should pass " "the project_id parameter.",
<add> "The project parameter has been deprecated. You should pass the project_id parameter.",
<ide> DeprecationWarning,
<ide> stacklevel=2,
<ide> )
<ide> def __init__(
<ide> # TODO: remove one day
<ide> if project:
<ide> warnings.warn(
<del> "The project parameter has been deprecated. You should pass " "the project_id parameter.",
<add> "The project parameter has been deprecated. You should pass the project_id parameter.",
<ide> DeprecationWarning,
<ide> stacklevel=2,
<ide> )
<ide> def __init__(
<ide> # TODO: remove one day
<ide> if project:
<ide> warnings.warn(
<del> "The project parameter has been deprecated. You should pass " "the project_id parameter.",
<add> "The project parameter has been deprecated. You should pass the project_id parameter.",
<ide> DeprecationWarning,
<ide> stacklevel=2,
<ide> )
<ide><path>airflow/providers/google/cloud/operators/spanner.py
<ide> def _validate_inputs(self) -> None:
<ide> if self.project_id == '':
<ide> raise AirflowException("The required parameter 'project_id' is empty")
<ide> if not self.instance_id:
<del> raise AirflowException("The required parameter 'instance_id' " "is empty or None")
<add> raise AirflowException("The required parameter 'instance_id' is empty or None")
<ide>
<ide> def execute(self, context) -> None:
<ide> hook = SpannerHook(
<ide> def _validate_inputs(self) -> None:
<ide> if self.project_id == '':
<ide> raise AirflowException("The required parameter 'project_id' is empty")
<ide> if not self.instance_id:
<del> raise AirflowException("The required parameter 'instance_id' " "is empty or None")
<add> raise AirflowException("The required parameter 'instance_id' is empty or None")
<ide>
<ide> def execute(self, context) -> Optional[bool]:
<ide> hook = SpannerHook(
<ide> def execute(self, context) -> Optional[bool]:
<ide> return hook.delete_instance(project_id=self.project_id, instance_id=self.instance_id)
<ide> else:
<ide> self.log.info(
<del> "Instance '%s' does not exist in project '%s'. " "Aborting delete.",
<add> "Instance '%s' does not exist in project '%s'. Aborting delete.",
<ide> self.instance_id,
<ide> self.project_id,
<ide> )
<ide> def _validate_inputs(self) -> None:
<ide> if self.project_id == '':
<ide> raise AirflowException("The required parameter 'project_id' is empty")
<ide> if not self.instance_id:
<del> raise AirflowException("The required parameter 'instance_id' " "is empty or None")
<add> raise AirflowException("The required parameter 'instance_id' is empty or None")
<ide> if not self.database_id:
<del> raise AirflowException("The required parameter 'database_id' " "is empty or None")
<add> raise AirflowException("The required parameter 'database_id' is empty or None")
<ide> if not self.query:
<ide> raise AirflowException("The required parameter 'query' is empty")
<ide>
<ide> def execute(self, context):
<ide> queries = [x.strip() for x in self.query.split(';')]
<ide> self.sanitize_queries(queries)
<ide> self.log.info(
<del> "Executing DML query(-ies) on " "projects/%s/instances/%s/databases/%s",
<add> "Executing DML query(-ies) on projects/%s/instances/%s/databases/%s",
<ide> self.project_id,
<ide> self.instance_id,
<ide> self.database_id,
<ide> def _validate_inputs(self) -> None:
<ide> if self.project_id == '':
<ide> raise AirflowException("The required parameter 'project_id' is empty")
<ide> if not self.instance_id:
<del> raise AirflowException("The required parameter 'instance_id' is empty " "or None")
<add> raise AirflowException("The required parameter 'instance_id' is empty or None")
<ide> if not self.database_id:
<del> raise AirflowException("The required parameter 'database_id' is empty" " or None")
<add> raise AirflowException("The required parameter 'database_id' is empty or None")
<ide>
<ide> def execute(self, context) -> Optional[bool]:
<ide> hook = SpannerHook(
<ide> def execute(self, context) -> Optional[bool]:
<ide> project_id=self.project_id, instance_id=self.instance_id, database_id=self.database_id
<ide> ):
<ide> self.log.info(
<del> "Creating Cloud Spanner database " "'%s' in project '%s' and instance '%s'",
<add> "Creating Cloud Spanner database '%s' in project '%s' and instance '%s'",
<ide> self.database_id,
<ide> self.project_id,
<ide> self.instance_id,
<ide> def _validate_inputs(self) -> None:
<ide> if self.project_id == '':
<ide> raise AirflowException("The required parameter 'project_id' is empty")
<ide> if not self.instance_id:
<del> raise AirflowException("The required parameter 'instance_id' is empty" " or None")
<add> raise AirflowException("The required parameter 'instance_id' is empty or None")
<ide> if not self.database_id:
<del> raise AirflowException("The required parameter 'database_id' is empty" " or None")
<add> raise AirflowException("The required parameter 'database_id' is empty or None")
<ide> if not self.ddl_statements:
<del> raise AirflowException("The required parameter 'ddl_statements' is empty" " or None")
<add> raise AirflowException("The required parameter 'ddl_statements' is empty or None")
<ide>
<ide> def execute(self, context) -> None:
<ide> hook = SpannerHook(
<ide> def _validate_inputs(self) -> None:
<ide> if self.project_id == '':
<ide> raise AirflowException("The required parameter 'project_id' is empty")
<ide> if not self.instance_id:
<del> raise AirflowException("The required parameter 'instance_id' is empty" " or None")
<add> raise AirflowException("The required parameter 'instance_id' is empty or None")
<ide> if not self.database_id:
<del> raise AirflowException("The required parameter 'database_id' is empty" " or None")
<add> raise AirflowException("The required parameter 'database_id' is empty or None")
<ide>
<ide> def execute(self, context) -> bool:
<ide> hook = SpannerHook(
<ide><path>airflow/providers/google/cloud/sensors/pubsub.py
<ide> def __init__(
<ide> # TODO: remove one day
<ide> if project:
<ide> warnings.warn(
<del> "The project parameter has been deprecated. You should pass " "the project_id parameter.",
<add> "The project parameter has been deprecated. You should pass the project_id parameter.",
<ide> DeprecationWarning,
<ide> stacklevel=2,
<ide> )
<ide><path>airflow/providers/google/cloud/transfers/azure_fileshare_to_gcs.py
<ide> def execute(self, context):
<ide> if files:
<ide> self.log.info("All done, uploaded %d files to Google Cloud Storage.", len(files))
<ide> else:
<del> self.log.info('In sync, no files needed to be uploaded to Google Cloud' 'Storage')
<add> self.log.info('In sync, no files needed to be uploaded to Google Cloud Storage')
<ide>
<ide> return files
<ide><path>airflow/providers/google/cloud/transfers/gcs_to_bigquery.py
<ide> def execute(self, context):
<ide> schema_fields = json.loads(gcs_hook.download(self.bucket, self.schema_object).decode("utf-8"))
<ide> elif self.schema_object is None and self.autodetect is False:
<ide> raise AirflowException(
<del> 'At least one of `schema_fields`, ' '`schema_object`, or `autodetect` must be passed.'
<add> 'At least one of `schema_fields`, `schema_object`, or `autodetect` must be passed.'
<ide> )
<ide> else:
<ide> schema_fields = None
<ide><path>airflow/providers/google/cloud/transfers/s3_to_gcs.py
<ide> def __init__(
<ide> 'leave it empty for the root of the bucket.'
<ide> )
<ide> raise AirflowException(
<del> 'The destination Google Cloud Storage path ' 'must end with a slash "/" or be empty.'
<add> 'The destination Google Cloud Storage path must end with a slash "/" or be empty.'
<ide> )
<ide>
<ide> def execute(self, context):
<ide> def execute(self, context):
<ide>
<ide> self.log.info("All done, uploaded %d files to Google Cloud Storage", len(files))
<ide> else:
<del> self.log.info('In sync, no files needed to be uploaded to Google Cloud' 'Storage')
<add> self.log.info('In sync, no files needed to be uploaded to Google Cloud Storage')
<ide>
<ide> return files
<ide><path>airflow/providers/google/cloud/transfers/sql_to_gcs.py
<ide> def _get_col_type_dict(self):
<ide> elif isinstance(self.schema, list):
<ide> schema = self.schema
<ide> elif self.schema is not None:
<del> self.log.warning('Using default schema due to unexpected type.' 'Should be a string or list.')
<add> self.log.warning('Using default schema due to unexpected type. Should be a string or list.')
<ide>
<ide> col_type_dict = {}
<ide> try:
<ide><path>airflow/providers/google/cloud/utils/field_validator.py
<ide> def _validate_field(self, validation_spec, dictionary_to_validate, parent=None,
<ide> )
<ide> elif field_type is None:
<ide> self.log.debug(
<del> "The type of field '%s' is not specified in '%s'. " "Not validating its content.",
<add> "The type of field '%s' is not specified in '%s'. Not validating its content.",
<ide> full_field_path,
<ide> validation_spec,
<ide> )
<ide><path>airflow/providers/grpc/backport_provider_setup.py
<ide> def do_setup(version_suffix_for_pypi=''):
<ide> """Perform the package apache-airflow-backport-providers-grpc setup."""
<ide> setup(
<ide> name='apache-airflow-backport-providers-grpc',
<del> description='Backport provider package ' 'apache-airflow-backport-providers-grpc for Apache Airflow',
<add> description='Backport provider package apache-airflow-backport-providers-grpc for Apache Airflow',
<ide> long_description=long_description,
<ide> long_description_content_type='text/markdown',
<ide> license='Apache License 2.0',
<ide><path>airflow/providers/http/backport_provider_setup.py
<ide> def do_setup(version_suffix_for_pypi=''):
<ide> """Perform the package apache-airflow-backport-providers-http setup."""
<ide> setup(
<ide> name='apache-airflow-backport-providers-http',
<del> description='Backport provider package ' 'apache-airflow-backport-providers-http for Apache Airflow',
<add> description='Backport provider package apache-airflow-backport-providers-http for Apache Airflow',
<ide> long_description=long_description,
<ide> long_description_content_type='text/markdown',
<ide> license='Apache License 2.0',
<ide><path>airflow/providers/imap/backport_provider_setup.py
<ide> def do_setup(version_suffix_for_pypi=''):
<ide> """Perform the package apache-airflow-backport-providers-imap setup."""
<ide> setup(
<ide> name='apache-airflow-backport-providers-imap',
<del> description='Backport provider package ' 'apache-airflow-backport-providers-imap for Apache Airflow',
<add> description='Backport provider package apache-airflow-backport-providers-imap for Apache Airflow',
<ide> long_description=long_description,
<ide> long_description_content_type='text/markdown',
<ide> license='Apache License 2.0',
<ide><path>airflow/providers/jdbc/backport_provider_setup.py
<ide> def do_setup(version_suffix_for_pypi=''):
<ide> """Perform the package apache-airflow-backport-providers-jdbc setup."""
<ide> setup(
<ide> name='apache-airflow-backport-providers-jdbc',
<del> description='Backport provider package ' 'apache-airflow-backport-providers-jdbc for Apache Airflow',
<add> description='Backport provider package apache-airflow-backport-providers-jdbc for Apache Airflow',
<ide> long_description=long_description,
<ide> long_description_content_type='text/markdown',
<ide> license='Apache License 2.0',
<ide><path>airflow/providers/jenkins/operators/jenkins_job_trigger.py
<ide> def jenkins_request_with_headers(jenkins_server: Jenkins, req: Request) -> Optio
<ide> response_headers = response.headers
<ide> if response_body is None:
<ide> raise jenkins.EmptyResponseException(
<del> "Error communicating with server[%s]: " "empty response" % jenkins_server.server
<add> "Error communicating with server[%s]: empty response" % jenkins_server.server
<ide> )
<ide> return {'body': response_body.decode('utf-8'), 'headers': response_headers}
<ide> except HTTPError as e:
<ide> def execute(self, context: Mapping[Any, Any]) -> Optional[str]:
<ide> ' being able to use this operator'
<ide> )
<ide> raise AirflowException(
<del> 'The jenkins_connection_id parameter is missing,' 'impossible to trigger the job'
<add> 'The jenkins_connection_id parameter is missing, impossible to trigger the job'
<ide> )
<ide>
<ide> if not self.job_name:
<ide> self.log.error("Please specify the job name to use in the job_name parameter")
<del> raise AirflowException('The job_name parameter is missing,' 'impossible to trigger the job')
<add>            raise AirflowException('The job_name parameter is missing, impossible to trigger the job')
<ide>
<ide> self.log.info(
<ide> 'Triggering the job %s on the jenkins : %s with the parameters : %s',
<ide><path>airflow/providers/jira/backport_provider_setup.py
<ide> def do_setup(version_suffix_for_pypi=''):
<ide> """Perform the package apache-airflow-backport-providers-jira setup."""
<ide> setup(
<ide> name='apache-airflow-backport-providers-jira',
<del> description='Backport provider package ' 'apache-airflow-backport-providers-jira for Apache Airflow',
<add> description='Backport provider package apache-airflow-backport-providers-jira for Apache Airflow',
<ide> long_description=long_description,
<ide> long_description_content_type='text/markdown',
<ide> license='Apache License 2.0',
<ide><path>airflow/providers/microsoft/azure/operators/azure_batch.py
<ide> def _check_inputs(self) -> Any:
<ide> ]
<ide> ):
<ide> raise AirflowException(
<del> "Some required parameters are missing.Please you must set " "all the required parameters. "
<add> "Some required parameters are missing.Please you must set all the required parameters. "
<ide> )
<ide>
<ide> def execute(self, context: dict) -> None:
<ide><path>airflow/providers/microsoft/azure/operators/azure_container_instances.py
<ide> def _monitor_logging(self, resource_group: str, name: str) -> int:
<ide> last_line_logged = self._log_last(logs, last_line_logged)
<ide> except CloudError:
<ide> self.log.exception(
<del> "Exception while getting logs from " "container instance, retrying..."
<add> "Exception while getting logs from container instance, retrying..."
<ide> )
<ide>
<ide> if state == "Terminated":
<ide><path>airflow/providers/mongo/backport_provider_setup.py
<ide> def do_setup(version_suffix_for_pypi=''):
<ide> """Perform the package apache-airflow-backport-providers-mongo setup."""
<ide> setup(
<ide> name='apache-airflow-backport-providers-mongo',
<del> description='Backport provider package ' 'apache-airflow-backport-providers-mongo for Apache Airflow',
<add> description='Backport provider package apache-airflow-backport-providers-mongo for Apache Airflow',
<ide> long_description=long_description,
<ide> long_description_content_type='text/markdown',
<ide> license='Apache License 2.0',
<ide><path>airflow/providers/mongo/sensors/mongo.py
<ide> def __init__(
<ide>
<ide> def poke(self, context: dict) -> bool:
<ide> self.log.info(
<del> "Sensor check existence of the document " "that matches the following query: %s", self.query
<add> "Sensor check existence of the document that matches the following query: %s", self.query
<ide> )
<ide> hook = MongoHook(self.mongo_conn_id)
<ide> return hook.find(self.collection, self.query, find_one=True) is not None
<ide><path>airflow/providers/mysql/backport_provider_setup.py
<ide> def do_setup(version_suffix_for_pypi=''):
<ide> """Perform the package apache-airflow-backport-providers-mysql setup."""
<ide> setup(
<ide> name='apache-airflow-backport-providers-mysql',
<del> description='Backport provider package ' 'apache-airflow-backport-providers-mysql for Apache Airflow',
<add> description='Backport provider package apache-airflow-backport-providers-mysql for Apache Airflow',
<ide> long_description=long_description,
<ide> long_description_content_type='text/markdown',
<ide> license='Apache License 2.0',
<ide><path>airflow/providers/odbc/backport_provider_setup.py
<ide> def do_setup(version_suffix_for_pypi=''):
<ide> """Perform the package apache-airflow-backport-providers-odbc setup."""
<ide> setup(
<ide> name='apache-airflow-backport-providers-odbc',
<del> description='Backport provider package ' 'apache-airflow-backport-providers-odbc for Apache Airflow',
<add> description='Backport provider package apache-airflow-backport-providers-odbc for Apache Airflow',
<ide> long_description=long_description,
<ide> long_description_content_type='text/markdown',
<ide> license='Apache License 2.0',
<ide><path>airflow/providers/opsgenie/hooks/opsgenie_alert.py
<ide> def _get_api_key(self) -> str:
<ide> api_key = conn.password
<ide> if not api_key:
<ide> raise AirflowException(
<del> 'Opsgenie API Key is required for this hook, ' 'please check your conn_id configuration.'
<add> 'Opsgenie API Key is required for this hook, please check your conn_id configuration.'
<ide> )
<ide> return api_key
<ide>
<ide><path>airflow/providers/oracle/hooks/oracle.py
<ide> def insert_rows(
<ide> else:
<ide> lst.append(str(cell))
<ide> values = tuple(lst)
<del> sql = 'INSERT /*+ APPEND */ ' 'INTO {} {} VALUES ({})'.format(
<del> table, target_fields, ','.join(values)
<del> )
<add> sql = 'INSERT /*+ APPEND */ INTO {} {} VALUES ({})'.format(table, target_fields, ','.join(values))
<ide> cur.execute(sql)
<ide> if i % commit_every == 0:
<ide> conn.commit() # type: ignore[attr-defined]
<ide><path>airflow/providers/qubole/hooks/qubole.py
<ide> def handle_failure_retry(context) -> None:
<ide> cmd = Command.find(cmd_id)
<ide> if cmd is not None:
<ide> if cmd.status == 'done':
<del> log.info(
<del> 'Command ID: %s has been succeeded, hence marking this ' 'TI as Success.', cmd_id
<del> )
<add> log.info('Command ID: %s has been succeeded, hence marking this TI as Success.', cmd_id)
<ide> ti.state = State.SUCCESS
<ide> elif cmd.status == 'running':
<ide> log.info('Cancelling the Qubole Command Id: %s', cmd_id)
<ide><path>airflow/providers/redis/backport_provider_setup.py
<ide> def do_setup(version_suffix_for_pypi=''):
<ide> """Perform the package apache-airflow-backport-providers-redis setup."""
<ide> setup(
<ide> name='apache-airflow-backport-providers-redis',
<del> description='Backport provider package ' 'apache-airflow-backport-providers-redis for Apache Airflow',
<add> description='Backport provider package apache-airflow-backport-providers-redis for Apache Airflow',
<ide> long_description=long_description,
<ide> long_description_content_type='text/markdown',
<ide> license='Apache License 2.0',
<ide><path>airflow/providers/samba/backport_provider_setup.py
<ide> def do_setup(version_suffix_for_pypi=''):
<ide> """Perform the package apache-airflow-backport-providers-samba setup."""
<ide> setup(
<ide> name='apache-airflow-backport-providers-samba',
<del> description='Backport provider package ' 'apache-airflow-backport-providers-samba for Apache Airflow',
<add> description='Backport provider package apache-airflow-backport-providers-samba for Apache Airflow',
<ide> long_description=long_description,
<ide> long_description_content_type='text/markdown',
<ide> license='Apache License 2.0',
<ide><path>airflow/providers/segment/hooks/segment.py
<ide> def get_conn(self) -> analytics:
<ide>
<ide> def on_error(self, error: str, items: str) -> None:
<ide> """Handles error callbacks when using Segment with segment_debug_mode set to True"""
<del> self.log.error('Encountered Segment error: %s with ' 'items: %s', error, items)
<add> self.log.error('Encountered Segment error: %s with items: %s', error, items)
<ide> raise AirflowException(f'Segment error: {error}')
<ide><path>airflow/providers/sftp/backport_provider_setup.py
<ide> def do_setup(version_suffix_for_pypi=''):
<ide> """Perform the package apache-airflow-backport-providers-sftp setup."""
<ide> setup(
<ide> name='apache-airflow-backport-providers-sftp',
<del> description='Backport provider package ' 'apache-airflow-backport-providers-sftp for Apache Airflow',
<add> description='Backport provider package apache-airflow-backport-providers-sftp for Apache Airflow',
<ide> long_description=long_description,
<ide> long_description_content_type='text/markdown',
<ide> license='Apache License 2.0',
<ide><path>airflow/providers/sftp/operators/sftp.py
<ide> def execute(self, context: Any) -> str:
<ide> self.log.info("ssh_conn_id is ignored when ssh_hook is provided.")
<ide> else:
<ide> self.log.info(
<del> "ssh_hook is not provided or invalid. " "Trying ssh_conn_id to create SSHHook."
<add> "ssh_hook is not provided or invalid. Trying ssh_conn_id to create SSHHook."
<ide> )
<ide> self.ssh_hook = SSHHook(ssh_conn_id=self.ssh_conn_id)
<ide>
<ide><path>airflow/providers/slack/backport_provider_setup.py
<ide> def do_setup(version_suffix_for_pypi=''):
<ide> """Perform the package apache-airflow-backport-providers-slack setup."""
<ide> setup(
<ide> name='apache-airflow-backport-providers-slack',
<del> description='Backport provider package ' 'apache-airflow-backport-providers-slack for Apache Airflow',
<add> description='Backport provider package apache-airflow-backport-providers-slack for Apache Airflow',
<ide> long_description=long_description,
<ide> long_description_content_type='text/markdown',
<ide> license='Apache License 2.0',
<ide><path>airflow/providers/snowflake/example_dags/example_snowflake.py
<ide> table=SNOWFLAKE_SAMPLE_TABLE,
<ide> schema=SNOWFLAKE_SCHEMA,
<ide> stage=SNOWFLAKE_STAGE,
<del> file_format="(type = 'CSV'," "field_delimiter = ';')",
<add> file_format="(type = 'CSV',field_delimiter = ';')",
<ide> dag=dag,
<ide> )
<ide>
<ide><path>airflow/providers/ssh/backport_provider_setup.py
<ide> def do_setup(version_suffix_for_pypi=''):
<ide> """Perform the package apache-airflow-backport-providers-ssh setup."""
<ide> setup(
<ide> name='apache-airflow-backport-providers-ssh',
<del> description='Backport provider package ' 'apache-airflow-backport-providers-ssh for Apache Airflow',
<add> description='Backport provider package apache-airflow-backport-providers-ssh for Apache Airflow',
<ide> long_description=long_description,
<ide> long_description_content_type='text/markdown',
<ide> license='Apache License 2.0',
<ide><path>airflow/providers/ssh/operators/ssh.py
<ide> def execute(self, context) -> Union[bytes, str, bool]:
<ide> self.log.info("ssh_conn_id is ignored when ssh_hook is provided.")
<ide> else:
<ide> self.log.info(
<del> "ssh_hook is not provided or invalid. " "Trying ssh_conn_id to create SSHHook."
<add> "ssh_hook is not provided or invalid. Trying ssh_conn_id to create SSHHook."
<ide> )
<ide> self.ssh_hook = SSHHook(ssh_conn_id=self.ssh_conn_id, timeout=self.timeout)
<ide>
<ide><path>airflow/task/task_runner/cgroup_task_runner.py
<ide> def start(self):
<ide> cgroups.get("memory") and cgroups.get("memory") != "/"
<ide> ):
<ide> self.log.debug(
<del> "Already running in a cgroup (cpu: %s memory: %s) so not " "creating another one",
<add> "Already running in a cgroup (cpu: %s memory: %s) so not creating another one",
<ide> cgroups.get("cpu"),
<ide> cgroups.get("memory"),
<ide> )
<ide><path>airflow/ti_deps/deps/prev_dagrun_dep.py
<ide> class PrevDagrunDep(BaseTIDep):
<ide> def _get_dep_statuses(self, ti, session, dep_context):
<ide> if dep_context.ignore_depends_on_past:
<ide> yield self._passing_status(
<del> reason="The context specified that the state of past DAGs could be " "ignored."
<add> reason="The context specified that the state of past DAGs could be ignored."
<ide> )
<ide> return
<ide>
<ide><path>airflow/ti_deps/deps/ready_to_reschedule.py
<ide> def _get_dep_statuses(self, ti, session, dep_context):
<ide> """
<ide> if dep_context.ignore_in_reschedule_period:
<ide> yield self._passing_status(
<del> reason="The context specified that being in a reschedule period was " "permitted."
<add> reason="The context specified that being in a reschedule period was permitted."
<ide> )
<ide> return
<ide>
<ide><path>airflow/ti_deps/deps/task_concurrency_dep.py
<ide> def _get_dep_statuses(self, ti, session, dep_context):
<ide> return
<ide>
<ide> if ti.get_num_running_task_instances(session) >= ti.task.task_concurrency:
<del> yield self._failing_status(reason="The max task concurrency " "has been reached.")
<add> yield self._failing_status(reason="The max task concurrency has been reached.")
<ide> return
<ide> else:
<del> yield self._passing_status(reason="The max task concurrency " "has not been reached.")
<add> yield self._passing_status(reason="The max task concurrency has not been reached.")
<ide> return
<ide><path>airflow/utils/dag_processing.py
<ide> def _run_parsing_loop(self):
<ide>
<ide> if max_runs_reached:
<ide> self.log.info(
<del> "Exiting dag parsing loop as all files " "have been processed %s times", self._max_runs
<add> "Exiting dag parsing loop as all files have been processed %s times", self._max_runs
<ide> )
<ide> break
<ide>
<ide> def _kill_timed_out_processors(self):
<ide> duration = now - processor.start_time
<ide> if duration > self._processor_timeout:
<ide> self.log.error(
<del> "Processor for %s with PID %s started at %s has timed out, " "killing it.",
<add> "Processor for %s with PID %s started at %s has timed out, killing it.",
<ide> file_path,
<ide> processor.pid,
<ide> processor.start_time.isoformat(),
<ide><path>airflow/utils/db.py
<ide> def check_migrations(timeout):
<ide> if source_heads == db_heads:
<ide> break
<ide> if ticker >= timeout:
<del> raise TimeoutError("There are still unapplied migrations after {} " "seconds.".format(ticker))
<add> raise TimeoutError(f"There are still unapplied migrations after {ticker} seconds.")
<ide> ticker += 1
<ide> time.sleep(1)
<ide> log.info('Waiting for migrations... %s second(s)', ticker)
<ide><path>airflow/utils/log/file_processor_handler.py
<ide> def _symlink_latest_log_directory(self):
<ide> else:
<ide> os.symlink(log_directory, latest_log_directory_path)
<ide> except OSError:
<del> logging.warning("OSError while attempting to symlink " "the latest log directory")
<add> logging.warning("OSError while attempting to symlink the latest log directory")
<ide>
<ide> def _init_file(self, filename):
<ide> """
<ide><path>airflow/utils/log/logging_mixin.py
<ide> class RedirectStdHandler(StreamHandler):
<ide> def __init__(self, stream):
<ide> if not isinstance(stream, str):
<ide> raise Exception(
<del> "Cannot use file like objects. Use 'stdout' or 'stderr'" " as a str and without 'ext://'."
<add> "Cannot use file like objects. Use 'stdout' or 'stderr' as a str and without 'ext://'."
<ide> )
<ide>
<ide> self._use_stderr = True
<ide><path>airflow/www/views.py
<ide> def task(self):
<ide> dag = current_app.dag_bag.get_dag(dag_id)
<ide>
<ide> if not dag or task_id not in dag.task_ids:
<del> flash("Task [{}.{}] doesn't seem to exist" " at the moment".format(dag_id, task_id), "error")
<add> flash(f"Task [{dag_id}.{task_id}] doesn't seem to exist at the moment", "error")
<ide> return redirect(url_for('Airflow.index'))
<ide> task = copy.copy(dag.get_task(task_id))
<ide> task.resolve_template_files()
<ide> def xcom(self, session=None):
<ide> ti = session.query(ti_db).filter(ti_db.dag_id == dag_id and ti_db.task_id == task_id).first()
<ide>
<ide> if not ti:
<del> flash("Task [{}.{}] doesn't seem to exist" " at the moment".format(dag_id, task_id), "error")
<add> flash(f"Task [{dag_id}.{task_id}] doesn't seem to exist at the moment", "error")
<ide> return redirect(url_for('Airflow.index'))
<ide>
<ide> xcomlist = (
<ide> def run(self):
<ide> ignore_ti_state=ignore_ti_state,
<ide> )
<ide> executor.heartbeat()
<del> flash("Sent {} to the message queue, " "it should start any moment now.".format(ti))
<add> flash(f"Sent {ti} to the message queue, it should start any moment now.")
<ide> return redirect(origin)
<ide>
<ide> @expose('/delete', methods=['POST'])
<ide> def delete(self):
<ide> flash(f"DAG with id {dag_id} not found. Cannot delete", 'error')
<ide> return redirect(request.referrer)
<ide> except DagFileExists:
<del> flash("Dag id {} is still in DagBag. " "Remove the DAG file first.".format(dag_id), 'error')
<add> flash(f"Dag id {dag_id} is still in DagBag. Remove the DAG file first.", 'error')
<ide> return redirect(request.referrer)
<ide>
<del> flash("Deleting DAG with id {}. May take a couple minutes to fully" " disappear.".format(dag_id))
<add> flash(f"Deleting DAG with id {dag_id}. May take a couple minutes to fully disappear.")
<ide>
<ide> # Upon success return to origin.
<ide> return redirect(origin)
<ide> def trigger(self, session=None):
<ide> dag_hash=current_app.dag_bag.dags_hash.get(dag_id),
<ide> )
<ide>
<del> flash("Triggered {}, " "it should start any moment now.".format(dag_id))
<add> flash(f"Triggered {dag_id}, it should start any moment now.")
<ide> return redirect(origin)
<ide>
<ide> def _clear_dag_tis(
<ide> def _clear_dag_tis(
<ide>
<ide> response = self.render_template(
<ide> 'airflow/confirm.html',
<del> message=("Here's the list of task instances you are about " "to clear:"),
<add> message=("Here's the list of task instances you are about to clear:"),
<ide> details=details,
<ide> )
<ide>
<ide><path>tests/core/test_core.py
<ide> def test_illegal_args_forbidden(self):
<ide> illegal_argument_1234='hello?',
<ide> )
<ide> self.assertIn(
<del> ('Invalid arguments were passed to BashOperator ' '(task_id: test_illegal_args).'),
<add> ('Invalid arguments were passed to BashOperator (task_id: test_illegal_args).'),
<ide> str(ctx.exception),
<ide> )
<ide>
<ide><path>tests/kubernetes/test_pod_generator.py
<ide> def test_from_obj(self):
<ide> self.assertEqual(
<ide> result_from_pod,
<ide> expected_from_pod,
<del> "There was a discrepency" " between KubernetesExecutor and pod_override",
<add> "There was a discrepency between KubernetesExecutor and pod_override",
<ide> )
<ide>
<ide> self.assertEqual(
<ide><path>tests/providers/amazon/aws/log/test_s3_task_handler.py
<ide> def test_read(self):
<ide> log, metadata = self.s3_task_handler.read(self.ti)
<ide> self.assertEqual(
<ide> log[0][0][-1],
<del> '*** Reading remote log from s3://bucket/remote/log/location/1.log.\n' 'Log line\n\n',
<add> '*** Reading remote log from s3://bucket/remote/log/location/1.log.\nLog line\n\n',
<ide> )
<ide> self.assertEqual(metadata, [{'end_of_log': True}])
<ide>
<ide><path>tests/providers/apache/hive/hooks/test_hive.py
<ide> def test_load_file_without_create_table(self, mock_run_cli):
<ide> hook = MockHiveCliHook()
<ide> hook.load_file(filepath=filepath, table=table, create=False)
<ide>
<del> query = "LOAD DATA LOCAL INPATH '{filepath}' " "OVERWRITE INTO TABLE {table} ;\n".format(
<add> query = "LOAD DATA LOCAL INPATH '{filepath}' OVERWRITE INTO TABLE {table} ;\n".format(
<ide> filepath=filepath, table=table
<ide> )
<ide> calls = [mock.call(query)]
<ide> def test_load_file_create_table(self, mock_run_cli):
<ide> "STORED AS textfile\n;".format(table=table, fields=fields)
<ide> )
<ide>
<del> load_data = "LOAD DATA LOCAL INPATH '{filepath}' " "OVERWRITE INTO TABLE {table} ;\n".format(
<add> load_data = "LOAD DATA LOCAL INPATH '{filepath}' OVERWRITE INTO TABLE {table} ;\n".format(
<ide> filepath=filepath, table=table
<ide> )
<ide> calls = [mock.call(create_table), mock.call(load_data)]
<ide><path>tests/providers/cncf/kubernetes/sensors/test_spark_kubernetes.py
<ide> },
<ide> "status": {
<ide> "applicationState": {
<del> "errorMessage": "driver pod failed with " "ExitCode: 101, Reason: Error",
<add> "errorMessage": "driver pod failed with ExitCode: 101, Reason: Error",
<ide> "state": "FAILED",
<ide> },
<ide> "driverInfo": {
<ide><path>tests/providers/google/cloud/operators/test_cloud_sql.py
<ide> def test_create_should_validate_non_empty_fields(self, mock_hook):
<ide> )
<ide> op.execute(None)
<ide> err = cm.exception
<del> self.assertIn("The body field 'settings.tier' can't be empty. " "Please provide a value.", str(err))
<add> self.assertIn("The body field 'settings.tier' can't be empty. Please provide a value.", str(err))
<ide> mock_hook.assert_called_once_with(
<ide> api_version="v1beta4",
<ide> gcp_conn_id="google_cloud_default",
<ide><path>tests/providers/google/cloud/operators/test_dataproc.py
<ide> CLUSTER_NAME = "cluster_name"
<ide> CONFIG = {
<ide> "gce_cluster_config": {
<del> "zone_uri": "https://www.googleapis.com/compute/v1/projects/" "project_id/zones/zone",
<add> "zone_uri": "https://www.googleapis.com/compute/v1/projects/project_id/zones/zone",
<ide> "metadata": {"metadata": "data"},
<ide> "network_uri": "network_uri",
<ide> "subnetwork_uri": "subnetwork_uri",
<ide><path>tests/providers/google/cloud/operators/test_functions.py
<ide> def test_delete_execute(self, mock_hook):
<ide> @mock.patch('airflow.providers.google.cloud.operators.functions.CloudFunctionsHook')
<ide> def test_correct_name(self, mock_hook):
<ide> op = CloudFunctionDeleteFunctionOperator(
<del> name="projects/project_name/locations/project_location/functions" "/function_name", task_id="id"
<add> name="projects/project_name/locations/project_location/functions/function_name", task_id="id"
<ide> )
<ide> op.execute(None)
<ide> mock_hook.assert_called_once_with(
<ide><path>tests/providers/google/cloud/transfers/test_gcs_to_gcs.py
<ide> def test_execute_more_than_1_wildcard(self, mock_hook):
<ide>
<ide> total_wildcards = operator.source_object.count(WILDCARD)
<ide>
<del> error_msg = "Only one wildcard '[*]' is allowed in source_object parameter. " "Found {}".format(
<add> error_msg = "Only one wildcard '[*]' is allowed in source_object parameter. Found {}".format(
<ide> total_wildcards
<ide> )
<ide>
<ide><path>tests/providers/google/marketing_platform/operators/test_analytics.py
<ide>
<ide>
<ide> class TestGoogleAnalyticsListAccountsOperator(unittest.TestCase):
<del> @mock.patch("airflow.providers.google.marketing_platform.operators." "analytics.GoogleAnalyticsHook")
<add> @mock.patch("airflow.providers.google.marketing_platform.operators.analytics.GoogleAnalyticsHook")
<ide> def test_execute(self, hook_mock):
<ide> op = GoogleAnalyticsListAccountsOperator(
<ide> api_version=API_VERSION,
<ide> def test_execute(self, hook_mock):
<ide>
<ide>
<ide> class TestGoogleAnalyticsRetrieveAdsLinksListOperator(unittest.TestCase):
<del> @mock.patch("airflow.providers.google.marketing_platform.operators." "analytics.GoogleAnalyticsHook")
<add> @mock.patch("airflow.providers.google.marketing_platform.operators.analytics.GoogleAnalyticsHook")
<ide> def test_execute(self, hook_mock):
<ide> op = GoogleAnalyticsRetrieveAdsLinksListOperator(
<ide> account_id=ACCOUNT_ID,
<ide> def test_execute(self, hook_mock):
<ide>
<ide>
<ide> class TestGoogleAnalyticsGetAdsLinkOperator(unittest.TestCase):
<del> @mock.patch("airflow.providers.google.marketing_platform.operators." "analytics.GoogleAnalyticsHook")
<add> @mock.patch("airflow.providers.google.marketing_platform.operators.analytics.GoogleAnalyticsHook")
<ide> def test_execute(self, hook_mock):
<ide> op = GoogleAnalyticsGetAdsLinkOperator(
<ide> account_id=ACCOUNT_ID,
<ide> def test_execute(self, hook_mock):
<ide>
<ide>
<ide> class TestGoogleAnalyticsDataImportUploadOperator(unittest.TestCase):
<del> @mock.patch("airflow.providers.google.marketing_platform.operators." "analytics.GoogleAnalyticsHook")
<del> @mock.patch("airflow.providers.google.marketing_platform.operators." "analytics.GCSHook")
<add> @mock.patch("airflow.providers.google.marketing_platform.operators.analytics.GoogleAnalyticsHook")
<add> @mock.patch("airflow.providers.google.marketing_platform.operators.analytics.GCSHook")
<ide> @mock.patch("airflow.providers.google.marketing_platform.operators.analytics.NamedTemporaryFile")
<ide> def test_execute(self, mock_tempfile, gcs_hook_mock, ga_hook_mock):
<ide> filename = "file/"
<ide> def test_execute(self, mock_tempfile, gcs_hook_mock, ga_hook_mock):
<ide>
<ide>
<ide> class TestGoogleAnalyticsDeletePreviousDataUploadsOperator:
<del> @mock.patch("airflow.providers.google.marketing_platform.operators." "analytics.GoogleAnalyticsHook")
<add> @mock.patch("airflow.providers.google.marketing_platform.operators.analytics.GoogleAnalyticsHook")
<ide> def test_execute(self, mock_hook):
<ide> mock_hook.return_value.list_uploads.return_value = [
<ide> {"id": 1},
<ide> def test_modify_column_headers(self):
<ide> with open(tmp.name) as f:
<ide> assert expected_data == f.read()
<ide>
<del> @mock.patch("airflow.providers.google.marketing_platform.operators." "analytics.GCSHook")
<add> @mock.patch("airflow.providers.google.marketing_platform.operators.analytics.GCSHook")
<ide> @mock.patch(
<ide> "airflow.providers.google.marketing_platform.operators."
<ide> "analytics.GoogleAnalyticsModifyFileHeadersDataImportOperator._modify_column_headers"
<ide><path>tests/providers/google/marketing_platform/operators/test_campaign_manager.py
<ide>
<ide> class TestGoogleCampaignManagerDeleteReportOperator(TestCase):
<ide> @mock.patch(
<del> "airflow.providers.google.marketing_platform.operators." "campaign_manager.GoogleCampaignManagerHook"
<add> "airflow.providers.google.marketing_platform.operators.campaign_manager.GoogleCampaignManagerHook"
<ide> )
<del> @mock.patch("airflow.providers.google.marketing_platform.operators." "campaign_manager.BaseOperator")
<add> @mock.patch("airflow.providers.google.marketing_platform.operators.campaign_manager.BaseOperator")
<ide> def test_execute(self, mock_base_op, hook_mock):
<ide> profile_id = "PROFILE_ID"
<ide> report_id = "REPORT_ID"
<ide> def test_execute(self, mock_base_op, hook_mock):
<ide>
<ide>
<ide> class TestGoogleCampaignManagerGetReportOperator(TestCase):
<del> @mock.patch("airflow.providers.google.marketing_platform.operators." "campaign_manager.http")
<del> @mock.patch("airflow.providers.google.marketing_platform.operators." "campaign_manager.tempfile")
<add> @mock.patch("airflow.providers.google.marketing_platform.operators.campaign_manager.http")
<add> @mock.patch("airflow.providers.google.marketing_platform.operators.campaign_manager.tempfile")
<ide> @mock.patch(
<del> "airflow.providers.google.marketing_platform.operators." "campaign_manager.GoogleCampaignManagerHook"
<add> "airflow.providers.google.marketing_platform.operators.campaign_manager.GoogleCampaignManagerHook"
<ide> )
<del> @mock.patch("airflow.providers.google.marketing_platform.operators." "campaign_manager.GCSHook")
<del> @mock.patch("airflow.providers.google.marketing_platform.operators." "campaign_manager.BaseOperator")
<add> @mock.patch("airflow.providers.google.marketing_platform.operators.campaign_manager.GCSHook")
<add> @mock.patch("airflow.providers.google.marketing_platform.operators.campaign_manager.BaseOperator")
<ide> @mock.patch(
<ide> "airflow.providers.google.marketing_platform.operators."
<ide> "campaign_manager.GoogleCampaignManagerDownloadReportOperator.xcom_push"
<ide> def test_execute(
<ide>
<ide> class TestGoogleCampaignManagerInsertReportOperator(TestCase):
<ide> @mock.patch(
<del> "airflow.providers.google.marketing_platform.operators." "campaign_manager.GoogleCampaignManagerHook"
<add> "airflow.providers.google.marketing_platform.operators.campaign_manager.GoogleCampaignManagerHook"
<ide> )
<del> @mock.patch("airflow.providers.google.marketing_platform.operators." "campaign_manager.BaseOperator")
<add> @mock.patch("airflow.providers.google.marketing_platform.operators.campaign_manager.BaseOperator")
<ide> @mock.patch(
<ide> "airflow.providers.google.marketing_platform.operators."
<ide> "campaign_manager.GoogleCampaignManagerInsertReportOperator.xcom_push"
<ide> def test_prepare_template(self):
<ide>
<ide> class TestGoogleCampaignManagerRunReportOperator(TestCase):
<ide> @mock.patch(
<del> "airflow.providers.google.marketing_platform.operators." "campaign_manager.GoogleCampaignManagerHook"
<add> "airflow.providers.google.marketing_platform.operators.campaign_manager.GoogleCampaignManagerHook"
<ide> )
<del> @mock.patch("airflow.providers.google.marketing_platform.operators." "campaign_manager.BaseOperator")
<add> @mock.patch("airflow.providers.google.marketing_platform.operators.campaign_manager.BaseOperator")
<ide> @mock.patch(
<ide> "airflow.providers.google.marketing_platform.operators."
<ide> "campaign_manager.GoogleCampaignManagerRunReportOperator.xcom_push"
<ide> def test_execute(self, xcom_mock, mock_base_op, hook_mock):
<ide>
<ide> class TestGoogleCampaignManagerBatchInsertConversionsOperator(TestCase):
<ide> @mock.patch(
<del> "airflow.providers.google.marketing_platform.operators." "campaign_manager.GoogleCampaignManagerHook"
<add> "airflow.providers.google.marketing_platform.operators.campaign_manager.GoogleCampaignManagerHook"
<ide> )
<del> @mock.patch("airflow.providers.google.marketing_platform.operators." "campaign_manager.BaseOperator")
<add> @mock.patch("airflow.providers.google.marketing_platform.operators.campaign_manager.BaseOperator")
<ide> def test_execute(self, mock_base_op, hook_mock):
<ide> profile_id = "PROFILE_ID"
<ide> op = GoogleCampaignManagerBatchInsertConversionsOperator(
<ide> def test_execute(self, mock_base_op, hook_mock):
<ide>
<ide> class TestGoogleCampaignManagerBatchUpdateConversionOperator(TestCase):
<ide> @mock.patch(
<del> "airflow.providers.google.marketing_platform.operators." "campaign_manager.GoogleCampaignManagerHook"
<add> "airflow.providers.google.marketing_platform.operators.campaign_manager.GoogleCampaignManagerHook"
<ide> )
<del> @mock.patch("airflow.providers.google.marketing_platform.operators." "campaign_manager.BaseOperator")
<add> @mock.patch("airflow.providers.google.marketing_platform.operators.campaign_manager.BaseOperator")
<ide> def test_execute(self, mock_base_op, hook_mock):
<ide> profile_id = "PROFILE_ID"
<ide> op = GoogleCampaignManagerBatchUpdateConversionsOperator(
<ide><path>tests/providers/google/marketing_platform/operators/test_display_video.py
<ide> class TestGoogleDisplayVideo360CreateReportOperator(TestCase):
<ide> "display_video.GoogleDisplayVideo360CreateReportOperator.xcom_push"
<ide> )
<ide> @mock.patch(
<del> "airflow.providers.google.marketing_platform.operators." "display_video.GoogleDisplayVideo360Hook"
<add> "airflow.providers.google.marketing_platform.operators.display_video.GoogleDisplayVideo360Hook"
<ide> )
<del> @mock.patch("airflow.providers.google.marketing_platform.operators." "display_video.BaseOperator")
<add> @mock.patch("airflow.providers.google.marketing_platform.operators.display_video.BaseOperator")
<ide> def test_execute(self, mock_base_op, hook_mock, xcom_mock):
<ide> body = {"body": "test"}
<ide> query_id = "TEST"
<ide> def test_prepare_template(self):
<ide>
<ide> class TestGoogleDisplayVideo360DeleteReportOperator(TestCase):
<ide> @mock.patch(
<del> "airflow.providers.google.marketing_platform.operators." "display_video.GoogleDisplayVideo360Hook"
<add> "airflow.providers.google.marketing_platform.operators.display_video.GoogleDisplayVideo360Hook"
<ide> )
<del> @mock.patch("airflow.providers.google.marketing_platform.operators." "display_video.BaseOperator")
<add> @mock.patch("airflow.providers.google.marketing_platform.operators.display_video.BaseOperator")
<ide> def test_execute(self, mock_base_op, hook_mock):
<ide> query_id = "QUERY_ID"
<ide> op = GoogleDisplayVideo360DeleteReportOperator(
<ide> def test_execute(self, mock_base_op, hook_mock):
<ide>
<ide>
<ide> class TestGoogleDisplayVideo360GetReportOperator(TestCase):
<del> @mock.patch("airflow.providers.google.marketing_platform.operators." "display_video.shutil")
<del> @mock.patch("airflow.providers.google.marketing_platform.operators." "display_video.urllib.request")
<del> @mock.patch("airflow.providers.google.marketing_platform.operators." "display_video.tempfile")
<add> @mock.patch("airflow.providers.google.marketing_platform.operators.display_video.shutil")
<add> @mock.patch("airflow.providers.google.marketing_platform.operators.display_video.urllib.request")
<add> @mock.patch("airflow.providers.google.marketing_platform.operators.display_video.tempfile")
<ide> @mock.patch(
<ide> "airflow.providers.google.marketing_platform.operators."
<ide> "display_video.GoogleDisplayVideo360DownloadReportOperator.xcom_push"
<ide> )
<del> @mock.patch("airflow.providers.google.marketing_platform.operators." "display_video.GCSHook")
<add> @mock.patch("airflow.providers.google.marketing_platform.operators.display_video.GCSHook")
<ide> @mock.patch(
<del> "airflow.providers.google.marketing_platform.operators." "display_video.GoogleDisplayVideo360Hook"
<add> "airflow.providers.google.marketing_platform.operators.display_video.GoogleDisplayVideo360Hook"
<ide> )
<del> @mock.patch("airflow.providers.google.marketing_platform.operators." "display_video.BaseOperator")
<add> @mock.patch("airflow.providers.google.marketing_platform.operators.display_video.BaseOperator")
<ide> def test_execute(
<ide> self,
<ide> mock_base_op,
<ide> def test_execute(
<ide>
<ide> class TestGoogleDisplayVideo360RunReportOperator(TestCase):
<ide> @mock.patch(
<del> "airflow.providers.google.marketing_platform.operators." "display_video.GoogleDisplayVideo360Hook"
<add> "airflow.providers.google.marketing_platform.operators.display_video.GoogleDisplayVideo360Hook"
<ide> )
<del> @mock.patch("airflow.providers.google.marketing_platform.operators." "display_video.BaseOperator")
<add> @mock.patch("airflow.providers.google.marketing_platform.operators.display_video.BaseOperator")
<ide> def test_execute(self, mock_base_op, hook_mock):
<ide> report_id = "QUERY_ID"
<ide> params = {"param": "test"}
<ide> def test_execute(self, mock_base_op, hook_mock):
<ide>
<ide> class TestGoogleDisplayVideo360DownloadLineItemsOperator(TestCase):
<ide> @mock.patch(
<del> "airflow.providers.google.marketing_platform.operators." "display_video.GoogleDisplayVideo360Hook"
<add> "airflow.providers.google.marketing_platform.operators.display_video.GoogleDisplayVideo360Hook"
<ide> )
<del> @mock.patch("airflow.providers.google.marketing_platform.operators." "display_video.GCSHook")
<del> @mock.patch("airflow.providers.google.marketing_platform.operators." "display_video.tempfile")
<add> @mock.patch("airflow.providers.google.marketing_platform.operators.display_video.GCSHook")
<add> @mock.patch("airflow.providers.google.marketing_platform.operators.display_video.tempfile")
<ide> def test_execute(self, mock_temp, gcs_hook_mock, hook_mock):
<ide> request_body = {
<ide> "filterType": "filter_type",
<ide> def test_execute(self, mock_temp, gcs_hook_mock, hook_mock):
<ide>
<ide>
<ide> class TestGoogleDisplayVideo360UploadLineItemsOperator(TestCase):
<del> @mock.patch("airflow.providers.google.marketing_platform.operators." "display_video.tempfile")
<add> @mock.patch("airflow.providers.google.marketing_platform.operators.display_video.tempfile")
<ide> @mock.patch(
<del> "airflow.providers.google.marketing_platform.operators." "display_video.GoogleDisplayVideo360Hook"
<add> "airflow.providers.google.marketing_platform.operators.display_video.GoogleDisplayVideo360Hook"
<ide> )
<del> @mock.patch("airflow.providers.google.marketing_platform.operators." "display_video.GCSHook")
<add> @mock.patch("airflow.providers.google.marketing_platform.operators.display_video.GCSHook")
<ide> def test_execute(self, gcs_hook_mock, hook_mock, mock_tempfile):
<ide> filename = "filename"
<ide> object_name = "object_name"
<ide> def test_execute(self, gcs_hook_mock, hook_mock, mock_tempfile):
<ide>
<ide> class TestGoogleDisplayVideo360SDFtoGCSOperator(TestCase):
<ide> @mock.patch(
<del> "airflow.providers.google.marketing_platform.operators." "display_video.GoogleDisplayVideo360Hook"
<add> "airflow.providers.google.marketing_platform.operators.display_video.GoogleDisplayVideo360Hook"
<ide> )
<del> @mock.patch("airflow.providers.google.marketing_platform.operators." "display_video.GCSHook")
<del> @mock.patch("airflow.providers.google.marketing_platform.operators." "display_video.tempfile")
<add> @mock.patch("airflow.providers.google.marketing_platform.operators.display_video.GCSHook")
<add> @mock.patch("airflow.providers.google.marketing_platform.operators.display_video.tempfile")
<ide> def test_execute(self, mock_temp, gcs_mock_hook, mock_hook):
<ide> operation_name = "operation_name"
<ide> operation = {"key": "value"}
<ide> def test_execute(self, mock_temp, gcs_mock_hook, mock_hook):
<ide>
<ide> class TestGoogleDisplayVideo360CreateSDFDownloadTaskOperator(TestCase):
<ide> @mock.patch(
<del> "airflow.providers.google.marketing_platform.operators." "display_video.GoogleDisplayVideo360Hook"
<add> "airflow.providers.google.marketing_platform.operators.display_video.GoogleDisplayVideo360Hook"
<ide> )
<ide> def test_execute(self, mock_hook):
<ide> body_request = {
<ide><path>tests/providers/google/marketing_platform/operators/test_search_ads.py
<ide>
<ide>
<ide> class TestGoogleSearchAdsInsertReportOperator(TestCase):
<del> @mock.patch("airflow.providers.google.marketing_platform." "operators.search_ads.GoogleSearchAdsHook")
<del> @mock.patch("airflow.providers.google.marketing_platform." "operators.search_ads.BaseOperator")
<add> @mock.patch("airflow.providers.google.marketing_platform.operators.search_ads.GoogleSearchAdsHook")
<add> @mock.patch("airflow.providers.google.marketing_platform.operators.search_ads.BaseOperator")
<ide> @mock.patch(
<ide> "airflow.providers.google.marketing_platform."
<ide> "operators.search_ads.GoogleSearchAdsInsertReportOperator.xcom_push"
<ide> def test_prepare_template(self):
<ide>
<ide>
<ide> class TestGoogleSearchAdsDownloadReportOperator(TestCase):
<del> @mock.patch("airflow.providers.google.marketing_platform." "operators.search_ads.NamedTemporaryFile")
<del> @mock.patch("airflow.providers.google.marketing_platform." "operators.search_ads.GCSHook")
<del> @mock.patch("airflow.providers.google.marketing_platform." "operators.search_ads.GoogleSearchAdsHook")
<del> @mock.patch("airflow.providers.google.marketing_platform." "operators.search_ads.BaseOperator")
<add> @mock.patch("airflow.providers.google.marketing_platform.operators.search_ads.NamedTemporaryFile")
<add> @mock.patch("airflow.providers.google.marketing_platform.operators.search_ads.GCSHook")
<add> @mock.patch("airflow.providers.google.marketing_platform.operators.search_ads.GoogleSearchAdsHook")
<add> @mock.patch("airflow.providers.google.marketing_platform.operators.search_ads.BaseOperator")
<ide> @mock.patch(
<ide> "airflow.providers.google.marketing_platform."
<ide> "operators.search_ads.GoogleSearchAdsDownloadReportOperator.xcom_push"
<ide><path>tests/providers/google/marketing_platform/sensors/test_campaign_manager.py
<ide>
<ide> class TestGoogleCampaignManagerDeleteReportOperator(TestCase):
<ide> @mock.patch(
<del> "airflow.providers.google.marketing_platform.sensors." "campaign_manager.GoogleCampaignManagerHook"
<add> "airflow.providers.google.marketing_platform.sensors.campaign_manager.GoogleCampaignManagerHook"
<ide> )
<del> @mock.patch("airflow.providers.google.marketing_platform.sensors." "campaign_manager.BaseSensorOperator")
<add> @mock.patch("airflow.providers.google.marketing_platform.sensors.campaign_manager.BaseSensorOperator")
<ide> def test_execute(self, mock_base_op, hook_mock):
<ide> profile_id = "PROFILE_ID"
<ide> report_id = "REPORT_ID"
<ide><path>tests/providers/google/marketing_platform/sensors/test_search_ads.py
<ide>
<ide>
<ide> class TestSearchAdsReportSensor(TestCase):
<del> @mock.patch("airflow.providers.google.marketing_platform.sensors." "search_ads.GoogleSearchAdsHook")
<del> @mock.patch("airflow.providers.google.marketing_platform.sensors." "search_ads.BaseSensorOperator")
<add> @mock.patch("airflow.providers.google.marketing_platform.sensors.search_ads.GoogleSearchAdsHook")
<add> @mock.patch("airflow.providers.google.marketing_platform.sensors.search_ads.BaseSensorOperator")
<ide> def test_poke(self, mock_base_op, hook_mock):
<ide> report_id = "REPORT_ID"
<ide> op = GoogleSearchAdsReportSensor(report_id=report_id, api_version=API_VERSION, task_id="test_task")
<ide><path>tests/providers/hashicorp/_internal_client/test_vault_client.py
<ide> def test_get_secret_metadata_v1(self, mock_hvac):
<ide> url="http://localhost:8180",
<ide> )
<ide> with self.assertRaisesRegex(
<del> VaultError, "Metadata might only be used with" " version 2 of the KV engine."
<add> VaultError, "Metadata might only be used with version 2 of the KV engine."
<ide> ):
<ide> vault_client.get_secret_metadata(secret_path="missing")
<ide>
<ide> def test_get_secret_including_metadata_v1(self, mock_hvac):
<ide> url="http://localhost:8180",
<ide> )
<ide> with self.assertRaisesRegex(
<del> VaultError, "Metadata might only be used with" " version 2 of the KV engine."
<add> VaultError, "Metadata might only be used with version 2 of the KV engine."
<ide> ):
<ide> vault_client.get_secret_including_metadata(secret_path="missing")
<ide>
<ide><path>tests/providers/microsoft/azure/operators/test_azure_batch.py
<ide> def test_operator_fails(self, wait_mock):
<ide> self.operator_fail.execute(None)
<ide> self.assertEqual(
<ide> str(e.exception),
<del> "Either target_dedicated_nodes or enable_auto_scale " "must be set. None was set",
<add> "Either target_dedicated_nodes or enable_auto_scale must be set. None was set",
<ide> )
<ide>
<ide> @mock.patch.object(AzureBatchHook, "wait_for_all_node_state")
<ide><path>tests/providers/mysql/hooks/test_mysql.py
<ide> def test_mysql_hook_test_bulk_dump(self, client):
<ide> elif priv == ("",):
<ide> hook.bulk_dump("INFORMATION_SCHEMA.TABLES", f"TABLES_{client}_{uuid.uuid1()}")
<ide> else:
<del> self.skipTest("Skip test_mysql_hook_test_bulk_load " "since file output is not permitted")
<add> self.skipTest("Skip test_mysql_hook_test_bulk_load since file output is not permitted")
<ide>
<ide> @parameterized.expand(
<ide> [
<ide><path>tests/providers/ssh/hooks/test_ssh.py
<ide> def setUpClass(cls) -> None:
<ide> conn_id=cls.CONN_SSH_WITH_EXTRA,
<ide> host='localhost',
<ide> conn_type='ssh',
<del> extra='{"compress" : true, "no_host_key_check" : "true", ' '"allow_host_key_change": false}',
<add> extra='{"compress" : true, "no_host_key_check" : "true", "allow_host_key_change": false}',
<ide> )
<ide> )
<ide> db.merge_conn( | 90 |
Java | Java | remove dead code of myoganodeclonefunction | d898574fb260258db923d81ab8bd6ee77fada5b1 | <ide><path>ReactAndroid/src/main/java/com/facebook/yoga/YogaConfigJNIBase.java
<ide> */
<ide> package com.facebook.yoga;
<ide>
<del>import com.facebook.soloader.SoLoader;
<del>
<ide> public abstract class YogaConfigJNIBase extends YogaConfig {
<ide>
<ide> protected long mNativePointer;
<ide> private YogaLogger mLogger;
<del> private YogaNodeCloneFunction mYogaNodeCloneFunction;
<ide>
<ide> private YogaConfigJNIBase(long nativePointer) {
<ide> if (nativePointer == 0) {
<ide><path>ReactAndroid/src/main/java/com/facebook/yoga/YogaNodeCloneFunction.java
<del>/**
<del> * Copyright (c) Facebook, Inc. and its affiliates.
<del> *
<del> * This source code is licensed under the MIT license found in the LICENSE
<del> * file in the root directory of this source tree.
<del> */
<del>package com.facebook.yoga;
<del>
<del>import com.facebook.proguard.annotations.DoNotStrip;
<del>
<del>@DoNotStrip
<del>public interface YogaNodeCloneFunction {
<del>
<del> @DoNotStrip
<del> YogaNode cloneNode(YogaNode oldNode, YogaNode parent, int childIndex);
<del>} | 2 |
Javascript | Javascript | add support for jstransform es6-call-spread | 0bc4aafb74bec3cafd10b6910525aedda2918bdb | <ide><path>vendor/fbtransform/visitors.js
<ide> var es6ObjectShortNotation =
<ide> require('jstransform/visitors/es6-object-short-notation-visitors');
<ide> var es6RestParameters = require('jstransform/visitors/es6-rest-param-visitors');
<ide> var es6Templates = require('jstransform/visitors/es6-template-visitors');
<add>var es6CallSpread =
<add> require('jstransform/visitors/es6-call-spread-visitors');
<ide> var es7SpreadProperty =
<ide> require('jstransform/visitors/es7-spread-property-visitors');
<ide> var react = require('./transforms/react');
<ide> var transformVisitors = {
<ide> 'es6-object-short-notation': es6ObjectShortNotation.visitorList,
<ide> 'es6-rest-params': es6RestParameters.visitorList,
<ide> 'es6-templates': es6Templates.visitorList,
<add> 'es6-call-spread': es6CallSpread.visitorList,
<ide> 'es7-spread-property': es7SpreadProperty.visitorList,
<ide> 'react': react.visitorList.concat(reactDisplayName.visitorList),
<ide> 'reserved-words': reservedWords.visitorList
<ide> var transformSets = {
<ide> 'es6-rest-params',
<ide> 'es6-templates',
<ide> 'es6-destructuring',
<add> 'es6-call-spread',
<ide> 'es7-spread-property'
<ide> ],
<ide> 'es3': [
<ide> var transformRunOrder = [
<ide> 'es6-rest-params',
<ide> 'es6-templates',
<ide> 'es6-destructuring',
<add> 'es6-call-spread',
<ide> 'es7-spread-property',
<ide> 'react'
<ide> ]; | 1 |
Python | Python | fix stateful lstm example | 2091bfe91187dc7903bea07cdf070f81ea1bdf56 | <ide><path>examples/lstm_stateful_seq.py
<del>'''Example script to predict sequence using stateful rnns.
<del>At least 10 epochs are required before the generated text
<del>starts sounding coherent.
<del>'''
<del>
<del>import numpy as np
<del>import matplotlib.pyplot as mpl
<del>from keras.models import Sequential
<del>from keras.layers.core import Dense
<del>from keras.layers.recurrent import LSTM
<del>
<del>
<del># since we are using stateful rnn tsteps can be set to 1
<del>tsteps = 1
<del>batch_size = 25
<del>epochs = 25
<del># number of elements ahead that are used to make the prediction
<del>lahead = 1
<del>
<del>
<del>def gen_cosine_amp(amp=100, period=25, x0=0, xn=50000, step=1, k=0.0001):
<del> """
<del> Generates an absolute cosine time series with the amplitude exponentially
<del> decreasing
<del>
<del> Keyword arguments:
<del> amp -- amplitude of the cosine function
<del> period -- period of the cosine function
<del> x0 -- initial x of the time series
<del> xn -- final x of the time series
<del> step -- step of the time series discretization
<del> k -- exponential rate
<del> """
<del> cos = np.zeros(((xn - x0) * step, 1, 1))
<del> for i in range(len(cos)):
<del> idx = x0 + i * step
<del> cos[i, 0, 0] = amp * np.cos(idx / (2 * np.pi * period))
<del> cos[i, 0, 0] = cos[i, 0, 0] * np.exp(-k * idx)
<del> return cos
<del>
<del>
<del>print('Creating Data')
<del>cos = gen_cosine_amp()
<del>print('Input shape:')
<del>print(cos.shape)
<del>print('Calculating expected predicted_out')
<del>expected_out = np.zeros((len(cos), 1))
<del>for i in range(len(cos) - lahead):
<del> expected_out[i, 0] = np.mean(cos[i + 1:i + lahead + 1])
<del>
<del>print('Output shape')
<del>print(expected_out.shape)
<del>
<del>print('Creating Model')
<del>model = Sequential()
<del>model.add(
<del> LSTM(
<del> 50,
<del> batch_input_shape=(
<del> batch_size,
<del> tsteps,
<del> 1),
<del> return_sequences=True,
<del> stateful=True))
<del>model.add(
<del> LSTM(
<del> 50,
<del> batch_input_shape=(
<del> batch_size,
<del> tsteps,
<del> 1),
<del> return_sequences=False,
<del> stateful=True))
<del>model.add(Dense(1))
<del>model.compile(loss='rmse', optimizer='rmsprop')
<del>
<del>print('Training')
<del>for i in range(epochs):
<del> model.fit(
<del> cos,
<del> expected_out,
<del> batch_size=batch_size,
<del> verbose=1,
<del> nb_epoch=1)
<del> model.reset_states()
<del>
<del>print('Predicting')
<del>predicted_out = model.predict(cos, batch_size=batch_size)
<del>
<del>print('Ploting Results')
<del>mpl.subplot(2, 1, 1)
<del>mpl.plot(expected_out)
<del>mpl.title('Expected')
<del>mpl.subplot(2, 1, 2)
<del>mpl.plot(predicted_out)
<del>mpl.title('Predicted')
<del>mpl.show()
<ide><path>examples/stateful_lstm.py
<add>'''Example script showing how to use stateful RNNs
<add>to model long sequences efficiently.
<add>'''
<add>from __future__ import print_function
<add>import numpy as np
<add>import matplotlib.pyplot as plt
<add>from keras.models import Sequential
<add>from keras.layers.core import Dense
<add>from keras.layers.recurrent import LSTM
<add>
<add>
<add># since we are using stateful rnn tsteps can be set to 1
<add>tsteps = 1
<add>batch_size = 25
<add>epochs = 25
<add># number of elements ahead that are used to make the prediction
<add>lahead = 1
<add>
<add>
<add>def gen_cosine_amp(amp=100, period=25, x0=0, xn=50000, step=1, k=0.0001):
<add> """Generates an absolute cosine time series with the amplitude
<add> exponentially decreasing
<add>
<add> Arguments:
<add> amp: amplitude of the cosine function
<add> period: period of the cosine function
<add> x0: initial x of the time series
<add> xn: final x of the time series
<add> step: step of the time series discretization
<add> k: exponential rate
<add> """
<add> cos = np.zeros(((xn - x0) * step, 1, 1))
<add> for i in range(len(cos)):
<add> idx = x0 + i * step
<add> cos[i, 0, 0] = amp * np.cos(idx / (2 * np.pi * period))
<add> cos[i, 0, 0] = cos[i, 0, 0] * np.exp(-k * idx)
<add> return cos
<add>
<add>
<add>print('Generating Data')
<add>cos = gen_cosine_amp()
<add>print('Input shape:', cos.shape)
<add>
<add>expected_output = np.zeros((len(cos), 1))
<add>for i in range(len(cos) - lahead):
<add> expected_output[i, 0] = np.mean(cos[i + 1:i + lahead + 1])
<add>
<add>print('Output shape')
<add>print(expected_output.shape)
<add>
<add>print('Creating Model')
<add>model = Sequential()
<add>model.add(LSTM(50,
<add> batch_input_shape=(batch_size, tsteps, 1),
<add> return_sequences=True,
<add> stateful=True))
<add>model.add(LSTM(50,
<add> batch_input_shape=(batch_size, tsteps, 1),
<add> return_sequences=False,
<add> stateful=True))
<add>model.add(Dense(1))
<add>model.compile(loss='rmse', optimizer='rmsprop')
<add>
<add>print('Training')
<add>for i in range(epochs):
<add> print('Epoch', i, '/', epochs)
<add> model.fit(cos,
<add> expected_output,
<add> batch_size=batch_size,
<add> verbose=1,
<add> nb_epoch=1)
<add> model.reset_states()
<add>
<add>print('Predicting')
<add>predicted_output = model.predict(cos, batch_size=batch_size)
<add>
<add>print('Ploting Results')
<add>plt.subplot(2, 1, 1)
<add>plt.plot(expected_output)
<add>plt.title('Expected')
<add>plt.subplot(2, 1, 2)
<add>plt.plot(predicted_output)
<add>plt.title('Predicted')
<add>plt.show() | 2 |
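As a compact restatement of what the rewritten example above computes (symbols are introduced here only for readability, with A, P, Δ and ℓ standing for the script's `amp`, `period`, `step` and `lahead`), the input series and its target are

\[
x_i \;=\; A\,\cos\!\left(\frac{t_i}{2\pi P}\right) e^{-k\,t_i},
\qquad t_i = x_0 + i\,\Delta,
\qquad y_i \;=\; \frac{1}{\ell}\sum_{j=1}^{\ell} x_{i+j},
\]

i.e. each target is the mean of the next ℓ samples of an exponentially damped cosine; with `lahead = 1` the stateful LSTM is simply trained to predict the next sample.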
Python | Python | remove extraneous batch_input_shape | 8d20bac7fa534383fde1054334110691a11735c9 | <ide><path>examples/stateful_lstm.py
<ide> def gen_cosine_amp(amp=100, period=1000, x0=0, xn=50000, step=1, k=0.0001):
<ide> return_sequences=True,
<ide> stateful=True))
<ide> model.add(LSTM(50,
<del> batch_input_shape=(batch_size, tsteps, 1),
<ide> return_sequences=False,
<ide> stateful=True))
<ide> model.add(Dense(1)) | 1 |
PHP | PHP | update error configuration in app skeleton | 263ac6c244f9767a21956442cea850866cda9f88 | <ide><path>App/Config/error.php
<ide> *
<ide> * @copyright Copyright 2005-2012, Cake Software Foundation, Inc. (http://cakefoundation.org)
<ide> * @link http://cakephp.org CakePHP(tm) Project
<del> * @package app.Config
<ide> * @since CakePHP(tm) v3.0.0
<ide> * @license MIT License (http://www.opensource.org/licenses/mit-license.php)
<ide> */
<ide> namespace App\Config;
<ide>
<del>use Cake\Core\Configure;
<add>use Cake\Error\ErrorHandler;
<add>use Cake\Console\ConsoleErrorHandler;
<ide>
<ide> /**
<del> * Configure the Error handler used to handle errors for your application. By default
<del> * ErrorHandler::handleError() is used. It will display errors using Debugger, when debug > 0
<del> * and log errors with Cake Log when debug = 0.
<add> * Configure the Error and Exception handlers used by your application.
<ide> *
<del> * Options:
<add> * By default errors are displayed using Debugger, when debug > 0 and logged by
<add> * Cake\Log\Log when debug = 0.
<ide> *
<del> * - `handler` - callback - The callback to handle errors. You can set this to any callable type,
<del> * including anonymous functions.
<del> * - `consoleHandler` - callback - The callback to handle errors. You can set this to any callable type,
<del> * including anonymous functions.
<del> * - `level` - int - The level of errors you are interested in capturing.
<del> * - `trace` - boolean - Include stack traces for errors in log files.
<del> *
<del> * @see ErrorHandler for more information on error handling and configuration.
<del> */
<del> Configure::write('Error', [
<del> 'handler' => 'Cake\Error\ErrorHandler::handleError',
<del> 'consoleHandler' => 'Cake\Console\ConsoleErrorHandler::handleError',
<del> 'level' => E_ALL & ~E_DEPRECATED,
<del> 'trace' => true
<del> ]);
<del>
<del>/**
<del> * Configure the Exception handler used for uncaught exceptions. By default,
<del> * ErrorHandler::handleException() is used. It will display a HTML page for the exception, and
<del> * while debug > 0, framework errors like Missing Controller will be displayed. When debug = 0,
<del> * framework errors will be coerced into generic HTTP errors.
<add> * In CLI environments exceptions will be printed to stderr with a backtrace.
<add> * In web environments an HTML page will be displayed for the exception.
<add> * While debug > 0, framework errors like Missing Controller will be displayed.
<add> * When debug = 0, framework errors will be coerced into generic HTTP errors.
<ide> *
<ide> * Options:
<ide> *
<del> * - `handler` - callback - The callback to handle exceptions. You can set this to any callback type,
<del> * including anonymous functions.
<del> * - `renderer` - string - The class responsible for rendering uncaught exceptions. If you choose a custom class you
<del> * should place the file for that class in app/Lib/Error. This class needs to implement a render method.
<del> * - `log` - boolean - Should Exceptions be logged?
<del> * - `skipLog` - array - list of exceptions to skip for logging. Exceptions that
<add> * - `errorLevel` - int - The level of errors you are interested in capturing.
<add> * - `trace` - boolean - Whether or not backtraces should be included in
<add> * logged errors/exceptions.
<add> * - `exceptionRenderer` - string - The class responsible for rendering
<add> * uncaught exceptions. If you choose a custom class you should place
<add> * the file for that class in app/Lib/Error. This class needs to implement a render method.
<add> * - `skipLog` - array - List of exceptions to skip for logging. Exceptions that
<ide> * extend one of the listed exceptions will also be skipped for logging.
<ide> * Example: `'skipLog' => array('Cake\Error\NotFoundException', 'Cake\Error\UnauthorizedException')`
<ide> *
<del> * @see ErrorHandler for more information on exception handling and configuration.
<add> * @see ErrorHandler for more information on error handling and configuration.
<ide> */
<del> Configure::write('Exception', [
<del> 'handler' => 'Cake\Error\ErrorHandler::handleException',
<del> 'consoleHandler' => 'Cake\Console\ConsoleErrorHandler::handleException',
<del> 'renderer' => 'Cake\Error\ExceptionRenderer',
<del> 'log' => true
<del> ]);
<add>$options = [
<add> 'errorLevel' => E_ALL & ~E_DEPRECATED,
<add> 'exceptionRenderer' => 'Cake\Error\ExceptionRenderer',
<add> 'skipLog' => [],
<add> 'log' => true,
<add> 'trace' => true,
<add>];
<ide>
<del>/**
<del> * Once configured, set the error/exception handlers to PHP's default handlers.
<del> */
<del> Configure::setErrorHandlers();
<add>if (php_sapi_name() == 'cli') {
<add> $errorHandler = new ConsoleErrorHandler($options);
<add>} else {
<add> $errorHandler = new ErrorHandler($options);
<add>}
<add>$errorHandler->register(); | 1 |
Java | Java | improve documentation of springfactoriesloader | 1a880076880a794016e2eeadea4f7f67c38e68ce | <ide><path>spring-core/src/main/java/org/springframework/core/io/support/SpringFactoriesLoader.java
<ide> /**
<ide> * General purpose factory loading mechanism for internal use within the framework.
<ide> *
<del> * <p>The {@code SpringFactoriesLoader} loads and instantiates factories of a given type
<del> * from "META-INF/spring.factories" files. The file should be in {@link Properties} format,
<del> * where the key is the fully qualified interface or abstract class name, and the value
<del> * is a comma-separated list of implementation class names. For instance:
<add> * <p>{@code SpringFactoriesLoader} {@linkplain #loadFactories loads} and instantiates
<add> * factories of a given type from {@value #FACTORIES_RESOURCE_LOCATION} files which
<add> * may be present in multiple JAR files in the classpath. The {@code spring.factories}
<add> * file must be in {@link Properties} format, where the key is the fully qualified
<add> * name of the interface or abstract class, and the value is a comma-separated list of
<add> * implementation class names. For example:
<ide> *
<ide> * <pre class="code">example.MyService=example.MyServiceImpl1,example.MyServiceImpl2</pre>
<ide> *
<del> * where {@code MyService} is the name of the interface, and {@code MyServiceImpl1} and
<del> * {@code MyServiceImpl2} are the two implementations.
<add> * where {@code example.MyService} is the name of the interface, and {@code MyServiceImpl1}
<add> * and {@code MyServiceImpl2} are two implementations.
<ide> *
<ide> * @author Arjen Poutsma
<ide> * @author Juergen Hoeller
<ide> */
<ide> public abstract class SpringFactoriesLoader {
<ide>
<del> /** The location to look for the factories. Can be present in multiple JAR files. */
<del> public static final String FACTORIES_RESOURCE_LOCATION = "META-INF/spring.factories";
<del>
<ide> private static final Log logger = LogFactory.getLog(SpringFactoriesLoader.class);
<ide>
<add> /**
<add> * The location to look for factories.
<add> * <p>Can be present in multiple JAR files.
<add> */
<add> public static final String FACTORIES_RESOURCE_LOCATION = "META-INF/spring.factories";
<add>
<ide>
<ide> /**
<del> * Load the factory implementations of the given type from the default location,
<del> * using the given class loader.
<del> * <p>The returned factories are ordered in accordance with the {@link AnnotationAwareOrderComparator}.
<add> * Load and instantiate the factory implementations of the given type from
<add> * {@value #FACTORIES_RESOURCE_LOCATION}, using the given class loader.
<add> * <p>The returned factories are sorted in accordance with the {@link AnnotationAwareOrderComparator}.
<add> * <p>If a custom instantiation strategy is required, use {@link #loadFactoryNames}
<add> * to obtain all registered factory names.
<ide> * @param factoryClass the interface or abstract class representing the factory
<ide> * @param classLoader the ClassLoader to use for loading (can be {@code null} to use the default)
<add> * @see #loadFactoryNames
<add> * @throws IllegalArgumentException if any factory implementation class cannot
<add> * be loaded or if an error occurs while instantiating any factory
<ide> */
<ide> public static <T> List<T> loadFactories(Class<T> factoryClass, ClassLoader classLoader) {
<ide> Assert.notNull(factoryClass, "'factoryClass' must not be null");
<ide> public static <T> List<T> loadFactories(Class<T> factoryClass, ClassLoader class
<ide> return result;
<ide> }
<ide>
<add> /**
<add> * Load the fully qualified class names of factory implementations of the
<add> * given type from {@value #FACTORIES_RESOURCE_LOCATION}, using the given
<add> * class loader.
<add> * @param factoryClass the interface or abstract class representing the factory
<add> * @param classLoader the ClassLoader to use for loading resources; can be
<add> * {@code null} to use the default
<add> * @see #loadFactories
<add> * @throws IllegalArgumentException if an error occurs while loading factory names
<add> */
<ide> public static List<String> loadFactoryNames(Class<?> factoryClass, ClassLoader classLoader) {
<ide> String factoryClassName = factoryClass.getName();
<ide> try { | 1 |
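The javadoc rewritten in the patch above documents the `META-INF/spring.factories` contract and the two entry points, `loadFactories` and `loadFactoryNames`. A minimal usage sketch follows; the `MyService` interface and the bootstrap class are hypothetical stand-ins borrowed from the javadoc's own `example.MyService` illustration, and the factories file shown in the comment is assumed to be on the classpath.

```java
import java.util.List;
import org.springframework.core.io.support.SpringFactoriesLoader;

public class FactoryLoadingSketch {

    // Hypothetical factory contract. Implementations are registered in
    // META-INF/spring.factories under the interface's fully qualified name, e.g.:
    //   example.MyService=example.MyServiceImpl1,example.MyServiceImpl2
    public interface MyService {
    }

    public static void main(String[] args) {
        ClassLoader classLoader = FactoryLoadingSketch.class.getClassLoader();

        // Loads and instantiates every registered implementation, sorted
        // with AnnotationAwareOrderComparator as described in the javadoc.
        List<MyService> services =
                SpringFactoriesLoader.loadFactories(MyService.class, classLoader);

        // Loads only the fully qualified implementation class names, leaving
        // instantiation to a custom strategy.
        List<String> names =
                SpringFactoriesLoader.loadFactoryNames(MyService.class, classLoader);

        System.out.println(services.size() + " instances, " + names.size() + " names");
    }
}
```

Per the `@throws` clauses added in the patch, both methods surface failures as `IllegalArgumentException`.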
Text | Text | add tf protein notebook to notebooks doc | e9d9982e7c99da93c7b5ed0058bdd49e749aee5b | <ide><path>notebooks/README.md
<ide> You can open any page of the documentation as a notebook in colab (there is a bu
<ide> | [How to fine-tune a model on multiple choice](https://github.com/huggingface/notebooks/blob/main/examples/multiple_choice-tf.ipynb)| Show how to preprocess the data and fine-tune a pretrained model on SWAG. | [](https://colab.research.google.com/github/huggingface/notebooks/blob/main/examples/multiple_choice-tf.ipynb)| [](https://studiolab.sagemaker.aws/import/github/huggingface/notebooks/blob/main/examples/multiple_choice-tf.ipynb)|
<ide> | [How to fine-tune a model on translation](https://github.com/huggingface/notebooks/blob/main/examples/translation-tf.ipynb)| Show how to preprocess the data and fine-tune a pretrained model on WMT. | [](https://colab.research.google.com/github/huggingface/notebooks/blob/main/examples/translation-tf.ipynb)| [](https://studiolab.sagemaker.aws/import/github/huggingface/notebooks/blob/main/examples/translation-tf.ipynb)|
<ide> | [How to fine-tune a model on summarization](https://github.com/huggingface/notebooks/blob/main/examples/summarization-tf.ipynb)| Show how to preprocess the data and fine-tune a pretrained model on XSUM. | [](https://colab.research.google.com/github/huggingface/notebooks/blob/main/examples/summarization-tf.ipynb)| [](https://studiolab.sagemaker.aws/import/github/huggingface/notebooks/blob/main/examples/summarization-tf.ipynb)|
<add>| [How to fine-tune a pre-trained protein model](https://github.com/huggingface/notebooks/blob/main/examples/protein_language_modeling-tf.ipynb) | See how to tokenize proteins and fine-tune a large pre-trained protein "language" model | [](https://colab.research.google.com/github/huggingface/notebooks/blob/main/examples/protein_language_modeling-tf.ipynb) | [](https://studiolab.sagemaker.aws/import/github/huggingface/notebooks/blob/main/examples/protein_language_modeling-tf.ipynb) |
<ide>
<ide> ### Optimum notebooks
<ide> | 1 |
Mixed | Go | add an option to disable ip masquerading | 4dc4d56db9797159e2e329845e0b94e3e0f780a0 | <ide><path>daemon/config.go
<ide> type Config struct {
<ide> Mirrors []string
<ide> EnableIptables bool
<ide> EnableIpForward bool
<add> EnableIpMasq bool
<ide> DefaultIp net.IP
<ide> BridgeIface string
<ide> BridgeIP string
<ide> func (config *Config) InstallFlags() {
<ide> flag.BoolVar(&config.AutoRestart, []string{"#r", "#-restart"}, true, "--restart on the daemon has been deprecated in favor of --restart policies on docker run")
<ide> flag.BoolVar(&config.EnableIptables, []string{"#iptables", "-iptables"}, true, "Enable Docker's addition of iptables rules")
<ide> flag.BoolVar(&config.EnableIpForward, []string{"#ip-forward", "-ip-forward"}, true, "Enable net.ipv4.ip_forward")
<add> flag.BoolVar(&config.EnableIpMasq, []string{"-ip-masq"}, true, "Enable IP masquerading for bridge's IP range")
<ide> flag.StringVar(&config.BridgeIP, []string{"#bip", "-bip"}, "", "Use this CIDR notation address for the network bridge's IP, not compatible with -b")
<ide> flag.StringVar(&config.BridgeIface, []string{"b", "-bridge"}, "", "Attach containers to a pre-existing network bridge\nuse 'none' to disable container networking")
<ide> flag.BoolVar(&config.InterContainerCommunication, []string{"#icc", "-icc"}, true, "Enable inter-container communication")
<ide><path>daemon/daemon.go
<ide> func NewDaemonFromDirectory(config *Config, eng *engine.Engine) (*Daemon, error)
<ide> if !config.EnableIptables && !config.InterContainerCommunication {
<ide> return nil, fmt.Errorf("You specified --iptables=false with --icc=false. ICC uses iptables to function. Please set --icc or --iptables to true.")
<ide> }
<add> if !config.EnableIptables && config.EnableIpMasq {
<add> return nil, fmt.Errorf("You specified --iptables=false with --ipmasq=true. IP masquerading uses iptables to function. Please set --ipmasq to false or --iptables to true.")
<add> }
<ide> config.DisableNetwork = config.BridgeIface == disableNetworkBridge
<ide>
<ide> // Claim the pidfile first, to avoid any and all unexpected race conditions.
<ide> func NewDaemonFromDirectory(config *Config, eng *engine.Engine) (*Daemon, error)
<ide> job.SetenvBool("EnableIptables", config.EnableIptables)
<ide> job.SetenvBool("InterContainerCommunication", config.InterContainerCommunication)
<ide> job.SetenvBool("EnableIpForward", config.EnableIpForward)
<add> job.SetenvBool("EnableIpMasq", config.EnableIpMasq)
<ide> job.Setenv("BridgeIface", config.BridgeIface)
<ide> job.Setenv("BridgeIP", config.BridgeIP)
<ide> job.Setenv("DefaultBindingIP", config.DefaultIp.String())
<ide><path>daemon/networkdriver/bridge/driver.go
<ide> func InitDriver(job *engine.Job) engine.Status {
<ide> network *net.IPNet
<ide> enableIPTables = job.GetenvBool("EnableIptables")
<ide> icc = job.GetenvBool("InterContainerCommunication")
<add> ipMasq = job.GetenvBool("EnableIpMasq")
<ide> ipForward = job.GetenvBool("EnableIpForward")
<ide> bridgeIP = job.Getenv("BridgeIP")
<ide> )
<ide> func InitDriver(job *engine.Job) engine.Status {
<ide>
<ide> // Configure iptables for link support
<ide> if enableIPTables {
<del> if err := setupIPTables(addr, icc); err != nil {
<add> if err := setupIPTables(addr, icc, ipMasq); err != nil {
<ide> return job.Error(err)
<ide> }
<ide> }
<ide> func InitDriver(job *engine.Job) engine.Status {
<ide> return engine.StatusOK
<ide> }
<ide>
<del>func setupIPTables(addr net.Addr, icc bool) error {
<add>func setupIPTables(addr net.Addr, icc, ipmasq bool) error {
<ide> // Enable NAT
<del> natArgs := []string{"POSTROUTING", "-t", "nat", "-s", addr.String(), "!", "-o", bridgeIface, "-j", "MASQUERADE"}
<ide>
<del> if !iptables.Exists(natArgs...) {
<del> if output, err := iptables.Raw(append([]string{"-I"}, natArgs...)...); err != nil {
<del> return fmt.Errorf("Unable to enable network bridge NAT: %s", err)
<del> } else if len(output) != 0 {
<del> return fmt.Errorf("Error iptables postrouting: %s", output)
<add> if ipmasq {
<add> natArgs := []string{"POSTROUTING", "-t", "nat", "-s", addr.String(), "!", "-o", bridgeIface, "-j", "MASQUERADE"}
<add>
<add> if !iptables.Exists(natArgs...) {
<add> if output, err := iptables.Raw(append([]string{"-I"}, natArgs...)...); err != nil {
<add> return fmt.Errorf("Unable to enable network bridge NAT: %s", err)
<add> } else if len(output) != 0 {
<add> return fmt.Errorf("Error iptables postrouting: %s", output)
<add> }
<ide> }
<ide> }
<ide>
<ide><path>docs/man/docker.1.md
<ide> unix://[/path/to/socket] to use.
<ide> **--ip**=""
<ide> Default IP address to use when binding container ports. Default is `0.0.0.0`.
<ide>
<add>**--ip-masq**=*true*|*false*
<add> Enable IP masquerading for bridge's IP range. Default is true.
<add>
<ide> **--iptables**=*true*|*false*
<ide> Disable Docker's addition of iptables rules. Default is true.
<ide>
<ide><path>docs/sources/reference/commandline/cli.md
<ide> expect an integer, and they can only be specified once.
<ide> --icc=true Enable inter-container communication
<ide> --ip=0.0.0.0 Default IP address to use when binding container ports
<ide> --ip-forward=true Enable net.ipv4.ip_forward
<add> --ip-masq=true Enable IP masquerading for bridge's IP range.
<ide> --iptables=true Enable Docker's addition of iptables rules
<ide> --mtu=0 Set the containers network MTU
<ide> if no value is provided: default to the default route MTU or 1500 if no default route is available
<ide> the `-H` flag for the client.
<ide> $ sudo docker ps
<ide> # both are equal
<ide>
<add>IP masquerading uses address translation to allow containers without a public IP to talk
<add>to other machines on the Internet. This may interfere with some network topologies and
<add>can be disabled with --ip-masq=false.
<add>
<ide> To run the daemon with [systemd socket activation](
<ide> http://0pointer.de/blog/projects/socket-activation.html), use
<ide> `docker -d -H fd://`. Using `fd://` will work perfectly for most setups but | 5 |
Javascript | Javascript | collect more information on debug t48643168 | fb90f64d9c5ead26e243a9a7bae80ba5487d1cff | <ide><path>Libraries/Renderer/shims/ReactNativeViewConfigRegistry.js
<ide> exports.register = function(name: string, callback: ViewConfigGetter): string {
<ide> 'Tried to register two views with the same name %s',
<ide> name,
<ide> );
<add> invariant(
<add> typeof callback === 'function',
<add> 'View config getter callback for component `%s` must be a function (received `%s`)',
<add> name,
<add> callback === null ? 'null' : typeof callback,
<add> );
<ide> viewConfigCallbacks.set(name, callback);
<ide> return name;
<ide> };
<ide> exports.get = function(name: string): ReactNativeBaseComponentViewConfig<> {
<ide> if (typeof callback !== 'function') {
<ide> invariant(
<ide> false,
<del> 'View config not found for name %s.%s',
<add> 'View config getter callback for component `%s` must be a function (received `%s`).%s',
<ide> name,
<add> callback === null ? 'null' : typeof callback,
<ide> typeof name[0] === 'string' && /[a-z]/.test(name[0])
<ide> ? ' Make sure to start component names with a capital letter.'
<ide> : '',
<ide> );
<ide> }
<del> viewConfigCallbacks.set(name, null);
<ide> viewConfig = callback();
<ide> processEventTypes(viewConfig);
<ide> viewConfigs.set(name, viewConfig);
<add>
<add> // Clear the callback after the config is set so that
<add> // we don't mask any errors during registration.
<add> viewConfigCallbacks.set(name, null);
<ide> } else {
<ide> viewConfig = viewConfigs.get(name);
<ide> } | 1 |
Python | Python | remove celery from intersphinx mapping temporarily | e6ed2f28f4c0857dea9660e2758b171212794958 | <ide><path>docs/conf.py
<ide> def _get_params(root_schema: dict, prefix: str = "", default_section: str = "")
<ide> pkg_name: (f"{THIRD_PARTY_INDEXES[pkg_name]}/", (f'{INVENTORY_CACHE_DIR}/{pkg_name}/objects.inv',))
<ide> for pkg_name in [
<ide> 'boto3',
<del> 'celery',
<add> # 'celery', # Temporarily remove celery as it disappeared from Internet
<ide> 'docker',
<ide> 'hdfs',
<ide> 'jinja2', | 1 |
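The dict comprehension shown in the patch above builds the Sphinx intersphinx mapping named in the commit message. The sketch below illustrates the shape of the entries it produces; the package name, target URL, and cache directory are assumptions chosen for the example, not Airflow's actual configuration values. Dropping `'celery'` from the package list simply leaves its entry out of the mapping until its inventory is reachable again.

    # Minimal sketch (illustrative values, not Airflow's real ones) of the
    # entries produced by the comprehension in the patch above. The result is
    # assigned to Sphinx's standard `intersphinx_mapping` setting.
    THIRD_PARTY_INDEXES = {"docker": "https://docker-py.readthedocs.io/en/stable"}
    INVENTORY_CACHE_DIR = "/tmp/inventory-cache"

    intersphinx_mapping = {
        pkg_name: (
            f"{THIRD_PARTY_INDEXES[pkg_name]}/",
            (f"{INVENTORY_CACHE_DIR}/{pkg_name}/objects.inv",),
        )
        for pkg_name in ["docker"]  # 'celery' omitted, as in the patch
    }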
Javascript | Javascript | ignore negative areas in treemap layout | ae6ce3f37326d7eac8cee42f6e814b5c85e75baf | <ide><path>d3.layout.js
<ide> d3.layout.treemap = function() {
<ide> row.area = 0;
<ide> while ((n = children.length) > 0) {
<ide> child = children[n - 1];
<del> if (!child.area) {
<add> if (isNaN(child.area) || child.area <= 0) {
<ide> children.pop();
<ide> continue;
<ide> }
<ide><path>d3.layout.min.js
<del>(function(){function bh(a,b,c){return a._tree.ancestor.parent==b.parent?a._tree.ancestor:c}function bg(a,b,c){a=a._tree,b=b._tree;var d=c/(b.number-a.number);a.change+=d,b.change-=d,b.shift+=c,b.prelim+=c,b.mod+=c}function bf(a){var b=0,c=0,d=a.children,e=d.length,f;while(--e>=0)f=d[e]._tree,f.prelim+=b,f.mod+=b,b+=f.shift+(c+=f.change)}function be(a,b){function c(a,d){var e=a.children;if(e){var f,g=null,h=-1,i=e.length;while(++h<i)f=e[h],c(f,g),g=f}b(a,d)}c(a,null)}function bd(a,b){return a.depth-b.depth}function bc(a,b){return b.x-a.x}function bb(a,b){return a.x-b.x}function ba(a,b){var c=a.children;if(c){var d,e=c.length,f=-1;while(++f<e)b(d=ba(c[f],b),a)>0&&(a=d)}return a}function _(a){return a.children?a.children[a.children.length-1]:a._tree.thread}function $(a){return a.children?a.children[0]:a._tree.thread}function Z(a,b){return a.parent==b.parent?1:2}function Y(a){var b=a.children;return b?Y(b[b.length-1]):a}function X(a){var b=a.children;return b?X(b[0]):a}function W(a){return a.reduce(function(a,b){return a+b.x},0)/a.length}function V(a){return 1+d3.max(a,function(a){return a.y})}function U(a,b,c){var d=b.r+c.r,e=a.r+c.r,f=b.x-a.x,g=b.y-a.y,h=Math.sqrt(f*f+g*g),i=(e*e+h*h-d*d)/(2*e*h),j=Math.acos(i),k=i*e,l=Math.sin(j)*e;f/=h,g/=h,c.x=a.x+k*f+l*g,c.y=a.y+k*g-l*f}function T(a,b,c,d){var e=a.children;a.x=b+=d*a.x,a.y=c+=d*a.y,a.r*=d;if(e){var f=-1,g=e.length;while(++f<g)T(e[f],b,c,d)}}function S(a){var b=a.children;b?(b.forEach(S),a.r=P(b)):a.r=Math.sqrt(a.value)}function R(a){delete a._pack_next,delete a._pack_prev}function Q(a){a._pack_next=a._pack_prev=a}function P(a){function l(a){b=Math.min(a.x-a.r,b),c=Math.max(a.x+a.r,c),d=Math.min(a.y-a.r,d),e=Math.max(a.y+a.r,e)}var b=Infinity,c=-Infinity,d=Infinity,e=-Infinity,f=a.length,g,h,i,j,k;a.forEach(Q),g=a[0],g.x=-g.r,g.y=0,l(g);if(f>1){h=a[1],h.x=h.r,h.y=0,l(h);if(f>2){i=a[2],U(g,h,i),l(i),M(g,i),g._pack_prev=i,M(i,h),h=g._pack_next;for(var m=3;m<f;m++){U(g,h,i=a[m]);var n=0,o=1,p=1;for(j=h._pack_next;j!==h;j=j._pack_next,o++)if(O(j,i)){n=1;break}if(n==1)for(k=g._pack_prev;k!==j._pack_prev;k=k._pack_prev,p++)if(O(k,i)){p<o&&(n=-1,j=k);break}n==0?(M(g,i),h=i,l(i)):n>0?(N(g,j),h=j,m--):(N(j,h),g=j,m--)}}}var q=(b+c)/2,r=(d+e)/2,s=0;for(var m=0;m<f;m++){var t=a[m];t.x-=q,t.y-=r,s=Math.max(s,t.r+Math.sqrt(t.x*t.x+t.y*t.y))}a.forEach(R);return s}function O(a,b){var c=b.x-a.x,d=b.y-a.y,e=a.r+b.r;return e*e-c*c-d*d>.001}function N(a,b){a._pack_next=b,b._pack_prev=a}function M(a,b){var c=a._pack_next;a._pack_next=b,b._pack_prev=a,b._pack_next=c,c._pack_prev=b}function L(a,b){return a.value-b.value}function J(a){return d3.merge(a.map(function(a){return(a.children||[]).map(function(b){return{source:a,target:b}})}))}function I(a,b){return b.value-a.value}function H(a){return a.value}function G(a){return a.children}function F(a,b){a.sort=d3.rebind(a,b.sort),a.children=d3.rebind(a,b.children),a.links=J,a.value=d3.rebind(a,b.value),a.nodes=function(b){K=!0;return(a.nodes=a)(b)};return a}function E(a){return[d3.min(a),d3.max(a)]}function D(a,b){var c=-1,d=+a[0],e=(a[1]-d)/b,f=[];while(++c<=b)f[c]=e*c+d;return f}function C(a,b){return D(a,Math.ceil(Math.log(b.length)/Math.LN2+1))}function B(a,b){return a+b[1]}function A(a){return a.reduce(B,0)}function z(a){var b=1,c=0,d=a[0][1],e,f=a.length;for(;b<f;++b)(e=a[b][1])>d&&(c=b,d=e);return c}function w(a,b,c){a.y0=b,a.y=c}function v(a){return a.y}function u(a){return a.x}function t(a){return 1}function s(a){return 20}function r(a){var 
b=0,c=0;a.count=0,a.leaf||a.nodes.forEach(function(d){r(d),a.count+=d.count,b+=d.count*d.cx,c+=d.count*d.cy}),a.point&&(a.leaf||(a.point.x+=Math.random()-.5,a.point.y+=Math.random()-.5),a.count++,b+=a.point.x,c+=a.point.y),a.cx=b/a.count,a.cy=c/a.count}function q(){d3.event.stopPropagation(),d3.event.preventDefault()}function p(){i&&(q(),i=!1)}function o(){!f||(g&&(i=!0,q()),d3.event.type==="mouseup"&&n(),f.fixed=!1,e=h=f=j=null)}function n(){if(!!f){var a=j.parentNode;if(!a){f.fixed=!1,h=f=j=null;return}var b=m(a);g=!0,f.px=b[0]-h[0],f.py=b[1]-h[1],q(),e.resume()}}function m(a){return d3.event.touches?d3.svg.touches(a)[0]:d3.svg.mouse(a)}function l(a){a!==f&&(a.fixed=!1)}function k(a){a.fixed=!0}function c(a,c){if(a===c)return a;var d=b(a),e=b(c),f=d.pop(),g=e.pop(),h=null;while(f===g)h=f,f=d.pop(),g=e.pop();return h}function b(a){var b=[],c=a.parent;while(c!=null)b.push(a),a=c,c=c.parent;b.push(a);return b}function a(a){var b=a.source,d=a.target,e=c(b,d),f=[b];while(b!==e)b=b.parent,f.push(b);var g=f.length;while(d!==e)f.splice(g,0,d),d=d.parent;return f}d3.layout={},d3.layout.bundle=function(){return function(b){var c=[],d=-1,e=b.length;while(++d<e)c.push(a(b[d]));return c}},d3.layout.chord=function(){function k(){b.sort(function(a,b){return i(a.target.value,b.target.value)})}function j(){var a={},j=[],l=d3.range(e),m=[],n,o,p,q,r;b=[],c=[],n=0,q=-1;while(++q<e){o=0,r=-1;while(++r<e)o+=d[q][r];j.push(o),m.push(d3.range(e)),n+=o}g&&l.sort(function(a,b){return g(j[a],j[b])}),h&&m.forEach(function(a,b){a.sort(function(a,c){return h(d[b][a],d[b][c])})}),n=(2*Math.PI-f*e)/n,o=0,q=-1;while(++q<e){p=o,r=-1;while(++r<e){var s=l[q],t=m[q][r],u=d[s][t];a[s+"-"+t]={index:s,subindex:t,startAngle:o,endAngle:o+=u*n,value:u}}c.push({index:s,startAngle:p,endAngle:o,value:(o-p)/n}),o+=f}q=-1;while(++q<e){r=q-1;while(++r<e){var v=a[q+"-"+r],w=a[r+"-"+q];(v.value||w.value)&&b.push(v.value<w.value?{source:w,target:v}:{source:v,target:w})}}i&&k()}var a={},b,c,d,e,f=0,g,h,i;a.matrix=function(f){if(!arguments.length)return d;e=(d=f)&&d.length,b=c=null;return a},a.padding=function(d){if(!arguments.length)return f;f=d,b=c=null;return a},a.sortGroups=function(d){if(!arguments.length)return g;g=d,b=c=null;return a},a.sortSubgroups=function(c){if(!arguments.length)return h;h=c,b=null;return a},a.sortChords=function(c){if(!arguments.length)return i;i=c,b&&k();return a},a.chords=function(){b||j();return b},a.groups=function(){c||j();return c};return a},d3.layout.force=function(){function G(b,c){var d=m(this.parentNode);(f=b).fixed=!0,g=!1,j=this,e=a,h=[d[0]-b.x,d[1]-b.y],q()}function F(){var a=A.length,e=B.length,f=d3.geom.quadtree(A),g,h,j,k,l,m,n;for(g=0;g<e;++g){h=B[g],j=h.source,k=h.target,m=k.x-j.x,n=k.y-j.y;if(l=m*m+n*n)l=d*D[g]*((l=Math.sqrt(l))-C[g])/l,m*=l,n*=l,k.x-=m,k.y-=n,j.x+=m,j.y+=n}var o=d*x;m=c[0]/2,n=c[1]/2,g=-1;while(++g<a)h=A[g],h.x+=(m-h.x)*o,h.y+=(n-h.y)*o;r(f);var p=d*w;g=-1;while(++g<a)f.visit(E(A[g],p));g=-1;while(++g<a)h=A[g],h.fixed?(h.x=h.px,h.y=h.py):(h.x-=(h.px-(h.px=h.x))*i,h.y-=(h.py-(h.py=h.y))*i);b.tick.dispatch({type:"tick",alpha:d});return(d*=.99)<.005}function E(a,b){return function(c,d,e,f,g){if(c.point!==a){var h=c.cx-a.x,i=c.cy-a.y,j=1/Math.sqrt(h*h+i*i);if((f-d)*j<y){var k=b*c.count*j*j;a.x+=h*k,a.y+=i*k;return!0}if(c.point&&isFinite(j)){var k=b*j*j;a.x+=h*k,a.y+=i*k}}}}var a={},b=d3.dispatch("tick"),c=[1,1],d,i=.9,u=s,v=t,w=-30,x=.1,y=.8,z,A=[],B=[],C,D;a.on=function(c,d){b[c].add(d);return a},a.nodes=function(b){if(!arguments.length)return A;A=b;return 
a},a.links=function(b){if(!arguments.length)return B;B=b;return a},a.size=function(b){if(!arguments.length)return c;c=b;return a},a.linkDistance=function(b){if(!arguments.length)return u;u=d3.functor(b);return a},a.distance=a.linkDistance,a.linkStrength=function(b){if(!arguments.length)return v;v=d3.functor(b);return a},a.friction=function(b){if(!arguments.length)return i;i=b;return a},a.charge=function(b){if(!arguments.length)return w;w=b;return a},a.gravity=function(b){if(!arguments.length)return x;x=b;return a},a.theta=function(b){if(!arguments.length)return y;y=b;return a},a.start=function(){function l(){if(!i){i=[];for(d=0;d<e;++d)i[d]=[];for(d=0;d<f;++d){var a=B[d];i[a.source.index].push(a.target),i[a.target.index].push(a.source)}}return i[b]}function k(a,c){var d=l(b),e=-1,f=d.length,g;while(++e<f)if(!isNaN(g=d[e][a]))return g;return Math.random()*c}var b,d,e=A.length,f=B.length,g=c[0],h=c[1],i,j;for(b=0;b<e;++b)(j=A[b]).index=b;C=[],D=[];for(b=0;b<f;++b)j=B[b],typeof j.source=="number"&&(j.source=A[j.source]),typeof j.target=="number"&&(j.target=A[j.target]),C[b]=u.call(this,j,b),D[b]=v.call(this,j,b);for(b=0;b<e;++b)j=A[b],isNaN(j.x)&&(j.x=k("x",g)),isNaN(j.y)&&(j.y=k("y",h)),isNaN(j.px)&&(j.px=j.x),isNaN(j.py)&&(j.py=j.y);return a.resume()},a.resume=function(){d=.1,d3.timer(F);return a},a.stop=function(){d=0;return a},a.drag=function(){this.on("mouseover.force",k).on("mouseout.force",l).on("mousedown.force",G).on("touchstart.force",G),d3.select(window).on("mousemove.force",n).on("touchmove.force",n).on("mouseup.force",o,!0).on("touchend.force",o,!0).on("click.force",p,!0);return a};return a};var e,f,g,h,i,j;d3.layout.partition=function(){function e(e,f){var g=a.call(this,e,f);c(g[0],0,b[0],b[1]/d(g[0]));return g}function d(a){var b=a.children,c=0;if(b){var e=-1,f=b.length;while(++e<f)c=Math.max(c,d(b[e]))}return 1+c}function c(a,b,d,e){var f=a.children;a.x=b,a.y=a.depth*e,a.dx=d,a.dy=e;if(f){var g=-1,h=f.length,i,j;d/=a.value;while(++g<h)c(i=f[g],b,j=i.value*d,e),b+=j}}var a=d3.layout.hierarchy(),b=[1,1];e.size=function(a){if(!arguments.length)return b;b=a;return e};return F(e,a)},d3.layout.pie=function(){function f(f,g){var h=+(typeof c=="function"?c.apply(this,arguments):c),i=(typeof e=="function"?e.apply(this,arguments):e)-c,j=d3.range(f.length);b!=null&&j.sort(function(a,c){return b(f[a],f[c])});var k=f.map(a);i/=k.reduce(function(a,b){return a+b},0);var l=j.map(function(a){return{value:d=k[a],startAngle:h,endAngle:h+=d*i}});return f.map(function(a,b){return l[j[b]]})}var a=Number,b=null,c=0,e=2*Math.PI;f.value=function(b){if(!arguments.length)return a;a=b;return f},f.sort=function(a){if(!arguments.length)return b;b=a;return f},f.startAngle=function(a){if(!arguments.length)return c;c=a;return f},f.endAngle=function(a){if(!arguments.length)return e;e=a;return f};return f},d3.layout.stack=function(){function g(h,i){var j=h.map(function(b,c){return a.call(g,b,c)}),k=j.map(function(a,b){return a.map(function(a,b){return[e.call(g,a,b),f.call(g,a,b)]})}),l=b.call(g,k,i);j=d3.permute(j,l),k=d3.permute(k,l);var m=c.call(g,k,i),n=j.length,o=j[0].length,p,q,r;for(q=0;q<o;++q){d.call(g,j[0][q],r=m[q],k[0][q][1]);for(p=1;p<n;++p)d.call(g,j[p][q],r+=k[p-1][q][1],k[p][q][1])}return h}var a=Object,b=x["default"],c=y.zero,d=w,e=u,f=v;g.values=function(b){if(!arguments.length)return a;a=b;return g},g.order=function(a){if(!arguments.length)return b;b=typeof a=="function"?a:x[a];return g},g.offset=function(a){if(!arguments.length)return c;c=typeof a=="function"?a:y[a];return 
g},g.x=function(a){if(!arguments.length)return e;e=a;return g},g.y=function(a){if(!arguments.length)return f;f=a;return g},g.out=function(a){if(!arguments.length)return d;d=a;return g};return g};var x={"inside-out":function(a){var b=a.length,c,d,e=a.map(z),f=a.map(A),g=d3.range(b).sort(function(a,b){return e[a]-e[b]}),h=0,i=0,j=[],k=[];for(c=0;c<b;++c)d=g[c],h<i?(h+=f[d],j.push(d)):(i+=f[d],k.push(d));return k.reverse().concat(j)},reverse:function(a){return d3.range(a.length).reverse()},"default":function(a){return d3.range(a.length)}},y={silhouette:function(a){var b=a.length,c=a[0].length,d=[],e=0,f,g,h,i=[];for(g=0;g<c;++g){for(f=0,h=0;f<b;f++)h+=a[f][g][1];h>e&&(e=h),d.push(h)}for(g=0;g<c;++g)i[g]=(e-d[g])/2;return i},wiggle:function(a){var b=a.length,c=a[0],d=c.length,e=0,f,g,h,i,j,k,l,m,n,o=[];o[0]=m=n=0;for(g=1;g<d;++g){for(f=0,i=0;f<b;++f)i+=a[f][g][1];for(f=0,j=0,l=c[g][0]-c[g-1][0];f<b;++f){for(h=0,k=(a[f][g][1]-a[f][g-1][1])/(2*l);h<f;++h)k+=(a[h][g][1]-a[h][g-1][1])/l;j+=k*a[f][g][1]}o[g]=m-=i?j/i*l:0,m<n&&(n=m)}for(g=0;g<d;++g)o[g]-=n;return o},expand:function(a){var b=a.length,c=a[0].length,d=1/b,e,f,g,h=[];for(f=0;f<c;++f){for(e=0,g=0;e<b;e++)g+=a[e][f][1];if(g)for(e=0;e<b;e++)a[e][f][1]/=g;else for(e=0;e<b;e++)a[e][f][1]=d}for(f=0;f<c;++f)h[f]=0;return h},zero:function(a){var b=-1,c=a[0].length,d=[];while(++b<c)d[b]=0;return d}};d3.layout.histogram=function(){function e(e,f){var g=[],h=e.map(b,this),i=c.call(this,h,f),j=d.call(this,i,h,f),k,f=-1,l=h.length,m=j.length-1,n=a?1/l:1,o;while(++f<m)k=g[f]=[],k.dx=j[f+1]-(k.x=j[f]),k.y=0;f=-1;while(++f<l)o=h[f],o>=i[0]&&o<=i[1]&&(k=g[d3.bisect(j,o,1,m)-1],k.y+=n,k.push(e[f]));return g}var a=!0,b=Number,c=E,d=C;e.value=function(a){if(!arguments.length)return b;b=a;return e},e.range=function(a){if(!arguments.length)return c;c=d3.functor(a);return e},e.bins=function(a){if(!arguments.length)return d;d=typeof a=="number"?function(b){return D(b,a)}:d3.functor(a);return e},e.frequency=function(b){if(!arguments.length)return a;a=!!b;return e};return e},d3.layout.hierarchy=function(){function g(a){var b=[];e(a,0,b);return b}function f(a,b){var d=a.children,e=0;if(d){var h=-1,i=d.length,j=b+1;while(++h<i)e+=f(d[h],j)}else c&&(e=c.call(g,K?a:a.data,b));c&&(a.value=e);return e}function e(f,h,i){var j=b.call(g,f,h),k=K?f:{data:f};k.depth=h,i.push(k);if(j){var l=-1,m=j.length,n=k.children=[],o=0,p=h+1;while(++l<m)d=e(j[l],p,i),d.parent=k,n.push(d),o+=d.value;a&&n.sort(a),c&&(k.value=o)}else c&&(k.value=c.call(g,f,h));return k}var a=I,b=G,c=H;g.sort=function(b){if(!arguments.length)return a;a=b;return g},g.children=function(a){if(!arguments.length)return b;b=a;return g},g.value=function(a){if(!arguments.length)return c;c=a;return g},g.revalue=function(a){f(a,0);return a};return g};var K=!1;d3.layout.pack=function(){function c(c,d){var e=a.call(this,c,d),f=e[0];f.x=0,f.y=0,S(f);var g=b[0],h=b[1],i=1/Math.max(2*f.r/g,2*f.r/h);T(f,g/2,h/2,i);return e}var a=d3.layout.hierarchy().sort(L),b=[1,1];c.size=function(a){if(!arguments.length)return b;b=a;return c};return F(c,a)},d3.layout.cluster=function(){function d(d,e){var f=a.call(this,d,e),g=f[0],h,i=0,j,k;be(g,function(a){a.children?(a.x=W(a.children),a.y=V(a.children)):(a.x=h?i+=b(a,h):0,a.y=0,h=a)});var l=X(g),m=Y(g),n=l.x-b(l,m)/2,o=m.x+b(m,l)/2;be(g,function(a){a.x=(a.x-n)/(o-n)*c[0],a.y=(1-a.y/g.y)*c[1]});return f}var a=d3.layout.hierarchy().sort(null).value(null),b=Z,c=[1,1];d.separation=function(a){if(!arguments.length)return b;b=a;return d},d.size=function(a){if(!arguments.length)return 
c;c=a;return d};return F(d,a)},d3.layout.tree=function(){function d(d,e){function j(a,c,d){if(c){var e=a,f=a,g=c,h=a.parent.children[0],i=e._tree.mod,j=f._tree.mod,k=g._tree.mod,l=h._tree.mod,m;while(g=_(g),e=$(e),g&&e)h=$(h),f=_(f),f._tree.ancestor=a,m=g._tree.prelim+k-e._tree.prelim-i+b(g,e),m>0&&(bg(bh(g,a,d),a,m),i+=m,j+=m),k+=g._tree.mod,i+=e._tree.mod,l+=h._tree.mod,j+=f._tree.mod;g&&!_(f)&&(f._tree.thread=g,f._tree.mod+=k-j),e&&!$(h)&&(h._tree.thread=e,h._tree.mod+=i-l,d=a)}return d}function i(a,b){a.x=a._tree.prelim+b;var c=a.children;if(c){var d=-1,e=c.length;b+=a._tree.mod;while(++d<e)i(c[d],b)}}function h(a,c){var d=a.children,e=a._tree;if(d){var f=d.length,g=d[0],i,k=g,l,m=-1;while(++m<f)l=d[m],h(l,i),k=j(l,i,k),i=l;bf(a);var n=.5*(g._tree.prelim+l._tree.prelim);c?(e.prelim=c._tree.prelim+b(a,c),e.mod=e.prelim-n):e.prelim=n}else c&&(e.prelim=c._tree.prelim+b(a,c))}var f=a.call(this,d,e),g=f[0];be(g,function(a,b){a._tree={ancestor:a,prelim:0,mod:0,change:0,shift:0,number:b?b._tree.number+1:0}}),h(g),i(g,-g._tree.prelim);var k=ba(g,bc),l=ba(g,bb),m=ba(g,bd),n=k.x-b(k,l)/2,o=l.x+b(l,k)/2,p=m.depth||1;be(g,function(a){a.x=(a.x-n)/(o-n)*c[0],a.y=a.depth/p*c[1],delete a._tree});return f}var a=d3.layout.hierarchy().sort(null).value(null),b=Z,c=[1,1];d.separation=function(a){if(!arguments.length)return b;b=a;return d},d.size=function(a){if(!arguments.length)return c;c=a;return d};return F(d,a)},d3.layout.treemap=function(){function l(b){var f=e||a(b),j=f[0];j.x=0,j.y=0,j.dx=c[0],j.dy=c[1],e&&a.revalue(j),g(j,c[0]*c[1]/j.value),(e?i:h)(j),d&&(e=f);return f}function k(a,c,d,e){var f=-1,g=a.length,h=d.x,i=d.y,j=c?b(a.area/c):0,k;if(c==d.dx){if(e||j>d.dy)j=d.dy;while(++f<g)k=a[f],k.x=h,k.y=i,k.dy=j,h+=k.dx=b(k.area/j);k.z=!0,k.dx+=d.x+d.dx-h,d.y+=j,d.dy-=j}else{if(e||j>d.dx)j=d.dx;while(++f<g)k=a[f],k.x=h,k.y=i,k.dx=j,i+=k.dy=b(k.area/j);k.z=!1,k.dy+=d.y+d.dy-i,d.x+=j,d.dx-=j}}function j(a,b){var c=a.area,d,e=0,g=Infinity,h=-1,i=a.length;while(++h<i)d=a[h].area,d<g&&(g=d),d>e&&(e=d);c*=c,b*=b;return Math.max(b*e*f/c,c/(b*g*f))}function i(a){if(!!a.children){var b={x:a.x,y:a.y,dx:a.dx,dy:a.dy},c=a.children.slice(),d,e=[];e.area=0;while(d=c.pop())e.push(d),e.area+=d.area,d.z!=null&&(k(e,d.z?b.dx:b.dy,b,!c.length),e.length=e.area=0);a.children.forEach(i)}}function h(a){if(!!a.children){var b={x:a.x,y:a.y,dx:a.dx,dy:a.dy},c=[],d=a.children.slice(),e,f=Infinity,g,i=Math.min(b.dx,b.dy),l;c.area=0;while((l=d.length)>0){e=d[l-1];if(!e.area){d.pop();continue}c.push(e),c.area+=e.area,(g=j(c,i))<=f?(d.pop(),f=g):(c.area-=c.pop().area,k(c,i,b,!1),i=Math.min(b.dx,b.dy),c.length=c.area=0,f=Infinity)}c.length&&(k(c,i,b,!0),c.length=c.area=0),a.children.forEach(h)}}function g(a,b){var c=a.children;a.area=a.value*b;if(c){var d=-1,e=c.length;while(++d<e)g(c[d],b)}}var a=d3.layout.hierarchy(),b=Math.round,c=[1,1],d=!1,e,f=.5*(1+Math.sqrt(5));l.size=function(a){if(!arguments.length)return c;c=a;return l},l.round=function(a){if(!arguments.length)return b!=Number;b=a?Math.round:Number;return l},l.sticky=function(a){if(!arguments.length)return d;d=a,e=null;return l},l.ratio=function(a){if(!arguments.length)return f;f=a;return l};return F(l,a)}})()
<ide>\ No newline at end of file
<add>(function(){function bh(a,b,c){return a._tree.ancestor.parent==b.parent?a._tree.ancestor:c}function bg(a,b,c){a=a._tree,b=b._tree;var d=c/(b.number-a.number);a.change+=d,b.change-=d,b.shift+=c,b.prelim+=c,b.mod+=c}function bf(a){var b=0,c=0,d=a.children,e=d.length,f;while(--e>=0)f=d[e]._tree,f.prelim+=b,f.mod+=b,b+=f.shift+(c+=f.change)}function be(a,b){function c(a,d){var e=a.children;if(e){var f,g=null,h=-1,i=e.length;while(++h<i)f=e[h],c(f,g),g=f}b(a,d)}c(a,null)}function bd(a,b){return a.depth-b.depth}function bc(a,b){return b.x-a.x}function bb(a,b){return a.x-b.x}function ba(a,b){var c=a.children;if(c){var d,e=c.length,f=-1;while(++f<e)b(d=ba(c[f],b),a)>0&&(a=d)}return a}function _(a){return a.children?a.children[a.children.length-1]:a._tree.thread}function $(a){return a.children?a.children[0]:a._tree.thread}function Z(a,b){return a.parent==b.parent?1:2}function Y(a){var b=a.children;return b?Y(b[b.length-1]):a}function X(a){var b=a.children;return b?X(b[0]):a}function W(a){return a.reduce(function(a,b){return a+b.x},0)/a.length}function V(a){return 1+d3.max(a,function(a){return a.y})}function U(a,b,c){var d=b.r+c.r,e=a.r+c.r,f=b.x-a.x,g=b.y-a.y,h=Math.sqrt(f*f+g*g),i=(e*e+h*h-d*d)/(2*e*h),j=Math.acos(i),k=i*e,l=Math.sin(j)*e;f/=h,g/=h,c.x=a.x+k*f+l*g,c.y=a.y+k*g-l*f}function T(a,b,c,d){var e=a.children;a.x=b+=d*a.x,a.y=c+=d*a.y,a.r*=d;if(e){var f=-1,g=e.length;while(++f<g)T(e[f],b,c,d)}}function S(a){var b=a.children;b?(b.forEach(S),a.r=P(b)):a.r=Math.sqrt(a.value)}function R(a){delete a._pack_next,delete a._pack_prev}function Q(a){a._pack_next=a._pack_prev=a}function P(a){function l(a){b=Math.min(a.x-a.r,b),c=Math.max(a.x+a.r,c),d=Math.min(a.y-a.r,d),e=Math.max(a.y+a.r,e)}var b=Infinity,c=-Infinity,d=Infinity,e=-Infinity,f=a.length,g,h,i,j,k;a.forEach(Q),g=a[0],g.x=-g.r,g.y=0,l(g);if(f>1){h=a[1],h.x=h.r,h.y=0,l(h);if(f>2){i=a[2],U(g,h,i),l(i),M(g,i),g._pack_prev=i,M(i,h),h=g._pack_next;for(var m=3;m<f;m++){U(g,h,i=a[m]);var n=0,o=1,p=1;for(j=h._pack_next;j!==h;j=j._pack_next,o++)if(O(j,i)){n=1;break}if(n==1)for(k=g._pack_prev;k!==j._pack_prev;k=k._pack_prev,p++)if(O(k,i)){p<o&&(n=-1,j=k);break}n==0?(M(g,i),h=i,l(i)):n>0?(N(g,j),h=j,m--):(N(j,h),g=j,m--)}}}var q=(b+c)/2,r=(d+e)/2,s=0;for(var m=0;m<f;m++){var t=a[m];t.x-=q,t.y-=r,s=Math.max(s,t.r+Math.sqrt(t.x*t.x+t.y*t.y))}a.forEach(R);return s}function O(a,b){var c=b.x-a.x,d=b.y-a.y,e=a.r+b.r;return e*e-c*c-d*d>.001}function N(a,b){a._pack_next=b,b._pack_prev=a}function M(a,b){var c=a._pack_next;a._pack_next=b,b._pack_prev=a,b._pack_next=c,c._pack_prev=b}function L(a,b){return a.value-b.value}function J(a){return d3.merge(a.map(function(a){return(a.children||[]).map(function(b){return{source:a,target:b}})}))}function I(a,b){return b.value-a.value}function H(a){return a.value}function G(a){return a.children}function F(a,b){a.sort=d3.rebind(a,b.sort),a.children=d3.rebind(a,b.children),a.links=J,a.value=d3.rebind(a,b.value),a.nodes=function(b){K=!0;return(a.nodes=a)(b)};return a}function E(a){return[d3.min(a),d3.max(a)]}function D(a,b){var c=-1,d=+a[0],e=(a[1]-d)/b,f=[];while(++c<=b)f[c]=e*c+d;return f}function C(a,b){return D(a,Math.ceil(Math.log(b.length)/Math.LN2+1))}function B(a,b){return a+b[1]}function A(a){return a.reduce(B,0)}function z(a){var b=1,c=0,d=a[0][1],e,f=a.length;for(;b<f;++b)(e=a[b][1])>d&&(c=b,d=e);return c}function w(a,b,c){a.y0=b,a.y=c}function v(a){return a.y}function u(a){return a.x}function t(a){return 1}function s(a){return 20}function r(a){var 
b=0,c=0;a.count=0,a.leaf||a.nodes.forEach(function(d){r(d),a.count+=d.count,b+=d.count*d.cx,c+=d.count*d.cy}),a.point&&(a.leaf||(a.point.x+=Math.random()-.5,a.point.y+=Math.random()-.5),a.count++,b+=a.point.x,c+=a.point.y),a.cx=b/a.count,a.cy=c/a.count}function q(){d3.event.stopPropagation(),d3.event.preventDefault()}function p(){i&&(q(),i=!1)}function o(){!f||(g&&(i=!0,q()),d3.event.type==="mouseup"&&n(),f.fixed=!1,e=h=f=j=null)}function n(){if(!!f){var a=j.parentNode;if(!a){f.fixed=!1,h=f=j=null;return}var b=m(a);g=!0,f.px=b[0]-h[0],f.py=b[1]-h[1],q(),e.resume()}}function m(a){return d3.event.touches?d3.svg.touches(a)[0]:d3.svg.mouse(a)}function l(a){a!==f&&(a.fixed=!1)}function k(a){a.fixed=!0}function c(a,c){if(a===c)return a;var d=b(a),e=b(c),f=d.pop(),g=e.pop(),h=null;while(f===g)h=f,f=d.pop(),g=e.pop();return h}function b(a){var b=[],c=a.parent;while(c!=null)b.push(a),a=c,c=c.parent;b.push(a);return b}function a(a){var b=a.source,d=a.target,e=c(b,d),f=[b];while(b!==e)b=b.parent,f.push(b);var g=f.length;while(d!==e)f.splice(g,0,d),d=d.parent;return f}d3.layout={},d3.layout.bundle=function(){return function(b){var c=[],d=-1,e=b.length;while(++d<e)c.push(a(b[d]));return c}},d3.layout.chord=function(){function k(){b.sort(function(a,b){return i(a.target.value,b.target.value)})}function j(){var a={},j=[],l=d3.range(e),m=[],n,o,p,q,r;b=[],c=[],n=0,q=-1;while(++q<e){o=0,r=-1;while(++r<e)o+=d[q][r];j.push(o),m.push(d3.range(e)),n+=o}g&&l.sort(function(a,b){return g(j[a],j[b])}),h&&m.forEach(function(a,b){a.sort(function(a,c){return h(d[b][a],d[b][c])})}),n=(2*Math.PI-f*e)/n,o=0,q=-1;while(++q<e){p=o,r=-1;while(++r<e){var s=l[q],t=m[q][r],u=d[s][t];a[s+"-"+t]={index:s,subindex:t,startAngle:o,endAngle:o+=u*n,value:u}}c.push({index:s,startAngle:p,endAngle:o,value:(o-p)/n}),o+=f}q=-1;while(++q<e){r=q-1;while(++r<e){var v=a[q+"-"+r],w=a[r+"-"+q];(v.value||w.value)&&b.push(v.value<w.value?{source:w,target:v}:{source:v,target:w})}}i&&k()}var a={},b,c,d,e,f=0,g,h,i;a.matrix=function(f){if(!arguments.length)return d;e=(d=f)&&d.length,b=c=null;return a},a.padding=function(d){if(!arguments.length)return f;f=d,b=c=null;return a},a.sortGroups=function(d){if(!arguments.length)return g;g=d,b=c=null;return a},a.sortSubgroups=function(c){if(!arguments.length)return h;h=c,b=null;return a},a.sortChords=function(c){if(!arguments.length)return i;i=c,b&&k();return a},a.chords=function(){b||j();return b},a.groups=function(){c||j();return c};return a},d3.layout.force=function(){function G(b,c){var d=m(this.parentNode);(f=b).fixed=!0,g=!1,j=this,e=a,h=[d[0]-b.x,d[1]-b.y],q()}function F(){var a=A.length,e=B.length,f=d3.geom.quadtree(A),g,h,j,k,l,m,n;for(g=0;g<e;++g){h=B[g],j=h.source,k=h.target,m=k.x-j.x,n=k.y-j.y;if(l=m*m+n*n)l=d*D[g]*((l=Math.sqrt(l))-C[g])/l,m*=l,n*=l,k.x-=m,k.y-=n,j.x+=m,j.y+=n}var o=d*x;m=c[0]/2,n=c[1]/2,g=-1;while(++g<a)h=A[g],h.x+=(m-h.x)*o,h.y+=(n-h.y)*o;r(f);var p=d*w;g=-1;while(++g<a)f.visit(E(A[g],p));g=-1;while(++g<a)h=A[g],h.fixed?(h.x=h.px,h.y=h.py):(h.x-=(h.px-(h.px=h.x))*i,h.y-=(h.py-(h.py=h.y))*i);b.tick.dispatch({type:"tick",alpha:d});return(d*=.99)<.005}function E(a,b){return function(c,d,e,f,g){if(c.point!==a){var h=c.cx-a.x,i=c.cy-a.y,j=1/Math.sqrt(h*h+i*i);if((f-d)*j<y){var k=b*c.count*j*j;a.x+=h*k,a.y+=i*k;return!0}if(c.point&&isFinite(j)){var k=b*j*j;a.x+=h*k,a.y+=i*k}}}}var a={},b=d3.dispatch("tick"),c=[1,1],d,i=.9,u=s,v=t,w=-30,x=.1,y=.8,z,A=[],B=[],C,D;a.on=function(c,d){b[c].add(d);return a},a.nodes=function(b){if(!arguments.length)return A;A=b;return 
a},a.links=function(b){if(!arguments.length)return B;B=b;return a},a.size=function(b){if(!arguments.length)return c;c=b;return a},a.linkDistance=function(b){if(!arguments.length)return u;u=d3.functor(b);return a},a.distance=a.linkDistance,a.linkStrength=function(b){if(!arguments.length)return v;v=d3.functor(b);return a},a.friction=function(b){if(!arguments.length)return i;i=b;return a},a.charge=function(b){if(!arguments.length)return w;w=b;return a},a.gravity=function(b){if(!arguments.length)return x;x=b;return a},a.theta=function(b){if(!arguments.length)return y;y=b;return a},a.start=function(){function l(){if(!i){i=[];for(d=0;d<e;++d)i[d]=[];for(d=0;d<f;++d){var a=B[d];i[a.source.index].push(a.target),i[a.target.index].push(a.source)}}return i[b]}function k(a,c){var d=l(b),e=-1,f=d.length,g;while(++e<f)if(!isNaN(g=d[e][a]))return g;return Math.random()*c}var b,d,e=A.length,f=B.length,g=c[0],h=c[1],i,j;for(b=0;b<e;++b)(j=A[b]).index=b;C=[],D=[];for(b=0;b<f;++b)j=B[b],typeof j.source=="number"&&(j.source=A[j.source]),typeof j.target=="number"&&(j.target=A[j.target]),C[b]=u.call(this,j,b),D[b]=v.call(this,j,b);for(b=0;b<e;++b)j=A[b],isNaN(j.x)&&(j.x=k("x",g)),isNaN(j.y)&&(j.y=k("y",h)),isNaN(j.px)&&(j.px=j.x),isNaN(j.py)&&(j.py=j.y);return a.resume()},a.resume=function(){d=.1,d3.timer(F);return a},a.stop=function(){d=0;return a},a.drag=function(){this.on("mouseover.force",k).on("mouseout.force",l).on("mousedown.force",G).on("touchstart.force",G),d3.select(window).on("mousemove.force",n).on("touchmove.force",n).on("mouseup.force",o,!0).on("touchend.force",o,!0).on("click.force",p,!0);return a};return a};var e,f,g,h,i,j;d3.layout.partition=function(){function e(e,f){var g=a.call(this,e,f);c(g[0],0,b[0],b[1]/d(g[0]));return g}function d(a){var b=a.children,c=0;if(b){var e=-1,f=b.length;while(++e<f)c=Math.max(c,d(b[e]))}return 1+c}function c(a,b,d,e){var f=a.children;a.x=b,a.y=a.depth*e,a.dx=d,a.dy=e;if(f){var g=-1,h=f.length,i,j;d/=a.value;while(++g<h)c(i=f[g],b,j=i.value*d,e),b+=j}}var a=d3.layout.hierarchy(),b=[1,1];e.size=function(a){if(!arguments.length)return b;b=a;return e};return F(e,a)},d3.layout.pie=function(){function f(f,g){var h=+(typeof c=="function"?c.apply(this,arguments):c),i=(typeof e=="function"?e.apply(this,arguments):e)-c,j=d3.range(f.length);b!=null&&j.sort(function(a,c){return b(f[a],f[c])});var k=f.map(a);i/=k.reduce(function(a,b){return a+b},0);var l=j.map(function(a){return{value:d=k[a],startAngle:h,endAngle:h+=d*i}});return f.map(function(a,b){return l[j[b]]})}var a=Number,b=null,c=0,e=2*Math.PI;f.value=function(b){if(!arguments.length)return a;a=b;return f},f.sort=function(a){if(!arguments.length)return b;b=a;return f},f.startAngle=function(a){if(!arguments.length)return c;c=a;return f},f.endAngle=function(a){if(!arguments.length)return e;e=a;return f};return f},d3.layout.stack=function(){function g(h,i){var j=h.map(function(b,c){return a.call(g,b,c)}),k=j.map(function(a,b){return a.map(function(a,b){return[e.call(g,a,b),f.call(g,a,b)]})}),l=b.call(g,k,i);j=d3.permute(j,l),k=d3.permute(k,l);var m=c.call(g,k,i),n=j.length,o=j[0].length,p,q,r;for(q=0;q<o;++q){d.call(g,j[0][q],r=m[q],k[0][q][1]);for(p=1;p<n;++p)d.call(g,j[p][q],r+=k[p-1][q][1],k[p][q][1])}return h}var a=Object,b=x["default"],c=y.zero,d=w,e=u,f=v;g.values=function(b){if(!arguments.length)return a;a=b;return g},g.order=function(a){if(!arguments.length)return b;b=typeof a=="function"?a:x[a];return g},g.offset=function(a){if(!arguments.length)return c;c=typeof a=="function"?a:y[a];return 
g},g.x=function(a){if(!arguments.length)return e;e=a;return g},g.y=function(a){if(!arguments.length)return f;f=a;return g},g.out=function(a){if(!arguments.length)return d;d=a;return g};return g};var x={"inside-out":function(a){var b=a.length,c,d,e=a.map(z),f=a.map(A),g=d3.range(b).sort(function(a,b){return e[a]-e[b]}),h=0,i=0,j=[],k=[];for(c=0;c<b;++c)d=g[c],h<i?(h+=f[d],j.push(d)):(i+=f[d],k.push(d));return k.reverse().concat(j)},reverse:function(a){return d3.range(a.length).reverse()},"default":function(a){return d3.range(a.length)}},y={silhouette:function(a){var b=a.length,c=a[0].length,d=[],e=0,f,g,h,i=[];for(g=0;g<c;++g){for(f=0,h=0;f<b;f++)h+=a[f][g][1];h>e&&(e=h),d.push(h)}for(g=0;g<c;++g)i[g]=(e-d[g])/2;return i},wiggle:function(a){var b=a.length,c=a[0],d=c.length,e=0,f,g,h,i,j,k,l,m,n,o=[];o[0]=m=n=0;for(g=1;g<d;++g){for(f=0,i=0;f<b;++f)i+=a[f][g][1];for(f=0,j=0,l=c[g][0]-c[g-1][0];f<b;++f){for(h=0,k=(a[f][g][1]-a[f][g-1][1])/(2*l);h<f;++h)k+=(a[h][g][1]-a[h][g-1][1])/l;j+=k*a[f][g][1]}o[g]=m-=i?j/i*l:0,m<n&&(n=m)}for(g=0;g<d;++g)o[g]-=n;return o},expand:function(a){var b=a.length,c=a[0].length,d=1/b,e,f,g,h=[];for(f=0;f<c;++f){for(e=0,g=0;e<b;e++)g+=a[e][f][1];if(g)for(e=0;e<b;e++)a[e][f][1]/=g;else for(e=0;e<b;e++)a[e][f][1]=d}for(f=0;f<c;++f)h[f]=0;return h},zero:function(a){var b=-1,c=a[0].length,d=[];while(++b<c)d[b]=0;return d}};d3.layout.histogram=function(){function e(e,f){var g=[],h=e.map(b,this),i=c.call(this,h,f),j=d.call(this,i,h,f),k,f=-1,l=h.length,m=j.length-1,n=a?1/l:1,o;while(++f<m)k=g[f]=[],k.dx=j[f+1]-(k.x=j[f]),k.y=0;f=-1;while(++f<l)o=h[f],o>=i[0]&&o<=i[1]&&(k=g[d3.bisect(j,o,1,m)-1],k.y+=n,k.push(e[f]));return g}var a=!0,b=Number,c=E,d=C;e.value=function(a){if(!arguments.length)return b;b=a;return e},e.range=function(a){if(!arguments.length)return c;c=d3.functor(a);return e},e.bins=function(a){if(!arguments.length)return d;d=typeof a=="number"?function(b){return D(b,a)}:d3.functor(a);return e},e.frequency=function(b){if(!arguments.length)return a;a=!!b;return e};return e},d3.layout.hierarchy=function(){function g(a){var b=[];e(a,0,b);return b}function f(a,b){var d=a.children,e=0;if(d){var h=-1,i=d.length,j=b+1;while(++h<i)e+=f(d[h],j)}else c&&(e=c.call(g,K?a:a.data,b));c&&(a.value=e);return e}function e(f,h,i){var j=b.call(g,f,h),k=K?f:{data:f};k.depth=h,i.push(k);if(j){var l=-1,m=j.length,n=k.children=[],o=0,p=h+1;while(++l<m)d=e(j[l],p,i),d.parent=k,n.push(d),o+=d.value;a&&n.sort(a),c&&(k.value=o)}else c&&(k.value=c.call(g,f,h));return k}var a=I,b=G,c=H;g.sort=function(b){if(!arguments.length)return a;a=b;return g},g.children=function(a){if(!arguments.length)return b;b=a;return g},g.value=function(a){if(!arguments.length)return c;c=a;return g},g.revalue=function(a){f(a,0);return a};return g};var K=!1;d3.layout.pack=function(){function c(c,d){var e=a.call(this,c,d),f=e[0];f.x=0,f.y=0,S(f);var g=b[0],h=b[1],i=1/Math.max(2*f.r/g,2*f.r/h);T(f,g/2,h/2,i);return e}var a=d3.layout.hierarchy().sort(L),b=[1,1];c.size=function(a){if(!arguments.length)return b;b=a;return c};return F(c,a)},d3.layout.cluster=function(){function d(d,e){var f=a.call(this,d,e),g=f[0],h,i=0,j,k;be(g,function(a){a.children?(a.x=W(a.children),a.y=V(a.children)):(a.x=h?i+=b(a,h):0,a.y=0,h=a)});var l=X(g),m=Y(g),n=l.x-b(l,m)/2,o=m.x+b(m,l)/2;be(g,function(a){a.x=(a.x-n)/(o-n)*c[0],a.y=(1-a.y/g.y)*c[1]});return f}var a=d3.layout.hierarchy().sort(null).value(null),b=Z,c=[1,1];d.separation=function(a){if(!arguments.length)return b;b=a;return d},d.size=function(a){if(!arguments.length)return 
c;c=a;return d};return F(d,a)},d3.layout.tree=function(){function d(d,e){function j(a,c,d){if(c){var e=a,f=a,g=c,h=a.parent.children[0],i=e._tree.mod,j=f._tree.mod,k=g._tree.mod,l=h._tree.mod,m;while(g=_(g),e=$(e),g&&e)h=$(h),f=_(f),f._tree.ancestor=a,m=g._tree.prelim+k-e._tree.prelim-i+b(g,e),m>0&&(bg(bh(g,a,d),a,m),i+=m,j+=m),k+=g._tree.mod,i+=e._tree.mod,l+=h._tree.mod,j+=f._tree.mod;g&&!_(f)&&(f._tree.thread=g,f._tree.mod+=k-j),e&&!$(h)&&(h._tree.thread=e,h._tree.mod+=i-l,d=a)}return d}function i(a,b){a.x=a._tree.prelim+b;var c=a.children;if(c){var d=-1,e=c.length;b+=a._tree.mod;while(++d<e)i(c[d],b)}}function h(a,c){var d=a.children,e=a._tree;if(d){var f=d.length,g=d[0],i,k=g,l,m=-1;while(++m<f)l=d[m],h(l,i),k=j(l,i,k),i=l;bf(a);var n=.5*(g._tree.prelim+l._tree.prelim);c?(e.prelim=c._tree.prelim+b(a,c),e.mod=e.prelim-n):e.prelim=n}else c&&(e.prelim=c._tree.prelim+b(a,c))}var f=a.call(this,d,e),g=f[0];be(g,function(a,b){a._tree={ancestor:a,prelim:0,mod:0,change:0,shift:0,number:b?b._tree.number+1:0}}),h(g),i(g,-g._tree.prelim);var k=ba(g,bc),l=ba(g,bb),m=ba(g,bd),n=k.x-b(k,l)/2,o=l.x+b(l,k)/2,p=m.depth||1;be(g,function(a){a.x=(a.x-n)/(o-n)*c[0],a.y=a.depth/p*c[1],delete a._tree});return f}var a=d3.layout.hierarchy().sort(null).value(null),b=Z,c=[1,1];d.separation=function(a){if(!arguments.length)return b;b=a;return d},d.size=function(a){if(!arguments.length)return c;c=a;return d};return F(d,a)},d3.layout.treemap=function(){function l(b){var f=e||a(b),j=f[0];j.x=0,j.y=0,j.dx=c[0],j.dy=c[1],e&&a.revalue(j),g(j,c[0]*c[1]/j.value),(e?i:h)(j),d&&(e=f);return f}function k(a,c,d,e){var f=-1,g=a.length,h=d.x,i=d.y,j=c?b(a.area/c):0,k;if(c==d.dx){if(e||j>d.dy)j=d.dy;while(++f<g)k=a[f],k.x=h,k.y=i,k.dy=j,h+=k.dx=b(k.area/j);k.z=!0,k.dx+=d.x+d.dx-h,d.y+=j,d.dy-=j}else{if(e||j>d.dx)j=d.dx;while(++f<g)k=a[f],k.x=h,k.y=i,k.dx=j,i+=k.dy=b(k.area/j);k.z=!1,k.dy+=d.y+d.dy-i,d.x+=j,d.dx-=j}}function j(a,b){var c=a.area,d,e=0,g=Infinity,h=-1,i=a.length;while(++h<i)d=a[h].area,d<g&&(g=d),d>e&&(e=d);c*=c,b*=b;return Math.max(b*e*f/c,c/(b*g*f))}function i(a){if(!!a.children){var b={x:a.x,y:a.y,dx:a.dx,dy:a.dy},c=a.children.slice(),d,e=[];e.area=0;while(d=c.pop())e.push(d),e.area+=d.area,d.z!=null&&(k(e,d.z?b.dx:b.dy,b,!c.length),e.length=e.area=0);a.children.forEach(i)}}function h(a){if(!!a.children){var b={x:a.x,y:a.y,dx:a.dx,dy:a.dy},c=[],d=a.children.slice(),e,f=Infinity,g,i=Math.min(b.dx,b.dy),l;c.area=0;while((l=d.length)>0){e=d[l-1];if(isNaN(e.area)||e.area<=0){d.pop();continue}c.push(e),c.area+=e.area,(g=j(c,i))<=f?(d.pop(),f=g):(c.area-=c.pop().area,k(c,i,b,!1),i=Math.min(b.dx,b.dy),c.length=c.area=0,f=Infinity)}c.length&&(k(c,i,b,!0),c.length=c.area=0),a.children.forEach(h)}}function g(a,b){var c=a.children;a.area=a.value*b;if(c){var d=-1,e=c.length;while(++d<e)g(c[d],b)}}var a=d3.layout.hierarchy(),b=Math.round,c=[1,1],d=!1,e,f=.5*(1+Math.sqrt(5));l.size=function(a){if(!arguments.length)return c;c=a;return l},l.round=function(a){if(!arguments.length)return b!=Number;b=a?Math.round:Number;return l},l.sticky=function(a){if(!arguments.length)return d;d=a,e=null;return l},l.ratio=function(a){if(!arguments.length)return f;f=a;return l};return F(l,a)}})()
<ide>\ No newline at end of file
<ide><path>src/layout/treemap.js
<ide> d3.layout.treemap = function() {
<ide> row.area = 0;
<ide> while ((n = children.length) > 0) {
<ide> child = children[n - 1];
<del> if (!child.area) {
<add> if (isNaN(child.area) || child.area <= 0) {
<ide> children.pop();
<ide> continue;
<ide> } | 3 |
Python | Python | fix typo in `automodelformaskedlm` docs | e477eb919f675edf2c2bdc35ce67cdbd880ebb5b | <ide><path>src/transformers/modeling_auto.py
<ide> class AutoModelForMaskedLM:
<ide> This is a generic model class that will be instantiated as one of the model classes of the library---with a masked
<ide> language modeling head---when created with the when created with the
<ide> :meth:`~transformers.AutoModelForMaskedLM.from_pretrained` class method or the
<del> :meth:`~transformers.AutoModelForMasedLM.from_config` class method.
<add> :meth:`~transformers.AutoModelForMaskedLM.from_config` class method.
<ide>
<ide> This class cannot be instantiated directly using ``__init__()`` (throws an error).
<ide> """ | 1 |
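The docstring corrected above describes the two ways an `AutoModelForMaskedLM` is meant to be instantiated. A short sketch of both calls follows; `"bert-base-uncased"` is just an example checkpoint name, not something taken from the patch.

    # Sketch of the two instantiation paths the corrected docstring refers to.
    from transformers import AutoConfig, AutoModelForMaskedLM

    # from_config builds the architecture with freshly initialized weights...
    config = AutoConfig.from_pretrained("bert-base-uncased")
    model = AutoModelForMaskedLM.from_config(config)

    # ...while from_pretrained also downloads and loads the pretrained weights.
    model = AutoModelForMaskedLM.from_pretrained("bert-base-uncased")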
Python | Python | add log_steps with faster logging for 8xgpu. | 8390b3622284499df8f94f12ce27564b92c9c7c0 | <ide><path>official/staging/shakespeare/shakespeare_benchmark.py
<ide> def __init__(self, output_dir=None, root_data_dir=None, **kwargs):
<ide> def_flags['training_data'] = self.train_data
<ide> def_flags['model_dir'] = ''
<ide> def_flags['train_epochs'] = 4
<add> def_flags['log_steps'] = 50
<ide>
<ide> super(ShakespeareKerasBenchmarkReal, self).__init__(
<ide> output_dir=output_dir,
<ide> def benchmark_8_gpu(self):
<ide> self._setup()
<ide> FLAGS.num_gpus = 8
<ide> FLAGS.batch_size = 64 * 8
<add> FLAGS.log_steps = 10
<ide> self._run_and_report_benchmark()
<ide>
<ide> def benchmark_xla_8_gpu(self):
<ide> """Benchmark 8 gpu w/xla."""
<ide> self._setup()
<ide> FLAGS.num_gpus = 1
<ide> FLAGS.batch_size = 64 * 8
<add> FLAGS.log_steps = 10
<ide> FLAGS.enable_xla = True
<ide> self._run_and_report_benchmark()
<ide>
<ide> def _run_and_report_benchmark(self):
<ide> """Run and report benchmark."""
<ide> super(ShakespeareKerasBenchmarkReal, self)._run_and_report_benchmark(
<del> top_1_train_min=None)
<add> top_1_train_min=None, log_steps=FLAGS.log_steps)
<ide><path>official/staging/shakespeare/shakespeare_main.py
<ide> def define_flags():
<ide> flags.DEFINE_integer(
<ide> name='predict_length', default=1000,
<ide> help='Length of the predicted text including the context.')
<add> flags.DEFINE_integer(
<add> name='log_steps', default=100,
<add> help='For every log_steps, we log the timing information such as '
<add> 'examples per second.')
<ide> flags.DEFINE_string(
<ide> name='training_data', default=None,
<ide> help='Path to file containing the training data.')
<ide> def train_model(flags_obj, dataset, vocab_size, strategy, checkpoint_dir=None):
<ide> filepath=checkpoint_prefix,
<ide> save_weights_only=True)
<ide> callbacks.append(checkpoint_callback)
<del> time_callback = keras_utils.TimeHistory(flags_obj.batch_size, 100)
<add> time_callback = keras_utils.TimeHistory(flags_obj.batch_size,
<add> flags_obj.log_steps)
<ide> callbacks.append(time_callback)
<ide> history = model.fit(dataset,
<ide> epochs=flags_obj.train_epochs, | 2 |
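The patch above threads the new `--log_steps` flag into the repo's `keras_utils.TimeHistory` callback. That helper is specific to the TF models repository, so the sketch below is only a generic illustration of the idea (report throughput every `log_steps` training batches), with illustrative names rather than the repo's actual implementation.

    # Generic sketch of a log_steps-driven throughput callback (not the repo's
    # keras_utils.TimeHistory; names and details here are illustrative).
    import time
    import tensorflow as tf

    class SimpleTimeHistory(tf.keras.callbacks.Callback):
        def __init__(self, batch_size, log_steps):
            super().__init__()
            self.batch_size = batch_size  # global batch size, e.g. 64 * num_gpus
            self.log_steps = log_steps    # report every N steps
            self._start = None

        def on_batch_begin(self, batch, logs=None):
            if batch % self.log_steps == 0:
                self._start = time.time()

        def on_batch_end(self, batch, logs=None):
            if (batch + 1) % self.log_steps == 0 and self._start is not None:
                elapsed = time.time() - self._start
                examples_per_sec = self.log_steps * self.batch_size / elapsed
                print(f"steps {batch + 1 - self.log_steps}-{batch + 1}: "
                      f"{examples_per_sec:.1f} examples/sec")

The 8-GPU benchmarks in the patch pair the 8x larger global batch (64 * 8) with a smaller `log_steps` of 10, presumably so that each log line still covers a comparable amount of work to the default of 50 steps.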
Javascript | Javascript | add mozcentral options | 6aa161566ee3a27b38b8b25242db6993c0f5414d
<ide> var EXTENSION_WEB_FILES =
<ide> 'web/viewer.js',
<ide> 'web/viewer.html',
<ide> 'external/webL10n/l10n.js',
<del> 'web/locale.properties',
<ide> 'web/viewer-production.html'],
<ide> EXTENSION_BASE_VERSION = 'f0f0418a9c6637981fe1182b9212c2d592774c7d',
<ide> EXTENSION_VERSION_PREFIX = '0.3.',
<ide> target.firefox = function() {
<ide> FIREFOX_EXTENSION_FILES_TO_COPY =
<ide> ['*.js',
<ide> '*.rdf',
<add> '*.svg',
<ide> '*.png',
<del> 'install.rdf.in',
<del> 'README.mozilla',
<ide> 'components',
<del> '../../LICENSE'];
<add> '../../LICENSE'],
<ide> FIREFOX_EXTENSION_FILES =
<ide> ['bootstrap.js',
<ide> 'install.rdf',
<ide> 'icon.png',
<ide> 'icon64.png',
<ide> 'components',
<ide> 'content',
<del> 'LICENSE'];
<del> FIREFOX_MC_EXTENSION_FILES =
<del> ['bootstrap.js',
<del> 'icon.png',
<del> 'icon64.png',
<del> 'components',
<del> 'content',
<del> 'LICENSE'];
<add> 'LICENSE'],
<ide> FIREFOX_EXTENSION_NAME = 'pdf.js.xpi',
<ide> FIREFOX_AMO_EXTENSION_NAME = 'pdf.js.amo.xpi';
<ide>
<ide> target.firefox = function() {
<ide> // Copy a standalone version of pdf.js inside the content directory
<ide> cp(BUILD_TARGET, FIREFOX_BUILD_CONTENT_DIR + BUILD_DIR);
<ide> cp('-R', EXTENSION_WEB_FILES, FIREFOX_BUILD_CONTENT_DIR + '/web');
<add> cp('web/locale.properties', FIREFOX_BUILD_CONTENT_DIR + '/web');
<ide> rm(FIREFOX_BUILD_CONTENT_DIR + '/web/viewer-production.html');
<ide>
<ide> // Copy over the firefox extension snippet so we can inline pdf.js in it
<ide> target.firefox = function() {
<ide> // Update the build version number
<ide> sed('-i', /PDFJSSCRIPT_VERSION/, EXTENSION_VERSION, FIREFOX_BUILD_DIR + '/install.rdf');
<ide> sed('-i', /PDFJSSCRIPT_VERSION/, EXTENSION_VERSION, FIREFOX_BUILD_DIR + '/update.rdf');
<del> sed('-i', /PDFJSSCRIPT_VERSION/, EXTENSION_VERSION, FIREFOX_BUILD_DIR + '/install.rdf.in');
<del> sed('-i', /PDFJSSCRIPT_VERSION/, EXTENSION_VERSION, FIREFOX_BUILD_DIR + '/README.mozilla');
<ide>
<ide> // Update localized metadata
<ide> var localizedMetadata = cat(EXTENSION_SRC_DIR + '/firefox/metadata.inc');
<ide> sed('-i', /.*PDFJS_LOCALIZED_METADATA.*\n/, localizedMetadata, FIREFOX_BUILD_DIR + '/install.rdf');
<del> sed('-i', /.*PDFJS_LOCALIZED_METADATA.*\n/, localizedMetadata, FIREFOX_BUILD_DIR + '/install.rdf.in');
<ide>
<ide> // Create the xpi
<ide> cd(FIREFOX_BUILD_DIR);
<ide> target.firefox = function() {
<ide> exec('zip -r ' + FIREFOX_AMO_EXTENSION_NAME + ' ' + FIREFOX_EXTENSION_FILES.join(' '));
<ide> echo('AMO extension created: ' + FIREFOX_AMO_EXTENSION_NAME);
<ide> cd(ROOT_DIR);
<add>};
<add>
<add>//
<add>// make mozcentral
<add>//
<add>target.mozcentral = function() {
<add> cd(ROOT_DIR);
<add> echo();
<add> echo('### Building mozilla-central extension');
<add>
<add> var MOZCENTRAL_DIR = BUILD_DIR + '/mozcentral',
<add> MOZCENTRAL_CONTENT_DIR = MOZCENTRAL_DIR + '/content/',
<add> MOZCENTRAL_L10N_DIR = MOZCENTRAL_DIR + '/l10n/',
<add> FIREFOX_CONTENT_DIR = EXTENSION_SRC_DIR + '/firefox/content/',
<add> FIREFOX_EXTENSION_FILES_TO_COPY =
<add> ['*.js',
<add> '*.svg',
<add> '*.png',
<add> 'install.rdf.in',
<add> 'README.mozilla',
<add> 'components',
<add> '../../LICENSE'],
<add> DEFAULT_LOCALE_FILES =
<add> ['l10n/en-US/viewer.properties',
<add> 'l10n/en-US/metadata.inc'],
<add> FIREFOX_MC_EXTENSION_FILES =
<add> ['bootstrap.js',
<add> 'icon.png',
<add> 'icon64.png',
<add> 'components',
<add> 'content',
<add> 'LICENSE'];
<add>
<add> target.production();
<add> target.buildnumber();
<add> cd(ROOT_DIR);
<add>
<add> // Clear out everything in the firefox extension build directory
<add> rm('-rf', MOZCENTRAL_DIR);
<add> mkdir('-p', MOZCENTRAL_CONTENT_DIR);
<add> mkdir('-p', MOZCENTRAL_L10N_DIR);
<add> mkdir('-p', MOZCENTRAL_CONTENT_DIR + BUILD_DIR);
<add> mkdir('-p', MOZCENTRAL_CONTENT_DIR + '/web');
<add>
<add> // Copy extension files
<add> cd('extensions/firefox');
<add> cp('-R', FIREFOX_EXTENSION_FILES_TO_COPY, ROOT_DIR + MOZCENTRAL_DIR);
<add> cd(ROOT_DIR);
<add>
<add> // Copy a standalone version of pdf.js inside the content directory
<add> cp(BUILD_TARGET, MOZCENTRAL_CONTENT_DIR + BUILD_DIR);
<add> cp('-R', EXTENSION_WEB_FILES, MOZCENTRAL_CONTENT_DIR + '/web');
<add> rm(MOZCENTRAL_CONTENT_DIR + '/web/viewer-production.html');
<add>
<add> // Copy over the firefox extension snippet so we can inline pdf.js in it
<add> cp('web/viewer-snippet-firefox-extension.html', MOZCENTRAL_CONTENT_DIR + '/web');
<add>
<add> // Modify the viewer so it does all the extension-only stuff.
<add> cd(MOZCENTRAL_CONTENT_DIR + '/web');
<add> sed('-i', /.*PDFJSSCRIPT_INCLUDE_BUNDLE.*\n/, cat(ROOT_DIR + BUILD_TARGET), 'viewer-snippet-firefox-extension.html');
<add> sed('-i', /.*PDFJSSCRIPT_REMOVE_CORE.*\n/g, '', 'viewer.html');
<add> sed('-i', /.*PDFJSSCRIPT_REMOVE_FIREFOX_EXTENSION.*\n/g, '', 'viewer.html');
<add> sed('-i', /.*PDFJSSCRIPT_INCLUDE_FIREFOX_EXTENSION.*\n/, cat('viewer-snippet-firefox-extension.html'), 'viewer.html');
<add> cd(ROOT_DIR);
<add>
<add> // We don't need pdf.js anymore since its inlined
<add> rm('-Rf', MOZCENTRAL_CONTENT_DIR + BUILD_DIR);
<add> rm(MOZCENTRAL_CONTENT_DIR + '/web/viewer-snippet-firefox-extension.html');
<add> // Remove '.DS_Store' and other hidden files
<add> find(MOZCENTRAL_DIR).forEach(function(file) {
<add> if (file.match(/^\./))
<add> rm('-f', file);
<add> });
<add>
<add> // Copy default localization files
<add> cp(DEFAULT_LOCALE_FILES, MOZCENTRAL_L10N_DIR);
<add>
<add> // Update the build version number
<add> sed('-i', /PDFJSSCRIPT_VERSION/, EXTENSION_VERSION, MOZCENTRAL_DIR + '/install.rdf.in');
<add> sed('-i', /PDFJSSCRIPT_VERSION/, EXTENSION_VERSION, MOZCENTRAL_DIR + '/README.mozilla');
<ide>
<ide> // List all files for mozilla-central
<del> cd(FIREFOX_BUILD_DIR);
<add> cd(MOZCENTRAL_DIR);
<ide> var extensionFiles = '';
<ide> find(FIREFOX_MC_EXTENSION_FILES).forEach(function(file){
<ide> if (test('-f', file))
<ide> target.chrome = function() {
<ide> // Copy a standalone version of pdf.js inside the content directory
<ide> cp(BUILD_TARGET, CHROME_BUILD_CONTENT_DIR + BUILD_DIR);
<ide> cp('-R', EXTENSION_WEB_FILES, CHROME_BUILD_CONTENT_DIR + '/web');
<add> cp('web/locale.properties', CHROME_BUILD_CONTENT_DIR + '/web');
<ide> mv('-f', CHROME_BUILD_CONTENT_DIR + '/web/viewer-production.html',
<ide> CHROME_BUILD_CONTENT_DIR + '/web/viewer.html');
<ide> }; | 1 |