content_type
stringclasses 8
values | main_lang
stringclasses 7
values | message
stringlengths 1
50
| sha
stringlengths 40
40
| patch
stringlengths 52
962k
| file_count
int64 1
300
|
---|---|---|---|---|---|
PHP | PHP | fix restoration of xml entity loader flag | e202a1d9bc75c46f7780092c30cef41aba8d3712 | <ide><path>src/Utility/Xml.php
<ide> public static function loadHtml(string $input, array $options = [])
<ide>
<ide> $internalErrors = libxml_use_internal_errors(true);
<ide> if (!$options['loadEntities']) {
<del> libxml_disable_entity_loader(true);
<add> $previousDisabledEntityLoader = libxml_disable_entity_loader(true);
<ide> }
<ide> $flags = 0;
<ide> if (!empty($options['parseHuge'])) {
<ide> public static function loadHtml(string $input, array $options = [])
<ide> } catch (Exception $e) {
<ide> throw new XmlException('Xml cannot be read. ' . $e->getMessage(), null, $e);
<ide> } finally {
<del> if (!$options['loadEntities']) {
<del> libxml_disable_entity_loader(false);
<add> if (isset($previousDisabledEntityLoader)) {
<add> libxml_disable_entity_loader($previousDisabledEntityLoader);
<ide> }
<ide> libxml_use_internal_errors($internalErrors);
<ide> } | 1 |
Javascript | Javascript | add types for modal | 58cd046bb455e285c2980d1cd82d23bea76ccbb8 | <ide><path>Libraries/Modal/Modal.js
<ide> const PropTypes = require('prop-types');
<ide> const StyleSheet = require('../StyleSheet/StyleSheet');
<ide> const View = require('../Components/View/View');
<ide>
<del>const RCTModalHostView = require('./RCTModalHostViewNativeComponent');
<add>import RCTModalHostView from './RCTModalHostViewNativeComponent';
<ide> const ModalEventEmitter =
<ide> Platform.OS === 'ios' && NativeModalManager != null
<ide> ? new NativeEventEmitter(NativeModalManager)
<ide><path>Libraries/Modal/ModalSchema.js
<ide> const ModalSchema: SchemaType = {
<ide> components: {
<ide> ModalHostView: {
<ide> interfaceOnly: true,
<add> paperComponentName: 'RCTModalHostView',
<ide> extendsProps: [
<ide> {
<ide> type: 'ReactNativeBuiltInType',
<ide> const ModalSchema: SchemaType = {
<ide> {
<ide> name: 'onShow',
<ide> optional: true,
<del> bubblingType: 'bubble',
<add> bubblingType: 'direct',
<ide> typeAnnotation: {
<ide> type: 'EventTypeAnnotation',
<ide> argument: {
<ide> const ModalSchema: SchemaType = {
<ide> {
<ide> name: 'onOrientationChange',
<ide> optional: true,
<del> bubblingType: 'bubble',
<add> bubblingType: 'direct',
<ide> typeAnnotation: {
<ide> type: 'EventTypeAnnotation',
<ide> argument: {
<ide> type: 'ObjectTypeAnnotation',
<del> properties: [],
<add> properties: [
<add> {
<add> type: 'StringEnumTypeAnnotation',
<add> name: 'orientation',
<add> optional: false,
<add> options: [
<add> {
<add> name: 'portrait',
<add> },
<add> {
<add> name: 'landscape',
<add> },
<add> ],
<add> },
<add> ],
<ide> },
<ide> },
<ide> },
<ide> const ModalSchema: SchemaType = {
<ide> optional: true,
<ide> typeAnnotation: {
<ide> type: 'BooleanTypeAnnotation',
<del> default: true,
<add> default: false,
<add> },
<add> },
<add> {
<add> name: 'animated',
<add> optional: true,
<add> typeAnnotation: {
<add> type: 'BooleanTypeAnnotation',
<add> default: false,
<ide> },
<ide> },
<ide> {
<ide> name: 'supportedOrientations',
<ide> optional: true,
<ide> typeAnnotation: {
<del> type: 'StringEnumTypeAnnotation',
<del> default: 'portrait',
<del> options: [
<del> {
<del> name: 'portrait',
<del> },
<del> {
<del> name: 'portrait-upside-down',
<del> },
<del> {
<del> name: 'landscape',
<del> },
<del> {
<del> name: 'landscape-left',
<del> },
<del> {
<del> name: 'landscape-right',
<del> },
<del> ],
<add> type: 'ArrayTypeAnnotation',
<add> elementType: {
<add> type: 'StringEnumTypeAnnotation',
<add> default: 'portrait',
<add> options: [
<add> {
<add> name: 'portrait',
<add> },
<add> {
<add> name: 'portrait-upside-down',
<add> },
<add> {
<add> name: 'landscape',
<add> },
<add> {
<add> name: 'landscape-left',
<add> },
<add> {
<add> name: 'landscape-right',
<add> },
<add> ],
<add> },
<ide> },
<ide> },
<ide> {
<ide><path>Libraries/Modal/RCTModalHostViewNativeComponent.js
<ide>
<ide> 'use strict';
<ide>
<del>const requireNativeComponent = require('../ReactNative/requireNativeComponent');
<add>import codegenNativeComponent from '../Utilities/codegenNativeComponent';
<add>import type {
<add> WithDefault,
<add> BubblingEvent,
<add> DirectEvent,
<add> Int32,
<add>} from '../Types/CodegenTypes';
<ide>
<ide> import type {ViewProps} from '../Components/View/ViewPropTypes';
<del>import type {SyntheticEvent} from '../Types/CoreEventTypes';
<del>import type {NativeComponent} from '../Renderer/shims/ReactNative';
<ide>
<del>type OrientationChangeEvent = SyntheticEvent<
<del> $ReadOnly<{|
<del> orientation: 'portrait' | 'landscape',
<del> |}>,
<del>>;
<add>type OrientationChangeEvent = $ReadOnly<{|
<add> orientation: 'portrait' | 'landscape',
<add>|}>;
<ide>
<del>type ModalNativeProps = $ReadOnly<{|
<add>type NativeProps = $ReadOnly<{|
<ide> ...ViewProps,
<ide>
<ide> /**
<ide> * The `animationType` prop controls how the modal animates.
<ide> *
<ide> * See https://facebook.github.io/react-native/docs/modal.html#animationtype
<ide> */
<del> animationType?: ?('none' | 'slide' | 'fade'),
<add> animationType?: ?WithDefault<'none' | 'slide' | 'fade', 'none'>,
<ide>
<ide> /**
<ide> * The `presentationStyle` prop controls how the modal appears.
<ide> *
<ide> * See https://facebook.github.io/react-native/docs/modal.html#presentationstyle
<ide> */
<del> presentationStyle?: ?(
<del> | 'fullScreen'
<del> | 'pageSheet'
<del> | 'formSheet'
<del> | 'overFullScreen'
<del> ),
<add> presentationStyle?: ?WithDefault<
<add> 'fullScreen' | 'pageSheet' | 'formSheet' | 'overFullScreen',
<add> 'fullScreen',
<add> >,
<ide>
<ide> /**
<ide> * The `transparent` prop determines whether your modal will fill the
<ide> * entire view.
<ide> *
<ide> * See https://facebook.github.io/react-native/docs/modal.html#transparent
<ide> */
<del> transparent?: ?boolean,
<add> transparent?: ?WithDefault<boolean, false>,
<ide>
<ide> /**
<ide> * The `hardwareAccelerated` prop controls whether to force hardware
<ide> * acceleration for the underlying window.
<ide> *
<ide> * See https://facebook.github.io/react-native/docs/modal.html#hardwareaccelerated
<ide> */
<del> hardwareAccelerated?: ?boolean,
<add> hardwareAccelerated?: ?WithDefault<boolean, false>,
<ide>
<ide> /**
<ide> * The `visible` prop determines whether your modal is visible.
<ide> *
<ide> * See https://facebook.github.io/react-native/docs/modal.html#visible
<ide> */
<del> visible?: ?boolean,
<add> visible?: ?WithDefault<boolean, false>,
<ide>
<ide> /**
<ide> * The `onRequestClose` callback is called when the user taps the hardware
<ide> type ModalNativeProps = $ReadOnly<{|
<ide> *
<ide> * See https://facebook.github.io/react-native/docs/modal.html#onrequestclose
<ide> */
<del> onRequestClose?: ?(event?: SyntheticEvent<null>) => mixed,
<add> onRequestClose?: ?(event?: DirectEvent<null>) => mixed,
<ide>
<ide> /**
<ide> * The `onShow` prop allows passing a function that will be called once the
<ide> * modal has been shown.
<ide> *
<ide> * See https://facebook.github.io/react-native/docs/modal.html#onshow
<ide> */
<del> onShow?: ?(event?: SyntheticEvent<null>) => mixed,
<add> onShow?: ?(event?: DirectEvent<null>) => mixed,
<ide>
<ide> /**
<ide> * The `onDismiss` prop allows passing a function that will be called once
<ide> * the modal has been dismissed.
<ide> *
<ide> * See https://facebook.github.io/react-native/docs/modal.html#ondismiss
<ide> */
<del> onDismiss?: ?() => mixed,
<add> onDismiss?: ?(event?: BubblingEvent<null>) => mixed,
<ide>
<ide> /**
<ide> * Deprecated. Use the `animationType` prop instead.
<ide> */
<del> animated?: ?boolean,
<add> animated?: ?WithDefault<boolean, false>,
<ide>
<ide> /**
<ide> * The `supportedOrientations` prop allows the modal to be rotated to any of the specified orientations.
<ide> *
<ide> * See https://facebook.github.io/react-native/docs/modal.html#supportedorientations
<ide> */
<del> supportedOrientations?: ?$ReadOnlyArray<
<del> | 'portrait'
<del> | 'portrait-upside-down'
<del> | 'landscape'
<del> | 'landscape-left'
<del> | 'landscape-right',
<add> supportedOrientations?: ?WithDefault<
<add> $ReadOnlyArray<
<add> | 'portrait'
<add> | 'portrait-upside-down'
<add> | 'landscape'
<add> | 'landscape-left'
<add> | 'landscape-right',
<add> >,
<add> 'portrait',
<ide> >,
<ide>
<ide> /**
<ide> * The `onOrientationChange` callback is called when the orientation changes while the modal is being displayed.
<ide> *
<ide> * See https://facebook.github.io/react-native/docs/modal.html#onorientationchange
<ide> */
<del> onOrientationChange?: ?(event: OrientationChangeEvent) => mixed,
<add> onOrientationChange?: ?(event: DirectEvent<OrientationChangeEvent>) => mixed,
<ide>
<ide> /**
<ide> * The `identifier` is the unique number for identifying Modal components.
<ide> */
<del> identifier?: ?number,
<add> identifier?: ?WithDefault<Int32, 0>,
<ide> |}>;
<ide>
<del>type RCTModalHostViewNativeType = Class<NativeComponent<ModalNativeProps>>;
<del>
<del>module.exports = ((requireNativeComponent(
<del> 'RCTModalHostView',
<del>): any): RCTModalHostViewNativeType);
<add>export default codegenNativeComponent<NativeProps>('ModalHostView', {
<add> interfaceOnly: true,
<add> paperComponentName: 'RCTModalHostView',
<add>});
<ide><path>Libraries/Types/CodegenTypes.js
<ide>
<ide> 'use strict';
<ide>
<del>import type {NativeComponent} from '../Renderer/shims/ReactNative';
<ide> import type {SyntheticEvent} from './CoreEventTypes';
<ide>
<ide> // Event types
<ide> export type DirectEvent<T> = SyntheticEvent<T>;
<ide> export type Float = number;
<ide> export type Int32 = number;
<ide>
<add>type DefaultTypes = number | boolean | string | $ReadOnlyArray<string>;
<ide> // Default handling, ignore the unused value
<ide> // we're only using it for type checking
<ide> //
<ide> // TODO: (rickhanlonii) T44881457 If a default is provided, it should always be optional
<ide> // but that is currently not supported in the codegen since we require a default
<ide> //
<ide> // eslint-disable-next-line no-unused-vars
<del>export type WithDefault<Type: number | boolean | string, Value: ?Type> = Type;
<add>export type WithDefault<Type: DefaultTypes, Value: ?Type | string> = Type; | 4 |
Javascript | Javascript | fix siblings order for async hooks | 128476187aeffc3708efab6fbb99d248f6461507 | <ide><path>packages/ember-glimmer/tests/integration/components/life-cycle-test.js
<del>import { set, run } from 'ember-metal';
<add>import { set, setProperties, run } from 'ember-metal';
<ide> import { Component } from '../../utils/helpers';
<ide> import { strip } from '../../utils/abstract-test-case';
<ide> import { moduleFor, RenderingTest } from '../../utils/test-case';
<ide> class LifeCycleHooksTest extends RenderingTest {
<ide>
<ide> this.assertText('Twitter: @tomdale|Name: Tom Dale|Website: tomdale.net');
<ide>
<del> bottomAttrs = { oldAttrs: { website: 'tomdale.net' }, newAttrs: { website: 'tomdale.net' } };
<del>
<ide> this.assertHooks(
<ide> 'after no-op rerender (middle)',
<ide>
<ide> class LifeCycleHooksTest extends RenderingTest {
<ide>
<ide> this.assertText('Twitter: @tomdale|Name: Tom Dale|Website: tomdale.net');
<ide>
<del> middleAttrs = { oldAttrs: { name: 'Tom Dale' }, newAttrs: { name: 'Tom Dale' } };
<del>
<del>
<ide> this.assertHooks(
<ide> 'after no-op rerender (top)',
<ide>
<ide> class LifeCycleHooksTest extends RenderingTest {
<ide> });
<ide> }
<ide>
<add> ['@test lifecycle hooks are invoked in a correct sibling order']() {
<add> let { attr, invoke } = this.boundHelpers;
<add>
<add> this.registerComponent('the-parent', { template: strip`
<add> <div>
<add> ${invoke('the-first-child', { twitter: expr(attr('twitter')) })}|
<add> ${invoke('the-second-child', { name: expr(attr('name')) })}|
<add> ${invoke('the-last-child', { website: expr(attr('website')) })}
<add> </div>`
<add> });
<add>
<add> this.registerComponent('the-first-child', { template: `Twitter: {{${attr('twitter')}}}` });
<add>
<add> this.registerComponent('the-second-child', { template: `Name: {{${attr('name')}}}` });
<add>
<add> this.registerComponent('the-last-child', { template: `Website: {{${attr('website')}}}` });
<add>
<add> this.render(invoke('the-parent', {
<add> twitter: expr('twitter'),
<add> name: expr('name'),
<add> website: expr('website')
<add> }), {
<add> twitter: '@tomdale',
<add> name: 'Tom Dale',
<add> website: 'tomdale.net'
<add> });
<add>
<add> this.assertText('Twitter: @tomdale|Name: Tom Dale|Website: tomdale.net');
<add> this.assertRegisteredViews('intial render');
<add>
<add> let parentAttrs = { twitter: '@tomdale', name: 'Tom Dale', website: 'tomdale.net' };
<add> let firstAttrs = { twitter: '@tomdale' };
<add> let secondAttrs = { name: 'Tom Dale' };
<add> let lastAttrs = { website: 'tomdale.net' };
<add>
<add> this.assertHooks(
<add>
<add> 'after initial render',
<add>
<add> // Sync hooks
<add>
<add> ['the-parent', 'init'],
<add> ['the-parent', 'didInitAttrs', { attrs: parentAttrs }],
<add> ['the-parent', 'didReceiveAttrs', { newAttrs: parentAttrs }],
<add> ['the-parent', 'willRender'],
<add>
<add> ['the-first-child', 'init'],
<add> ['the-first-child', 'didInitAttrs', { attrs: firstAttrs }],
<add> ['the-first-child', 'didReceiveAttrs', { newAttrs: firstAttrs }],
<add> ['the-first-child', 'willRender'],
<add>
<add> ['the-second-child', 'init'],
<add> ['the-second-child', 'didInitAttrs', { attrs: secondAttrs }],
<add> ['the-second-child', 'didReceiveAttrs', { newAttrs: secondAttrs }],
<add> ['the-second-child', 'willRender'],
<add>
<add> ['the-last-child', 'init'],
<add> ['the-last-child', 'didInitAttrs', { attrs: lastAttrs }],
<add> ['the-last-child', 'didReceiveAttrs', { newAttrs: lastAttrs }],
<add> ['the-last-child', 'willRender'],
<add>
<add> // Async hooks
<add>
<add> ['the-first-child', 'didInsertElement'],
<add> ['the-first-child', 'didRender'],
<add>
<add> ['the-second-child', 'didInsertElement'],
<add> ['the-second-child', 'didRender'],
<add>
<add> ['the-last-child', 'didInsertElement'],
<add> ['the-last-child', 'didRender'],
<add>
<add> ['the-parent', 'didInsertElement'],
<add> ['the-parent', 'didRender']
<add>
<add> );
<add>
<add> this.runTask(() => this.components['the-first-child'].rerender());
<add>
<add> this.assertText('Twitter: @tomdale|Name: Tom Dale|Website: tomdale.net');
<add>
<add> this.assertHooks(
<add>
<add> 'after no-op rerender (first child)',
<add>
<add> // Sync hooks
<add>
<add> ['the-parent', 'willUpdate'],
<add> ['the-parent', 'willRender'],
<add>
<add> ['the-first-child', 'willUpdate'],
<add> ['the-first-child', 'willRender'],
<add>
<add> // Async hooks
<add>
<add> ['the-first-child', 'didUpdate'],
<add> ['the-first-child', 'didRender'],
<add>
<add> ['the-parent', 'didUpdate'],
<add> ['the-parent', 'didRender']
<add>
<add> );
<add>
<add> this.runTask(() => this.components['the-second-child'].rerender());
<add>
<add> this.assertText('Twitter: @tomdale|Name: Tom Dale|Website: tomdale.net');
<add>
<add> this.assertHooks(
<add>
<add> 'after no-op rerender (second child)',
<add>
<add> // Sync hooks
<add>
<add> ['the-parent', 'willUpdate'],
<add> ['the-parent', 'willRender'],
<add>
<add> ['the-second-child', 'willUpdate'],
<add> ['the-second-child', 'willRender'],
<add>
<add> // Async hooks
<add>
<add> ['the-second-child', 'didUpdate'],
<add> ['the-second-child', 'didRender'],
<add>
<add> ['the-parent', 'didUpdate'],
<add> ['the-parent', 'didRender']
<add>
<add> );
<add>
<add> this.runTask(() => this.components['the-last-child'].rerender());
<add>
<add> this.assertText('Twitter: @tomdale|Name: Tom Dale|Website: tomdale.net');
<add>
<add> this.assertHooks(
<add>
<add> 'after no-op rerender (last child)',
<add>
<add> // Sync hooks
<add>
<add> ['the-parent', 'willUpdate'],
<add> ['the-parent', 'willRender'],
<add>
<add> ['the-last-child', 'willUpdate'],
<add> ['the-last-child', 'willRender'],
<add>
<add> // Async hooks
<add>
<add> ['the-last-child', 'didUpdate'],
<add> ['the-last-child', 'didRender'],
<add>
<add> ['the-parent', 'didUpdate'],
<add> ['the-parent', 'didRender']
<add>
<add> );
<add>
<add> this.runTask(() => this.components['the-parent'].rerender());
<add>
<add> this.assertText('Twitter: @tomdale|Name: Tom Dale|Website: tomdale.net');
<add>
<add> this.assertHooks(
<add>
<add> 'after no-op rerender (parent)',
<add>
<add> // Sync hooks
<add>
<add> ['the-parent', 'willUpdate'],
<add> ['the-parent', 'willRender'],
<add>
<add> // Async hooks
<add>
<add> ['the-parent', 'didUpdate'],
<add> ['the-parent', 'didRender']
<add>
<add> );
<add>
<add> this.runTask(() => setProperties(this.context, {
<add> twitter: '@horsetomdale',
<add> name: 'Horse Tom Dale',
<add> website: 'horsetomdale.net'
<add> }));
<add>
<add> this.assertText('Twitter: @horsetomdale|Name: Horse Tom Dale|Website: horsetomdale.net');
<add>
<add> parentAttrs = {
<add> oldAttrs: { twitter: '@tomdale', name: 'Tom Dale', website: 'tomdale.net' },
<add> newAttrs: { twitter: '@horsetomdale', name: 'Horse Tom Dale', website: 'horsetomdale.net' }
<add> };
<add> firstAttrs = { oldAttrs: { twitter: '@tomdale' }, newAttrs: { twitter: '@horsetomdale' } };
<add> secondAttrs = { oldAttrs: { name: 'Tom Dale' }, newAttrs: { name: 'Horse Tom Dale' } };
<add> lastAttrs = { oldAttrs: { website: 'tomdale.net' }, newAttrs: { website: 'horsetomdale.net' } };
<add>
<add> this.assertHooks(
<add>
<add> 'after update',
<add>
<add> // Sync hooks
<add>
<add> ['the-parent', 'didUpdateAttrs', parentAttrs],
<add> ['the-parent', 'didReceiveAttrs', parentAttrs],
<add>
<add> ['the-parent', 'willUpdate'],
<add> ['the-parent', 'willRender'],
<add>
<add> ['the-first-child', 'didUpdateAttrs', firstAttrs],
<add> ['the-first-child', 'didReceiveAttrs', firstAttrs],
<add>
<add> ['the-first-child', 'willUpdate'],
<add> ['the-first-child', 'willRender'],
<add>
<add> ['the-second-child', 'didUpdateAttrs', secondAttrs],
<add> ['the-second-child', 'didReceiveAttrs', secondAttrs],
<add>
<add> ['the-second-child', 'willUpdate'],
<add> ['the-second-child', 'willRender'],
<add>
<add> ['the-last-child', 'didUpdateAttrs', lastAttrs],
<add> ['the-last-child', 'didReceiveAttrs', lastAttrs],
<add>
<add> ['the-last-child', 'willUpdate'],
<add> ['the-last-child', 'willRender'],
<add>
<add> // Async hooks
<add>
<add> ['the-first-child', 'didUpdate'],
<add> ['the-first-child', 'didRender'],
<add>
<add> ['the-second-child', 'didUpdate'],
<add> ['the-second-child', 'didRender'],
<add>
<add> ['the-last-child', 'didUpdate'],
<add> ['the-last-child', 'didRender'],
<add>
<add> ['the-parent', 'didUpdate'],
<add> ['the-parent', 'didRender']
<add>
<add> );
<add>
<add> this.teardownAssertions.push(() => {
<add> this.assertHooks(
<add> 'destroy',
<add> ['the-parent', 'willDestroyElement'],
<add> ['the-parent', 'willClearRender'],
<add> ['the-first-child', 'willDestroyElement'],
<add> ['the-first-child', 'willClearRender'],
<add> ['the-second-child', 'willDestroyElement'],
<add> ['the-second-child', 'willClearRender'],
<add> ['the-last-child', 'willDestroyElement'],
<add> ['the-last-child', 'willClearRender']
<add> );
<add>
<add> this.assertRegisteredViews('after destroy');
<add> });
<add> }
<add>
<ide> ['@test passing values through attrs causes lifecycle hooks to fire if the attribute values have changed']() {
<ide> let { attr, invoke } = this.boundHelpers;
<ide> | 1 |
Java | Java | fix slider colors in android | 31904d523dbe79a8a4a37690cd821b3ec21e16b0 | <ide><path>ReactAndroid/src/main/java/com/facebook/react/views/slider/ReactSliderManager.java
<ide> public void setThumbTintColor(ReactSlider view, Integer color) {
<ide> @ReactProp(name = "minimumTrackTintColor", customType = "Color")
<ide> public void setMinimumTrackTintColor(ReactSlider view, Integer color) {
<ide> LayerDrawable drawable = (LayerDrawable) view.getProgressDrawable().getCurrent();
<del> Drawable background = drawable.findDrawableByLayerId(android.R.id.background);
<add> Drawable progress = drawable.findDrawableByLayerId(android.R.id.progress);
<ide> if (color == null) {
<del> background.clearColorFilter();
<add> progress.clearColorFilter();
<ide> } else {
<del> background.setColorFilter(color, PorterDuff.Mode.SRC_IN);
<add> progress.setColorFilter(color, PorterDuff.Mode.SRC_IN);
<ide> }
<ide> }
<ide>
<ide> @ReactProp(name = "maximumTrackTintColor", customType = "Color")
<ide> public void setMaximumTrackTintColor(ReactSlider view, Integer color) {
<ide> LayerDrawable drawable = (LayerDrawable) view.getProgressDrawable().getCurrent();
<del> Drawable progress = drawable.findDrawableByLayerId(android.R.id.progress);
<add> Drawable background = drawable.findDrawableByLayerId(android.R.id.background);
<ide> if (color == null) {
<del> progress.clearColorFilter();
<add> background.clearColorFilter();
<ide> } else {
<del> progress.setColorFilter(color, PorterDuff.Mode.SRC_IN);
<add> background.setColorFilter(color, PorterDuff.Mode.SRC_IN);
<ide> }
<ide> }
<ide> | 1 |
Ruby | Ruby | remove cyclic dependency on argv | 28e62b52d1be4cff4c9d668779b6e7d361656c2a | <ide><path>Library/Homebrew/cli/args.rb
<ide> module Homebrew
<ide> module CLI
<ide> class Args < OpenStruct
<del> attr_accessor :processed_options
<add> attr_accessor :processed_options, :args_parsed
<ide> # undefine tap to allow --tap argument
<ide> undef tap
<ide>
<ide> def initialize(argv:)
<ide> super
<ide> @argv = argv
<add> @args_parsed = false
<ide> @processed_options = []
<ide> end
<ide>
<ide> def kegs
<ide>
<ide> def downcased_unique_named
<ide> # Only lowercase names, not paths, bottle filenames or URLs
<del> remaining.map do |arg|
<add> arguments = if args_parsed
<add> remaining
<add> else
<add> cmdline_args.reject { |arg| arg.start_with?("-") }
<add> end
<add> arguments.map do |arg|
<ide> if arg.include?("/") || arg.end_with?(".tar.gz") || File.exist?(arg)
<ide> arg
<ide> else
<ide> def downcased_unique_named
<ide> end.uniq
<ide> end
<ide>
<add> def head
<add> (args_parsed && HEAD?) || cmdline_args.include?("--HEAD")
<add> end
<add>
<add> def devel
<add> (args_parsed && devel?) || cmdline_args.include?("--devel")
<add> end
<add>
<ide> def spec(default = :stable)
<del> if HEAD?
<add> if head
<ide> :head
<del> elsif devel?
<add> elsif devel
<ide> :devel
<ide> else
<ide> default
<ide><path>Library/Homebrew/cli/parser.rb
<ide> class Parser
<ide> attr_reader :processed_options, :hide_from_man_page
<ide>
<ide> def self.parse(args = ARGV, &block)
<del> new(&block).parse(args)
<add> new(args, &block).parse(args)
<ide> end
<ide>
<ide> def self.global_options
<ide> def self.global_options
<ide> }
<ide> end
<ide>
<del> def initialize(&block)
<add> def initialize(args = ARGV, &block)
<ide> @parser = OptionParser.new
<ide> @args = Homebrew::CLI::Args.new(argv: ARGV_WITHOUT_MONKEY_PATCHING)
<ide> @args[:remaining] = []
<add> @args[:cmdline_args] = args.dup
<ide> @constraints = []
<ide> @conflicts = []
<ide> @switch_sources = {}
<ide> def parse(cmdline_args = ARGV)
<ide> end
<ide> check_constraint_violations
<ide> @args[:remaining] = remaining_args
<del> @args_parsed = true
<add> @args.args_parsed = @args_parsed = true
<ide> @args.processed_options = @processed_options
<ide> Homebrew.args = @args
<ide> cmdline_args.freeze | 2 |
Java | Java | update error message in dispatcherservlet | 888835445c4f0bfb49055167d19b2c6cb25d7b45 | <ide><path>spring-webmvc/src/main/java/org/springframework/web/servlet/DispatcherServlet.java
<ide> protected HandlerAdapter getHandlerAdapter(Object handler) throws ServletExcepti
<ide> }
<ide> }
<ide> throw new ServletException("No adapter for handler [" + handler +
<del> "]: Does your handler implement a supported interface like Controller?");
<add> "]: The DispatcherServlet configuration needs to include a HandlerAdapter that supports this handler");
<ide> }
<ide>
<ide> /** | 1 |
PHP | PHP | remove unused variables/lines | df6b8483893439ae4527e678fc6b963e76f43dae | <ide><path>src/I18n/RelativeTimeFormatter.php
<ide> public function dateAgoInWords(DatetimeInterface $date, array $options = [])
<ide> }
<ide>
<ide> $diffData = $this->_diffData($futureTime, $pastTime, $backwards, $options);
<del> list($fNum, $fWord, $years, $months, $weeks, $days, $hours, $minutes, $seconds) = array_values($diffData);
<add> list($fNum, $fWord, $years, $months, $weeks, $days) = array_values($diffData);
<ide>
<ide> $relativeDate = [];
<ide> if ($fNum >= 1 && $years > 0) {
<ide><path>src/Network/Socket.php
<ide> public function write($data)
<ide> }
<ide> }
<ide> $totalBytes = strlen($data);
<del> for ($written = 0, $rv = 0; $written < $totalBytes; $written += $rv) {
<add> for ($written = 0; $written < $totalBytes; $written += $rv) {
<ide> $rv = fwrite($this->connection, substr($data, $written));
<ide> if ($rv === false || $rv === 0) {
<ide> return $written;
<ide><path>src/ORM/Query.php
<ide> protected function _addDefaultFields()
<ide> protected function _addDefaultSelectTypes()
<ide> {
<ide> $typeMap = $this->typeMap()->defaults();
<del> $selectTypeMap = $this->selectTypeMap();
<ide> $select = $this->clause('select');
<ide> $types = [];
<ide>
<ide><path>src/TestSuite/TestCase.php
<ide> protected function _assertAttributes($assertions, $string, $fullDebug = false, $
<ide> {
<ide> $asserts = $assertions['attrs'];
<ide> $explains = $assertions['explains'];
<del> $len = count($asserts);
<ide> do {
<ide> $matches = false;
<ide> foreach ($asserts as $j => $assert) {
<ide><path>tests/TestCase/View/Helper/HtmlHelperTest.php
<ide> class HtmlHelperTest extends TestCase
<ide> public function setUp()
<ide> {
<ide> parent::setUp();
<del> $controller = $this->getMock('Cake\Controller\Controller', ['redirect']);
<ide> $this->View = $this->getMock('Cake\View\View', ['append']);
<ide> $this->Html = new HtmlHelper($this->View);
<ide> $this->Html->request = new Request(); | 5 |
Go | Go | fix bug for `docker service ls` | 5f69a077dace5de14d27382ffee3c577f8cab1ad | <ide><path>libnetwork/client/service.go
<ide> func getBackendID(cli *NetworkCli, servID string) (string, error) {
<ide> )
<ide>
<ide> if obj, _, err = readBody(cli.call("GET", "/services/"+servID+"/backend", nil, nil)); err == nil {
<del> var bkl []sandboxResource
<del> if err := json.NewDecoder(bytes.NewReader(obj)).Decode(&bkl); err == nil {
<del> if len(bkl) > 0 {
<del> bk = bkl[0].ID
<del> }
<add> var sr sandboxResource
<add> if err := json.NewDecoder(bytes.NewReader(obj)).Decode(&sr); err == nil {
<add> bk = sr.ContainerID
<ide> } else {
<ide> // Only print a message, don't make the caller cli fail for this
<del> fmt.Fprintf(cli.out, "Failed to retrieve backend list for service %s (%v)", servID, err)
<add> fmt.Fprintf(cli.out, "Failed to retrieve backend list for service %s (%v)\n", servID, err)
<ide> }
<ide> }
<ide> | 1 |
Mixed | Ruby | yield translation to `formbuilder#button` block | a086418283b5220a06bf86e485f654d8552b5b0b | <ide><path>actionview/CHANGELOG.md
<add>* Yield translated strings to calls of `ActionView::FormBuilder#button`
<add> when a block is given.
<add>
<add> *Sean Doyle*
<add>
<ide> * Alias `ActionView::Helpers::Tags::Label::LabelBuilder#translation` to
<ide> `#to_s` so that `form.label` calls can yield that value to their blocks.
<ide>
<ide><path>actionview/lib/action_view/helpers/form_helper.rb
<ide> def submit(value = nil, options = {})
<ide> # # <strong>Ask me!</strong>
<ide> # # </button>
<ide> #
<add> # button do |text|
<add> # content_tag(:strong, text)
<add> # end
<add> # # => <button name='button' type='submit'>
<add> # # <strong>Create post</strong>
<add> # # </button>
<add> #
<ide> def button(value = nil, options = {}, &block)
<ide> value, options = nil, value if value.is_a?(Hash)
<ide> value ||= submit_default_value
<del> @template.button_tag(value, options, &block)
<add>
<add> if block_given?
<add> value = @template.capture { yield(value) }
<add> end
<add>
<add> @template.button_tag(value, options)
<ide> end
<ide>
<ide> def emitted_hidden_id? # :nodoc:
<ide><path>actionview/test/template/form_helper/form_with_test.rb
<ide> def test_form_with
<ide> assert_dom_equal expected, output_buffer
<ide> end
<ide>
<add> def test_form_with_button_yields_translation
<add> form_with(model: @post) do |f|
<add> concat(f.button { |value| concat content_tag(:span, value) })
<add> end
<add>
<add> expected = whole_form("/posts/123", method: :patch) do
<add> "<button name='button' type='submit'><span>Update Post</span></button>"
<add> end
<add>
<add> assert_dom_equal expected, output_buffer
<add> end
<add>
<ide> def test_form_with_not_outputting_ids
<ide> old_value = ActionView::Helpers::FormHelper.form_with_generates_ids
<ide> ActionView::Helpers::FormHelper.form_with_generates_ids = false
<ide> def test_form_with_label_accesses_object_through_label_tag_builder
<ide> form_with(model: Post.new) do |f|
<ide> concat(
<ide> f.label(:title) do |builder|
<del> concat tag.span(builder, {
<del> class: ("new_record" unless builder.object.persisted?)
<del> })
<add> concat tag.span(builder, class: ("new_record" unless builder.object.persisted?))
<ide> end
<ide> )
<ide> end | 3 |
Javascript | Javascript | add more asserts to test-http-big-proxy-responses | c7b3c1ecbc7c425eb1e0014166dcc051805f8cff | <ide><path>test/disabled/test-http-big-proxy-responses.js
<ide> fs = require("fs"),
<ide> http = require("http"),
<ide> url = require("url");
<ide>
<add>var chunk = '01234567890123456789';
<add>
<ide> // Produce a very large response.
<ide> var chargen = http.createServer(function (req, res) {
<del> var chunk = '01234567890123456789';
<del> var len = req.headers['x-len'];
<add> var len = parseInt(req.headers['x-len']);
<add> assert.ok(len > 0);
<ide> res.writeHead(200, {"transfer-encoding":"chunked"});
<ide> for (var i=0; i<len; i++) {
<del> print(',');
<add> //print(',');
<ide> res.write(chunk);
<ide> }
<ide> res.end();
<ide> chargen.listen(9000);
<ide> // Proxy to the chargen server.
<ide> var proxy = http.createServer(function (req, res) {
<ide> var c = http.createClient(9000, 'localhost')
<add>
<add> var len = parseInt(req.headers['x-len']);
<add> assert.ok(len > 0);
<add>
<add> var sent = 0;
<add>
<add>
<add> c.addListener('error', function (e) {
<add> puts('proxy client error. sent ' + sent);
<add> throw e;
<add> });
<add>
<ide> var proxy_req = c.request(req.method, req.url, req.headers);
<ide> proxy_req.addListener('response', function(proxy_res) {
<ide> res.writeHead(proxy_res.statusCode, proxy_res.headers);
<del> proxy_res.addListener('data', function(chunk) {
<del> print('.');
<del> res.write(chunk);
<add>
<add> proxy_res.addListener('data', function(d) {
<add> //print('.');
<add> res.write(d);
<add> sent += d.length;
<add> assert.ok(sent <= (len*chunk.length));
<ide> });
<add>
<ide> proxy_res.addListener('end', function() {
<ide> res.end();
<ide> });
<add>
<ide> });
<add>
<ide> proxy_req.end();
<ide> });
<ide> proxy.listen(9001);
<ide> var done = false;
<ide>
<ide> function call_chargen(list) {
<ide> if (list.length > 0) {
<del> sys.debug("calling chargen for " + list[0] + " chunks.");
<del> var req = http.createClient(9001, 'localhost').request('/', {'x-len': list[0]});
<add> var len = list.shift();
<add>
<add> sys.debug("calling chargen for " + len + " chunks.");
<add>
<add> var recved = 0;
<add>
<add> var req = http.createClient(9001, 'localhost').request('/', {'x-len': len});
<add>
<ide> req.addListener('response', function(res) {
<add>
<add> res.addListener('data', function(d) {
<add> recved += d.length;
<add> assert.ok(recved <= (len*chunk.length));
<add> });
<add>
<ide> res.addListener('end', function() {
<del> sys.debug("end for " + list[0] + " chunks.");
<del> list.shift();
<add> assert.ok(recved <= (len*chunk.length));
<add> sys.debug("end for " + len + " chunks.");
<ide> call_chargen(list);
<ide> });
<add>
<ide> });
<ide> req.end();
<add>
<ide> } else {
<del> sys.puts("End of list.");
<del> proxy.end();
<del> chargen.end();
<del> done = true;
<add> sys.puts("End of list. closing servers");
<add> proxy.close();
<add> chargen.close();
<add> done = true;
<ide> }
<ide> }
<ide> | 1 |
Python | Python | add training pipeline (formatting temporary) | 578d23e06114bbd63cf5e931e0fdef9b8b6ac8c4 | <ide><path>examples/run_seq2seq_finetuning.py
<ide> import os
<ide>
<ide> import numpy as np
<add>from tqdm import tqdm, trange
<ide> import torch
<del>from torch.utils.data import Dataset
<add>from torch.utils.data import Dataset, RandomSampler
<ide>
<ide> from transformers import AutoTokenizer, Model2Model
<ide>
<ide> def __init_(self, tokenizer_src, tokenizer_tgt, data_dir="", block_size=512):
<ide> except IndexError: # skip ill-formed stories
<ide> continue
<ide>
<del> story = tokenizer_src.convert_tokens_to_ids(tokenizer_src.tokenize(story))
<add> story = tokenizer_src.convert_tokens_to_ids(
<add> tokenizer_src.tokenize(story)
<add> )
<ide> story_seq = _fit_to_block_size(story, block_size)
<ide>
<del> summary = tokenizer_tgt.convert_tokens_to_ids(tokenizer_tgt.tokenize(summary))
<add> summary = tokenizer_tgt.convert_tokens_to_ids(
<add> tokenizer_tgt.tokenize(summary)
<add> )
<ide> summary_seq = _fit_to_block_size(summary, block_size)
<ide>
<ide> self.examples.append((story_seq, summary_seq))
<ide> def load_and_cache_examples(args, tokenizer_src, tokenizer_tgt):
<ide>
<ide> def train(args, train_dataset, model, tokenizer):
<ide> """ Fine-tune the pretrained model on the corpus. """
<del> raise NotImplementedError
<add>
<add> # Prepare the data loading
<add> args.train_bach_size = 1
<add> train_sampler = RandomSampler(train_dataset)
<add> train_dataloader = DataLoader(
<add> train_dataset, sampler=train_sampler, batch_size=args.train_bach_size
<add> )
<add>
<add> # Prepare the optimizer and schedule (linear warmup and decay)
<add> no_decay = ["bias", "LayerNorm.weight"]
<add> optimizer_grouped_parameters = [
<add> {
<add> "params": [
<add> p
<add> for n, p in model.named_parameters()
<add> if not any(nd in n for nd in no_decay)
<add> ],
<add> "weight_decay": args.weight_decay,
<add> },
<add> {
<add> "params": [
<add> p
<add> for n, p in model.named_parameters()
<add> if any(nd in n for nd in no_decay)
<add> ],
<add> "weight_decay": 0.0,
<add> },
<add> ]
<add> optimizer = AdamW(
<add> optimizer_grouped_parameters, lr=args.learning_rate, eps=args.adam_epsilon
<add> )
<add> scheduler = WarmupLinearSchedule(
<add> optimizer, warmup_steps=args.warmup_steps, t_total=t_total
<add> )
<add>
<add> # Train
<add> logger.info("***** Running training *****")
<add> logger.info(" Num examples = %d", len(train_dataset))
<add> logger.info(" Num Epochs = %d", args.num_train_epochs)
<add> logger.info(
<add> " Instantaneous batch size per GPU = %d", args.per_gpu_train_batch_size
<add> )
<add> logger.info(
<add> " Total train batch size (w. parallel, distributed & accumulation) = %d",
<add> args.train_batch_size
<add> * args.gradient_accumulation_steps
<add> * (torch.distributed.get_world_size() if args.local_rank != -1 else 1),
<add> )
<add> logger.info(" Gradient Accumulation steps = %d", args.gradient_accumulation_steps)
<add> logger.info(" Total optimization steps = %d", t_total)
<add>
<add> global_step = 0
<add> tr_loss, logging_loss = 0.0, 0.0
<add> model.zero_grad()
<add> train_iterator = trange(args.num_train_epochs, desc="Epoch", disable=True)
<add> set_seed(args)
<add> for _ in train_iterator:
<add> epoch_iterator = tqdm(train_dataloader, desc="Iteration", disable=True)
<add> for step, batch in enumerate(epoch_iterator):
<add> source = ([s for s, _ in batch]).to(args.device)
<add> target = ([t for _, t in batch]).to(args.device)
<add> model.train()
<add> outputs = model(source, target)
<add> loss = outputs[0]
<add> loss.backward()
<add>
<add> tr_loss += loss.item()
<add> if (step + 1) % args.gradient_accumulation_steps == 0:
<add> torch.nn.utils.clip_grad_norm_(model.parameters(), args.max_grad_norm)
<add> optimizer.step()
<add> scheduler.step()
<add> model.zero_grad()
<add> global_step += 1
<add>
<add> if args.max_steps > 0 and global_step > args.max_steps:
<add> epoch_iterator.close()
<add> break
<add>
<add> if args.max_steps > 0 and global_step > args.max_steps:
<add> train_iterator.close()
<add> break
<add>
<add> return global_step, tr_loss / global_step
<ide>
<ide>
<ide> def main():
<ide> def main():
<ide> )
<ide>
<ide> # Optional parameters
<add> parser.add_argument(
<add> "--adam_epsilon", default=1e-8, type=float, help="Epsilon for Adam optimizer."
<add> )
<ide> parser.add_argument(
<ide> "--decoder_name_or_path",
<ide> default="bert-base-cased",
<ide> def main():
<ide> type=str,
<ide> help="The encoder architecture to be fine-tuned.",
<ide> )
<add> parser.add_argument(
<add> "--learning_rate",
<add> default=5e-5,
<add> type=float,
<add> help="The initial learning rate for Adam.",
<add> )
<add> parser.add_argument(
<add> "--max_grad_norm", default=1.0, type=float, help="Max gradient norm."
<add> )
<add> parser.add_argument(
<add> "--max_steps",
<add> default=-1,
<add> type=int,
<add> help="If > 0: set total number of training steps to perform. Override num_train_epochs.",
<add> )
<add> parser.add_argument(
<add> "--num_train_epochs",
<add> default=1,
<add> type=int,
<add> help="Total number of training epochs to perform.",
<add> )
<ide> parser.add_argument("--seed", default=42, type=int)
<add> parser.add_argument(
<add> "--warmup_steps", default=0, type=int, help="Linear warmup over warmup_steps."
<add> )
<add> parser.add_argument(
<add> "--weight_decay", default=0.0, type=float, help="Weight deay if we apply some."
<add> )
<ide> args = parser.parse_args()
<ide>
<del> if args.encoder_type != 'bert' or args.decoder_type != 'bert':
<del> raise ValueError("Only the BERT architecture is currently supported for seq2seq.")
<add> if args.encoder_type != "bert" or args.decoder_type != "bert":
<add> raise ValueError(
<add> "Only the BERT architecture is currently supported for seq2seq."
<add> )
<ide>
<ide> # Set up training device
<ide> # device = torch.device("cpu")
<ide> def main():
<ide> # Load pretrained model and tokenizer
<ide> encoder_tokenizer_class = AutoTokenizer.from_pretrained(args.encoder_name_or_path)
<ide> decoder_tokenizer_class = AutoTokenizer.from_pretrained(args.decoder_name_or_path)
<del> model = Model2Model.from_pretrained(args.encoder_name_or_path, args.decoder_name_or_path)
<add> model = Model2Model.from_pretrained(
<add> args.encoder_name_or_path, args.decoder_name_or_path
<add> )
<ide> # model.to(device)
<ide>
<ide> logger.info("Training/evaluation parameters %s", args)
<ide>
<ide> # Training
<del> source, target = load_and_cache_examples(args, tokenizer)
<del> # global_step, tr_loss = train(args, train_dataset, model, tokenizer)
<add> train_dataset = load_and_cache_examples(args, tokenizer)
<add> global_step, tr_loss = train(args, train_dataset, model, tokenizer)
<ide> # logger.info(" global_step = %s, average loss = %s", global_step, tr_loss)
<ide>
<ide> | 1 |
Javascript | Javascript | hide loaders in hmr log message | 066b0ace5177b213eadf167cc2145355d2f29e45 | <ide><path>hot/log-apply-result.js
<ide> module.exports = function(updatedModules, renewedModules) {
<ide> } else {
<ide> log("info", "[HMR] Updated modules:");
<ide> renewedModules.forEach(function(moduleId) {
<add> if(moduleId.lastIndexOf) {
<add> moduleId = moduleId.substring(moduleId.lastIndexOf("!") + 1);
<add> }
<ide> log("info", "[HMR] - " + moduleId);
<ide> });
<ide> var numberIds = renewedModules.every(function(moduleId) { | 1 |
Ruby | Ruby | pass variants in rather than mutating the request | 59a9068c3fa501ca23d60f3a83c0968e9ed4639a | <ide><path>actionpack/test/controller/mime/respond_to_test.rb
<ide> class RespondToController < ActionController::Base
<ide> layout :set_layout
<ide>
<add> before_action {
<add> case params[:v]
<add> when String then request.variant = params[:v].to_sym
<add> when Array then request.variant = params[:v].map(&:to_sym)
<add> end
<add> }
<add>
<ide> def html_xml_or_rss
<ide> respond_to do |type|
<ide> type.html { render :text => "HTML" }
<ide> def test_invalid_variant
<ide> logger = ActiveSupport::LogSubscriber::TestHelper::MockLogger.new
<ide> old_logger, ActionController::Base.logger = ActionController::Base.logger, logger
<ide>
<del> @request.variant = :invalid
<del> get :variant_with_implicit_rendering
<add> get :variant_with_implicit_rendering, params: { v: :invalid }
<ide> assert_response :no_content
<ide> assert_equal 1, logger.logged(:info).select{ |s| s =~ /No template found/ }.size, "Implicit head :no_content not logged"
<ide> ensure
<ide> def test_variant_not_set_regular_template_missing
<ide> end
<ide>
<ide> def test_variant_with_implicit_rendering
<del> @request.variant = :implicit
<del> get :variant_with_implicit_rendering
<add> get :variant_with_implicit_rendering, params: { v: :implicit }
<ide> assert_response :no_content
<ide> end
<ide>
<ide> def test_variant_with_implicit_template_rendering
<del> @request.variant = :mobile
<del> get :variant_with_implicit_rendering
<add> get :variant_with_implicit_rendering, params: { v: :mobile }
<ide> assert_equal "text/html", @response.content_type
<ide> assert_equal "mobile", @response.body
<ide> end
<ide>
<ide> def test_variant_with_format_and_custom_render
<del> @request.variant = :phone
<del> get :variant_with_format_and_custom_render
<add> get :variant_with_format_and_custom_render, params: { v: :phone }
<ide> assert_equal "text/html", @response.content_type
<ide> assert_equal "mobile", @response.body
<ide> end
<ide>
<ide> def test_multiple_variants_for_format
<del> @request.variant = :tablet
<del> get :multiple_variants_for_format
<add> get :multiple_variants_for_format, params: { v: :tablet }
<ide> assert_equal "text/html", @response.content_type
<ide> assert_equal "tablet", @response.body
<ide> end
<ide> def test_variant_inline_syntax
<ide> assert_equal "text/html", @response.content_type
<ide> assert_equal "none", @response.body
<ide>
<del> @request.variant = :phone
<del> get :variant_inline_syntax
<add> get :variant_inline_syntax, params: { v: :phone }
<ide> assert_equal "text/html", @response.content_type
<ide> assert_equal "phone", @response.body
<ide> end
<ide>
<ide> def test_variant_inline_syntax_without_block
<del> @request.variant = :phone
<del> get :variant_inline_syntax_without_block
<add> get :variant_inline_syntax_without_block, params: { v: :phone }
<ide> assert_equal "text/html", @response.content_type
<ide> assert_equal "phone", @response.body
<ide> end
<ide>
<ide> def test_variant_any
<del> @request.variant = :phone
<del> get :variant_any
<add> get :variant_any, params: { v: :phone }
<ide> assert_equal "text/html", @response.content_type
<ide> assert_equal "phone", @response.body
<ide>
<del> @request.variant = :tablet
<del> get :variant_any
<add> get :variant_any, params: { v: :tablet }
<ide> assert_equal "text/html", @response.content_type
<ide> assert_equal "any", @response.body
<ide>
<del> @request.variant = :phablet
<del> get :variant_any
<add> get :variant_any, params: { v: :phablet }
<ide> assert_equal "text/html", @response.content_type
<ide> assert_equal "any", @response.body
<ide> end
<ide> def test_variant_any_any
<ide> assert_equal "text/html", @response.content_type
<ide> assert_equal "any", @response.body
<ide>
<del> @request.variant = :phone
<del> get :variant_any_any
<add> get :variant_any_any, params: { v: :phone }
<ide> assert_equal "text/html", @response.content_type
<ide> assert_equal "phone", @response.body
<ide>
<del> @request.variant = :yolo
<del> get :variant_any_any
<add> get :variant_any_any, params: { v: :yolo }
<ide> assert_equal "text/html", @response.content_type
<ide> assert_equal "any", @response.body
<ide> end
<ide>
<ide> def test_variant_inline_any
<del> @request.variant = :phone
<del> get :variant_any
<add> get :variant_any, params: { v: :phone }
<ide> assert_equal "text/html", @response.content_type
<ide> assert_equal "phone", @response.body
<ide>
<del> @request.variant = :tablet
<del> get :variant_inline_any
<add> get :variant_inline_any, params: { v: :tablet }
<ide> assert_equal "text/html", @response.content_type
<ide> assert_equal "any", @response.body
<ide>
<del> @request.variant = :phablet
<del> get :variant_inline_any
<add> get :variant_inline_any, params: { v: :phablet }
<ide> assert_equal "text/html", @response.content_type
<ide> assert_equal "any", @response.body
<ide> end
<ide>
<ide> def test_variant_inline_any_any
<del> @request.variant = :phone
<del> get :variant_inline_any_any
<add> get :variant_inline_any_any, params: { v: :phone }
<ide> assert_equal "text/html", @response.content_type
<ide> assert_equal "phone", @response.body
<ide>
<del> @request.variant = :yolo
<del> get :variant_inline_any_any
<add> get :variant_inline_any_any, params: { v: :yolo }
<ide> assert_equal "text/html", @response.content_type
<ide> assert_equal "any", @response.body
<ide> end
<ide>
<ide> def test_variant_any_implicit_render
<del> @request.variant = :tablet
<del> get :variant_any_implicit_render
<add> get :variant_any_implicit_render, params: { v: :tablet }
<ide> assert_equal "text/html", @response.content_type
<ide> assert_equal "tablet", @response.body
<ide>
<del> @request.variant = :phablet
<del> get :variant_any_implicit_render
<add> get :variant_any_implicit_render, params: { v: :phablet }
<ide> assert_equal "text/html", @response.content_type
<ide> assert_equal "phablet", @response.body
<ide> end
<ide> def test_variant_any_with_none
<ide> assert_equal "text/html", @response.content_type
<ide> assert_equal "none or phone", @response.body
<ide>
<del> @request.variant = :phone
<del> get :variant_any_with_none
<add> get :variant_any_with_none, params: { v: :phone }
<ide> assert_equal "text/html", @response.content_type
<ide> assert_equal "none or phone", @response.body
<ide> end
<ide>
<ide> def test_format_any_variant_any
<del> @request.variant = :tablet
<del> get :format_any_variant_any, format: :js
<add> get :format_any_variant_any, format: :js, params: { v: :tablet }
<ide> assert_equal "text/javascript", @response.content_type
<ide> assert_equal "tablet", @response.body
<ide> end
<ide>
<ide> def test_variant_negotiation_inline_syntax
<del> @request.variant = [:tablet, :phone]
<del> get :variant_inline_syntax_without_block
<add> get :variant_inline_syntax_without_block, params: { v: [:tablet, :phone] }
<ide> assert_equal "text/html", @response.content_type
<ide> assert_equal "phone", @response.body
<ide> end
<ide>
<ide> def test_variant_negotiation_block_syntax
<del> @request.variant = [:tablet, :phone]
<del> get :variant_plus_none_for_format
<add> get :variant_plus_none_for_format, params: { v: [:tablet, :phone] }
<ide> assert_equal "text/html", @response.content_type
<ide> assert_equal "phone", @response.body
<ide> end
<ide>
<ide> def test_variant_negotiation_without_block
<del> @request.variant = [:tablet, :phone]
<del> get :variant_inline_syntax_without_block
<add> get :variant_inline_syntax_without_block, params: { v: [:tablet, :phone] }
<ide> assert_equal "text/html", @response.content_type
<ide> assert_equal "phone", @response.body
<ide> end | 1 |
Javascript | Javascript | block special pages from the public. | 1c7b2603d4730dd4e6329d8dbd61447d07a7a2bb | <ide><path>server/index.js
<ide> const internalPrefixes = [
<ide> /^\/static\//
<ide> ]
<ide>
<add>const blockedPages = {
<add> '/_document': true,
<add> '/_error': true
<add>}
<add>
<ide> export default class Server {
<ide> constructor ({ dir = '.', dev = false, staticMarkup = false, quiet = false, conf = null } = {}) {
<ide> this.dir = resolve(dir)
<ide> export default class Server {
<ide> return this.handleRequest(req, res, parsedUrl)
<ide> }
<ide>
<add> if (blockedPages[pathname]) {
<add> return await this.render404(req, res, parsedUrl)
<add> }
<add>
<ide> if (this.config.poweredByHeader) {
<ide> res.setHeader('X-Powered-By', `Next.js ${pkg.version}`)
<ide> }
<ide><path>test/integration/production/test/index.test.js
<ide> describe('Production Usage', () => {
<ide> const res2 = await fetch(url, { headers })
<ide> expect(res2.status).toBe(304)
<ide> })
<add>
<add> it('should block special pages', async () => {
<add> const urls = ['/_document', '/_error']
<add> for (const url of urls) {
<add> const html = await renderViaHTTP(appPort, url)
<add> expect(html).toMatch(/404/)
<add> }
<add> })
<ide> })
<ide>
<ide> describe('With navigation', () => { | 2 |
Text | Text | add vdeturckheim as collaborator | 4a498335f563d241407bb3f144d328e97a74f8dd | <ide><path>README.md
<ide> For more information about the governance of the Node.js project, see
<ide> **Rich Trott** <[email protected]> (he/him)
<ide> * [tunniclm](https://github.com/tunniclm) -
<ide> **Mike Tunnicliffe** <[email protected]>
<add>* [vdeturckheim](https://github.com/vdeturckheim) -
<add>**Vladimir de Turckheim** <[email protected]> (he/him)
<ide> * [vkurchatkin](https://github.com/vkurchatkin) -
<ide> **Vladimir Kurchatkin** <[email protected]>
<ide> * [vsemozhetbyt](https://github.com/vsemozhetbyt) - | 1 |
PHP | PHP | fix glob problem in file cache driver | 471b7aaa814dcd406a280e0bcba2df7c10dc435c | <ide><path>laravel/session/drivers/file.php
<ide> public function delete($id)
<ide> */
<ide> public function sweep($expiration)
<ide> {
<del> foreach (glob($this->path.'*') as $file)
<add> $files = glob($this->path.'*');
<add>
<add> if ($files === false) return;
<add>
<add> foreach ($files as $file)
<ide> {
<ide> if (filetype($file) == 'file' and filemtime($file) < $expiration)
<ide> { | 1 |
Javascript | Javascript | fix copy and serialize | 2641cd08cea2f1ed372566975c9f1380996188c8 | <ide><path>examples/js/nodes/postprocessing/NodePass.js
<ide> NodePass.prototype.render = function () {
<ide>
<ide> NodePass.prototype.copy = function ( source ) {
<ide>
<del> this.material = source.material;
<add> this.input = source.input;
<ide>
<ide> };
<ide>
<ide> NodePass.prototype.toJSON = function ( meta ) {
<ide>
<ide> if ( JSON.stringify( this.userData ) !== '{}' ) data.userData = this.userData;
<ide>
<del> data.material = this.material.toJSON( meta ).uuid;
<add> data.input = this.input.toJSON( meta ).uuid;
<ide>
<ide> }
<ide>
<ide><path>examples/js/nodes/postprocessing/NodePostProcessing.js
<ide> NodePostProcessing.prototype = {
<ide>
<ide> copy: function ( source ) {
<ide>
<del> this.material = source.material;
<add> this.output = source.output;
<ide>
<ide> },
<ide>
<ide> NodePostProcessing.prototype = {
<ide>
<ide> if ( JSON.stringify( this.userData ) !== '{}' ) data.userData = this.userData;
<ide>
<del> data.material = this.material.toJSON( meta ).uuid;
<add> data.output = this.output.toJSON( meta ).uuid;
<ide>
<ide> }
<ide> | 2 |
Python | Python | futurize imput.py to prepare for python 3 | 3b929989185fc9e1d89ef07c612ee02b84800378 | <ide><path>tools/gyp/pylib/gyp/input.py
<ide> # Use of this source code is governed by a BSD-style license that can be
<ide> # found in the LICENSE file.
<ide>
<del>from compiler.ast import Const
<del>from compiler.ast import Dict
<del>from compiler.ast import Discard
<del>from compiler.ast import List
<del>from compiler.ast import Module
<del>from compiler.ast import Node
<del>from compiler.ast import Stmt
<del>import compiler
<add>from __future__ import print_function
<add>
<add>import ast
<add>
<ide> import gyp.common
<ide> import gyp.simple_copy
<ide> import multiprocessing
<ide> def GetIncludedBuildFiles(build_file_path, aux_data, included=None):
<ide>
<ide> def CheckedEval(file_contents):
<ide> """Return the eval of a gyp file.
<del>
<ide> The gyp file is restricted to dictionaries and lists only, and
<ide> repeated keys are not allowed.
<del>
<ide> Note that this is slower than eval() is.
<ide> """
<ide>
<del> ast = compiler.parse(file_contents)
<del> assert isinstance(ast, Module)
<del> c1 = ast.getChildren()
<del> assert c1[0] is None
<del> assert isinstance(c1[1], Stmt)
<del> c2 = c1[1].getChildren()
<del> assert isinstance(c2[0], Discard)
<del> c3 = c2[0].getChildren()
<del> assert len(c3) == 1
<del> return CheckNode(c3[0], [])
<add> syntax_tree = ast.parse(file_contents)
<add> assert isinstance(syntax_tree, ast.Module)
<add> c1 = syntax_tree.body
<add> assert len(c1) == 1
<add> c2 = c1[0]
<add> assert isinstance(c2, ast.Expr)
<add> return CheckNode(c2.value, [])
<ide>
<ide>
<ide> def CheckNode(node, keypath):
<del>   if isinstance(node, Dict):
<add>   if isinstance(node, ast.Dict):
<del>     c = node.getChildren()
<ide>     dict = {}
<del>     for n in range(0, len(c), 2):
<del>       assert isinstance(c[n], Const)
<del>       key = c[n].getChildren()[0]
<add>     for key, value in zip(node.keys, node.values):
<add>       assert isinstance(key, ast.Str)
<add>       key = key.s
<ide>       if key in dict:
<ide>         raise GypError("Key '" + key + "' repeated at level " +
<ide>                        repr(len(keypath) + 1) + " with key path '" +
<ide>                        '.'.join(keypath) + "'")
<ide>       kp = list(keypath)  # Make a copy of the list for descending this node.
<ide>       kp.append(key)
<del>       dict[key] = CheckNode(c[n + 1], kp)
<add>       dict[key] = CheckNode(value, kp)
<ide>     return dict
<del>   elif isinstance(node, List):
<del>     c = node.getChildren()
<add>   elif isinstance(node, ast.List):
<ide>     children = []
<del>     for index, child in enumerate(c):
<add>     for index, child in enumerate(node.elts):
<ide>       kp = list(keypath)  # Copy list.
<ide>       kp.append(repr(index))
<ide>       children.append(CheckNode(child, kp))
<ide>     return children
<del>   elif isinstance(node, Const):
<del>     return node.getChildren()[0]
<add>   elif isinstance(node, ast.Str):
<add>     return node.s
<ide>   else:
<ide>     raise TypeError("Unknown AST node at key path '" + '.'.join(keypath) +
<del>                     "': " + repr(node))
<add>                     "': " + repr(node))
<ide>
<ide>
<ide> def LoadOneBuildFile(build_file_path, data, aux_data, includes,
<ide> def LoadOneBuildFile(build_file_path, data, aux_data, includes,
<ide> else:
<ide> build_file_data = eval(build_file_contents, {'__builtins__': None},
<ide> None)
<del> except SyntaxError, e:
<add> except SyntaxError as e:
<ide> e.filename = build_file_path
<ide> raise
<del> except Exception, e:
<add> except Exception as e:
<ide> gyp.common.ExceptionAppend(e, 'while reading ' + build_file_path)
<ide> raise
<ide>
<ide> def LoadOneBuildFile(build_file_path, data, aux_data, includes,
<ide> else:
<ide> LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
<ide> aux_data, None, check)
<del> except Exception, e:
<add> except Exception as e:
<ide> gyp.common.ExceptionAppend(e,
<ide> 'while reading includes of ' + build_file_path)
<ide> raise
<ide> def LoadBuildFileIncludesIntoDict(subdict, subdict_path, data, aux_data,
<ide> subdict_path, include)
<ide>
<ide> # Recurse into subdictionaries.
<del> for k, v in subdict.iteritems():
<add> for k, v in subdict.items():
<ide> if type(v) is dict:
<ide> LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data,
<ide> None, check)
<ide> def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes,
<ide> try:
<ide> LoadTargetBuildFile(dependency, data, aux_data, variables,
<ide> includes, depth, check, load_dependencies)
<del> except Exception, e:
<add> except Exception as e:
<ide> gyp.common.ExceptionAppend(
<ide> e, 'while loading dependencies of %s' % build_file_path)
<ide> raise
<ide> def CallLoadTargetBuildFile(global_flags,
<ide> signal.signal(signal.SIGINT, signal.SIG_IGN)
<ide>
<ide> # Apply globals so that the worker process behaves the same.
<del> for key, value in global_flags.iteritems():
<add> for key, value in global_flags.items():
<ide> globals()[key] = value
<ide>
<ide> SetGeneratorGlobals(generator_input_info)
<ide> def CallLoadTargetBuildFile(global_flags,
<ide> return (build_file_path,
<ide> build_file_data,
<ide> dependencies)
<del> except GypError, e:
<add> except GypError as e:
<ide> sys.stderr.write("gyp: %s\n" % e)
<ide> return None
<del> except Exception, e:
<del> print >>sys.stderr, 'Exception:', e
<del> print >>sys.stderr, traceback.format_exc()
<add> except Exception as e:
<add> print('Exception:', e, file=sys.stderr)
<add> print(traceback.format_exc(), file=sys.stderr)
<ide> return None
<ide>
<ide>
<ide> def LoadTargetBuildFilesParallel(build_files, data, variables, includes, depth,
<ide> args = (global_flags, dependency,
<ide> variables, includes, depth, check, generator_input_info),
<ide> callback = parallel_state.LoadTargetBuildFileCallback)
<del> except KeyboardInterrupt, e:
<add> except KeyboardInterrupt as e:
<ide> parallel_state.pool.terminate()
<ide> raise e
<ide>
<ide> def FixupPlatformCommand(cmd):
<ide>
<ide> def ExpandVariables(input, phase, variables, build_file):
<ide> # Look for the pattern that gets expanded into variables
<add> def to_utf8(s):
<add> return s if isinstance(s, str) else s.decode('utf-8')
<add>
<ide> if phase == PHASE_EARLY:
<ide> variable_re = early_variable_re
<ide> expansion_symbol = '<'
<ide> def ExpandVariables(input, phase, variables, build_file):
<ide> stderr=subprocess.PIPE,
<ide> stdin=subprocess.PIPE,
<ide> cwd=build_file_dir)
<del> except Exception, e:
<add> except Exception as e:
<ide> raise GypError("%s while executing command '%s' in %s" %
<ide> (e, contents, build_file))
<ide>
<ide> p_stdout, p_stderr = p.communicate('')
<add> p_stdout = to_utf8(p_stdout)
<add> p_stderr = to_utf8(p_stderr)
<ide>
<ide> if p.wait() != 0 or p_stderr:
<ide> sys.stderr.write(p_stderr)
<ide> def ExpandVariables(input, phase, variables, build_file):
<ide>
<ide> # Convert all strings that are canonically-represented integers into integers.
<ide> if type(output) is list:
<del> for index in xrange(0, len(output)):
<add> for index in range(0, len(output)):
<ide> if IsStrCanonicalInt(output[index]):
<ide> output[index] = int(output[index])
<ide> elif IsStrCanonicalInt(output):
<ide> def EvalSingleCondition(
<ide> if eval(ast_code, {'__builtins__': None}, variables):
<ide> return true_dict
<ide> return false_dict
<del> except SyntaxError, e:
<add> except SyntaxError as e:
<ide> syntax_error = SyntaxError('%s while evaluating condition \'%s\' in %s '
<ide> 'at character %d.' %
<ide> (str(e.args[0]), e.text, build_file, e.offset),
<ide> e.filename, e.lineno, e.offset, e.text)
<ide> raise syntax_error
<del> except NameError, e:
<add> except NameError as e:
<ide> gyp.common.ExceptionAppend(e, 'while evaluating condition \'%s\' in %s' %
<ide> (cond_expr_expanded, build_file))
<ide> raise GypError(e)
<ide> def ProcessConditionsInDict(the_dict, phase, variables, build_file):
<ide> def LoadAutomaticVariablesFromDict(variables, the_dict):
<ide> # Any keys with plain string values in the_dict become automatic variables.
<ide> # The variable name is the key name with a "_" character prepended.
<del> for key, value in the_dict.iteritems():
<add> for key, value in the_dict.items():
<ide> if type(value) in (str, int, list):
<ide> variables['_' + key] = value
<ide>
<ide> def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key):
<ide> # the_dict in the_dict's parent dict. If the_dict's parent is not a dict
<ide> # (it could be a list or it could be parentless because it is a root dict),
<ide> # the_dict_key will be None.
<del> for key, value in the_dict.get('variables', {}).iteritems():
<add> for key, value in the_dict.get('variables', {}).items():
<ide> if type(value) not in (str, int, list):
<ide> continue
<ide>
<ide> def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key):
<ide> if variable_name in variables:
<ide> # If the variable is already set, don't set it.
<ide> continue
<del> if the_dict_key is 'variables' and variable_name in the_dict:
<add> if the_dict_key == 'variables' and variable_name in the_dict:
<ide> # If the variable is set without a % in the_dict, and the_dict is a
<ide> # variables dict (making |variables| a varaibles sub-dict of a
<ide> # variables dict), use the_dict's definition.
<ide> def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in,
<ide> # list before we process them so that you can reference one
<ide> # variable from another. They will be fully expanded by recursion
<ide> # in ExpandVariables.
<del> for key, value in the_dict['variables'].iteritems():
<add> for key, value in the_dict['variables'].items():
<ide> variables[key] = value
<ide>
<ide> # Handle the associated variables dict first, so that any variable
<ide> def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in,
<ide>
<ide> LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
<ide>
<del> for key, value in the_dict.iteritems():
<add> for key, value in the_dict.items():
<ide> # Skip "variables", which was already processed if present.
<ide> if key != 'variables' and type(value) is str:
<ide> expanded = ExpandVariables(value, phase, variables, build_file)
<ide> def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in,
<ide>
<ide> # Recurse into child dicts, or process child lists which may result in
<ide> # further recursion into descendant dicts.
<del> for key, value in the_dict.iteritems():
<add> for key, value in the_dict.items():
<ide> # Skip "variables" and string values, which were already processed if
<ide> # present.
<ide> if key == 'variables' or type(value) is str:
<ide> def QualifyDependencies(targets):
<ide> for dep in dependency_sections
<ide> for op in ('', '!', '/')]
<ide>
<del> for target, target_dict in targets.iteritems():
<add> for target, target_dict in targets.items():
<ide> target_build_file = gyp.common.BuildFile(target)
<ide> toolset = target_dict['toolset']
<ide> for dependency_key in all_dependency_sections:
<ide> dependencies = target_dict.get(dependency_key, [])
<del> for index in xrange(0, len(dependencies)):
<add> for index in range(0, len(dependencies)):
<ide> dep_file, dep_target, dep_toolset = gyp.common.ResolveTarget(
<ide> target_build_file, dependencies[index], toolset)
<ide> if not multiple_toolsets:
<ide> def ExpandWildcardDependencies(targets, data):
<ide> dependency list, must be qualified when this function is called.
<ide> """
<ide>
<del> for target, target_dict in targets.iteritems():
<add> for target, target_dict in targets.items():
<ide> toolset = target_dict['toolset']
<ide> target_build_file = gyp.common.BuildFile(target)
<ide> for dependency_key in dependency_sections:
<ide> dependencies = target_dict.get(dependency_key, [])
<ide>
<del> # Loop this way instead of "for dependency in" or "for index in xrange"
<add> # Loop this way instead of "for dependency in" or "for index in range"
<ide> # because the dependencies list will be modified within the loop body.
<ide> index = 0
<ide> while index < len(dependencies):
<ide> def Unify(l):
<ide> def RemoveDuplicateDependencies(targets):
<ide> """Makes sure every dependency appears only once in all targets's dependency
<ide> lists."""
<del> for target_name, target_dict in targets.iteritems():
<add> for target_name, target_dict in targets.items():
<ide> for dependency_key in dependency_sections:
<ide> dependencies = target_dict.get(dependency_key, [])
<ide> if dependencies:
<ide> def Filter(l, item):
<ide> def RemoveSelfDependencies(targets):
<ide> """Remove self dependencies from targets that have the prune_self_dependency
<ide> variable set."""
<del> for target_name, target_dict in targets.iteritems():
<add> for target_name, target_dict in targets.items():
<ide> for dependency_key in dependency_sections:
<ide> dependencies = target_dict.get(dependency_key, [])
<ide> if dependencies:
<ide> def RemoveSelfDependencies(targets):
<ide> def RemoveLinkDependenciesFromNoneTargets(targets):
<ide> """Remove dependencies having the 'link_dependency' attribute from the 'none'
<ide> targets."""
<del> for target_name, target_dict in targets.iteritems():
<add> for target_name, target_dict in targets.items():
<ide> for dependency_key in dependency_sections:
<ide> dependencies = target_dict.get(dependency_key, [])
<ide> if dependencies:
<ide> def BuildDependencyList(targets):
<ide> # Create a DependencyGraphNode for each target. Put it into a dict for easy
<ide> # access.
<ide> dependency_nodes = {}
<del> for target, spec in targets.iteritems():
<add> for target, spec in targets.items():
<ide> if target not in dependency_nodes:
<ide> dependency_nodes[target] = DependencyGraphNode(target)
<ide>
<ide> # Set up the dependency links. Targets that have no dependencies are treated
<ide> # as dependent on root_node.
<ide> root_node = DependencyGraphNode(None)
<del> for target, spec in targets.iteritems():
<add> for target, spec in targets.items():
<ide> target_node = dependency_nodes[target]
<ide> target_build_file = gyp.common.BuildFile(target)
<ide> dependencies = spec.get('dependencies')
<ide> def VerifyNoGYPFileCircularDependencies(targets):
<ide> # Create a DependencyGraphNode for each gyp file containing a target. Put
<ide> # it into a dict for easy access.
<ide> dependency_nodes = {}
<del> for target in targets.iterkeys():
<add> for target in targets:
<ide> build_file = gyp.common.BuildFile(target)
<ide> if not build_file in dependency_nodes:
<ide> dependency_nodes[build_file] = DependencyGraphNode(build_file)
<ide>
<ide> # Set up the dependency links.
<del> for target, spec in targets.iteritems():
<add> for target, spec in targets.items():
<ide> build_file = gyp.common.BuildFile(target)
<ide> build_file_node = dependency_nodes[build_file]
<ide> target_dependencies = spec.get('dependencies', [])
<ide> for dependency in target_dependencies:
<ide> try:
<ide> dependency_build_file = gyp.common.BuildFile(dependency)
<del> except GypError, e:
<add> except GypError as e:
<ide> gyp.common.ExceptionAppend(
<ide> e, 'while computing dependencies of .gyp file %s' % build_file)
<ide> raise
<ide> def VerifyNoGYPFileCircularDependencies(targets):
<ide>
<ide> # Files that have no dependencies are treated as dependent on root_node.
<ide> root_node = DependencyGraphNode(None)
<del> for build_file_node in dependency_nodes.itervalues():
<add> for build_file_node in dependency_nodes.values():
<ide> if len(build_file_node.dependencies) == 0:
<ide> build_file_node.dependencies.append(root_node)
<ide> root_node.dependents.append(build_file_node)
<ide> def is_in_set_or_list(x, s, l):
<ide>
<ide> def MergeDicts(to, fro, to_file, fro_file):
<ide> # I wanted to name the parameter "from" but it's a Python keyword...
<del> for k, v in fro.iteritems():
<add> for k, v in fro.items():
<ide> # It would be nice to do "if not k in to: to[k] = v" but that wouldn't give
<ide> # copy semantics. Something else may want to merge from the |fro| dict
<ide> # later, and having the same dict ref pointed to twice in the tree isn't
<ide> def SetUpConfigurations(target, target_dict):
<ide> if not 'configurations' in target_dict:
<ide> target_dict['configurations'] = {'Default': {}}
<ide> if not 'default_configuration' in target_dict:
<del> concrete = [i for (i, config) in target_dict['configurations'].iteritems()
<add> concrete = [i for (i, config) in target_dict['configurations'].items()
<ide> if not config.get('abstract')]
<ide> target_dict['default_configuration'] = sorted(concrete)[0]
<ide>
<ide> merged_configurations = {}
<ide> configs = target_dict['configurations']
<del> for (configuration, old_configuration_dict) in configs.iteritems():
<add> for (configuration, old_configuration_dict) in configs.items():
<ide> # Skip abstract configurations (saves work only).
<ide> if old_configuration_dict.get('abstract'):
<ide> continue
<ide> # Configurations inherit (most) settings from the enclosing target scope.
<ide> # Get the inheritance relationship right by making a copy of the target
<ide> # dict.
<ide> new_configuration_dict = {}
<del> for (key, target_val) in target_dict.iteritems():
<add> for (key, target_val) in target_dict.items():
<ide> key_ext = key[-1:]
<ide> if key_ext in key_suffixes:
<ide> key_base = key[:-1]
<ide> def ProcessListFiltersInDict(name, the_dict):
<ide>
<ide> lists = []
<ide> del_lists = []
<del> for key, value in the_dict.iteritems():
<add> for key, value in the_dict.items():
<ide> operation = key[-1]
<ide> if operation != '!' and operation != '/':
<ide> continue
<ide> def ProcessListFiltersInDict(name, the_dict):
<ide> exclude_key = list_key + '!'
<ide> if exclude_key in the_dict:
<ide> for exclude_item in the_dict[exclude_key]:
<del> for index in xrange(0, len(the_list)):
<add> for index in range(0, len(the_list)):
<ide> if exclude_item == the_list[index]:
<ide> # This item matches the exclude_item, so set its action to 0
<ide> # (exclude).
<ide> def ProcessListFiltersInDict(name, the_dict):
<ide> raise ValueError('Unrecognized action ' + action + ' in ' + name + \
<ide> ' key ' + regex_key)
<ide>
<del> for index in xrange(0, len(the_list)):
<add> for index in range(0, len(the_list)):
<ide> list_item = the_list[index]
<ide> if list_actions[index] == action_value:
<ide> # Even if the regex matches, nothing will change so continue (regex
<ide> def ProcessListFiltersInDict(name, the_dict):
<ide> # the indices of items that haven't been seen yet don't shift. That means
<ide> # that things need to be prepended to excluded_list to maintain them in the
<ide> # same order that they existed in the_list.
<del> for index in xrange(len(list_actions) - 1, -1, -1):
<add> for index in range(len(list_actions) - 1, -1, -1):
<ide> if list_actions[index] == 0:
<ide> # Dump anything with action 0 (exclude). Keep anything with action 1
<ide> # (include) or -1 (no include or exclude seen for the item).
<ide> def ProcessListFiltersInDict(name, the_dict):
<ide> the_dict[excluded_key] = excluded_list
<ide>
<ide> # Now recurse into subdicts and lists that may contain dicts.
<del> for key, value in the_dict.iteritems():
<add> for key, value in the_dict.items():
<ide> if type(value) is dict:
<ide> ProcessListFiltersInDict(key, value)
<ide> elif type(value) is list:
<ide> def ValidateSourcesInTarget(target, target_dict, build_file,
<ide> basenames.setdefault(basename, []).append(source)
<ide>
<ide> error = ''
<del> for basename, files in basenames.iteritems():
<add> for basename, files in basenames.items():
<ide> if len(files) > 1:
<ide> error += ' %s: %s\n' % (basename, ' '.join(files))
<ide>
<ide> def TurnIntIntoStrInDict(the_dict):
<ide> def TurnIntIntoStrInList(the_list):
<ide> """Given list the_list, recursively converts all integers into strings.
<ide> """
<del> for index in xrange(0, len(the_list)):
<add> for index in range(0, len(the_list)):
<ide> item = the_list[index]
<ide> if type(item) is int:
<ide> the_list[index] = str(item)
<ide> def Load(build_files, variables, includes, depth, generator_input_info, check,
<ide> try:
<ide> LoadTargetBuildFile(build_file, data, aux_data,
<ide> variables, includes, depth, check, True)
<del> except Exception, e:
<add> except Exception as e:
<ide> gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file)
<ide> raise
<ide>
<ide> def Load(build_files, variables, includes, depth, generator_input_info, check,
<ide> RemoveLinkDependenciesFromNoneTargets(targets)
<ide>
<ide> # Apply exclude (!) and regex (/) list filters only for dependency_sections.
<del> for target_name, target_dict in targets.iteritems():
<add> for target_name, target_dict in targets.items():
<ide> tmp_dict = {}
<ide> for key_base in dependency_sections:
<ide> for op in ('', '!', '/'): | 1 |
Ruby | Ruby | remove empty line | 4e30f98ced37b0a82dced1cad0d6102108cd4b74 | <ide><path>activerecord/test/cases/adapters/mysql2/nested_deadlock_test.rb
<ide> class Sample < ActiveRecord::Base
<ide> end
<ide> end
<ide> end
<del>
<ide> end
<ide> end | 1 |
Python | Python | allow specification of terms to fit in fit method | 5f7b1af4e652c2ab631634d9778a3a4015e41ced | <ide><path>numpy/polynomial/_polybase.py
<ide> def fit(cls, x, y, deg, domain=None, rcond=None, full=False, w=None,
<ide> y-coordinates of the sample points. Several data sets of sample
<ide> points sharing the same x-coordinates can be fitted at once by
<ide> passing in a 2D-array that contains one dataset per column.
<del> deg : int
<del> Degree of the fitting polynomial.
<add> deg : int or array_like
<add> Degree of the fitting polynomial. If `deg` is a single integer
<add> all terms up to and including the `deg`'th term are included.
<add> `deg` may alternatively be a list or array specifying which
<add> terms in the Legendre expansion to include in the fit.
<add>
<add> .. versionchanged:: 1.11.0
<add> `deg` may be a list specifying which terms to fit
<ide> domain : {None, [beg, end], []}, optional
<ide> Domain to use for the returned series. If ``None``,
<ide> then a minimal domain that covers the points `x` is chosen. If | 1 |
Javascript | Javascript | add jsdoc typings for https | 9a7cbe25de88d87429a69050a1a1971234558d97 | <ide><path>lib/https.js
<ide> ObjectSetPrototypeOf(Server, tls.Server);
<ide>
<ide> Server.prototype.setTimeout = HttpServer.prototype.setTimeout;
<ide>
<add>/**
<add> * Creates a new `https.Server` instance.
<add> * @param {{
<add> * IncomingMessage?: IncomingMessage;
<add> * ServerResponse?: ServerResponse;
<add> * insecureHTTPParser?: boolean;
<add> * maxHeaderSize?: number;
<add> * }} [opts]
<add> * @param {Function} [requestListener]
<add> * @returns {Server}
<add> */
<ide> function createServer(opts, requestListener) {
<ide> return new Server(opts, requestListener);
<ide> }
<ide> function createConnection(port, host, options) {
<ide> return socket;
<ide> }
<ide>
<del>
<add>/**
<add> * Creates a new `HttpAgent` instance.
<add> * @param {{
<add> * keepAlive?: boolean;
<add> * keepAliveMsecs?: number;
<add> * maxSockets?: number;
<add> * maxTotalSockets?: number;
<add> * maxFreeSockets?: number;
<add> * scheduling?: string;
<add> * timeout?: number;
<add> * maxCachedSessions?: number;
<add> * servername?: string;
<add> * }} [options]
<add> * @returns {Agent}
<add> */
<ide> function Agent(options) {
<ide> if (!(this instanceof Agent))
<ide> return new Agent(options);
<ide> ObjectSetPrototypeOf(Agent.prototype, HttpAgent.prototype);
<ide> ObjectSetPrototypeOf(Agent, HttpAgent);
<ide> Agent.prototype.createConnection = createConnection;
<ide>
<add>/**
<add> * Gets a unique name for a set of options.
<add> * @param {{
<add> * host: string;
<add> * port: number;
<add> * localAddress: string;
<add> * family: number;
<add> * }} [options]
<add> * @returns {string}
<add> */
<ide> Agent.prototype.getName = function getName(options) {
<ide> let name = FunctionPrototypeCall(HttpAgent.prototype.getName, this, options);
<ide>
<ide> Agent.prototype._evictSession = function _evictSession(key) {
<ide>
<ide> const globalAgent = new Agent();
<ide>
<add>/**
<add> * Makes a request to a secure web server.
<add> * @param {...any} args
<add> * @returns {ClientRequest}
<add> */
<ide> function request(...args) {
<ide> let options = {};
<ide>
<ide> function request(...args) {
<ide> return ReflectConstruct(ClientRequest, args);
<ide> }
<ide>
<add>/**
<add> * Makes a GET request to a secure web server.
<add> * @param {string | URL} input
<add> * @param {{
<add> * agent?: Agent | boolean;
<add> * auth?: string;
<add> * createConnection?: Function;
<add> * defaultPort?: number;
<add> * family?: number;
<add> * headers?: Object;
<add> * hints?: number;
<add> * host?: string;
<add> * hostname?: string;
<add> * insecureHTTPParser?: boolean;
<add> * localAddress?: string;
<add> * localPort?: number;
<add> * lookup?: Function;
<add> * maxHeaderSize?: number;
<add> * method?: string;
<add> * path?: string;
<add> * port?: number;
<add> * protocol?: string;
<add> * setHost?: boolean;
<add> * socketPath?: string;
<add> * timeout?: number;
<add> * signal?: AbortSignal;
<add> * } | string | URL} [options]
<add> * @param {Function} [cb]
<add> * @returns {ClientRequest}
<add> */
<ide> function get(input, options, cb) {
<ide> const req = request(input, options, cb);
<ide> req.end(); | 1 |
Mixed | Javascript | add abortsignal to promisified pipeline | 38f6e5a789b19760592532e6a802f106c12f1d07 | <ide><path>doc/api/stream.md
<ide> pipeline(
<ide> );
<ide> ```
<ide>
<del>The `pipeline` API provides promise version:
<add>The `pipeline` API provides a promise version, which can also
<add>receive an options argument as the last parameter with a
<add>`signal` {AbortSignal} property. When the signal is aborted,
<add>`destroy` will be called on the underlying pipeline, with an
<add>`AbortError`.
<ide>
<ide> ```js
<ide> const { pipeline } = require('stream/promises');
<ide> async function run() {
<ide> run().catch(console.error);
<ide> ```
<ide>
<add>To use an `AbortSignal`, pass it inside an options object,
<add>as the last argument:
<add>
<add>```js
<add>const { pipeline } = require('stream/promises');
<add>
<add>async function run() {
<add> const ac = new AbortController();
<add> const options = {
<add> signal: ac.signal,
<add> };
<add>
<add> setTimeout(() => ac.abort(), 1);
<add> await pipeline(
<add> fs.createReadStream('archive.tar'),
<add> zlib.createGzip(),
<add> fs.createWriteStream('archive.tar.gz'),
<add> options,
<add> );
<add>}
<add>
<add>run().catch(console.error); // AbortError
<add>```
<add>
<ide> The `pipeline` API also supports async generators:
<ide>
<ide> ```js
<ide><path>lib/stream/promises.js
<ide> 'use strict';
<ide>
<ide> const {
<add> ArrayPrototypePop,
<ide> Promise,
<add> SymbolAsyncIterator,
<add> SymbolIterator,
<ide> } = primordials;
<ide>
<add>const {
<add> addAbortSignalNoValidate,
<add>} = require('internal/streams/add-abort-signal');
<add>
<add>const {
<add> validateAbortSignal,
<add>} = require('internal/validators');
<add>
<ide> let pl;
<ide> let eos;
<ide>
<add>function isReadable(obj) {
<add> return !!(obj && typeof obj.pipe === 'function');
<add>}
<add>
<add>function isWritable(obj) {
<add> return !!(obj && typeof obj.write === 'function');
<add>}
<add>
<add>function isStream(obj) {
<add> return isReadable(obj) || isWritable(obj);
<add>}
<add>
<add>function isIterable(obj, isAsync) {
<add> if (!obj) return false;
<add> if (isAsync === true) return typeof obj[SymbolAsyncIterator] === 'function';
<add> if (isAsync === false) return typeof obj[SymbolIterator] === 'function';
<add> return typeof obj[SymbolAsyncIterator] === 'function' ||
<add> typeof obj[SymbolIterator] === 'function';
<add>}
<add>
<ide> function pipeline(...streams) {
<ide> if (!pl) pl = require('internal/streams/pipeline');
<ide> return new Promise((resolve, reject) => {
<del> pl(...streams, (err, value) => {
<add> let signal;
<add> const lastArg = streams[streams.length - 1];
<add> if (lastArg && typeof lastArg === 'object' &&
<add> !isStream(lastArg) && !isIterable(lastArg)) {
<add> const options = ArrayPrototypePop(streams);
<add> signal = options.signal;
<add> validateAbortSignal(signal, 'options.signal');
<add> }
<add>
<add> const pipe = pl(...streams, (err, value) => {
<ide> if (err) {
<ide> reject(err);
<ide> } else {
<ide> resolve(value);
<ide> }
<ide> });
<add> if (signal) {
<add> addAbortSignalNoValidate(signal, pipe);
<add> }
<ide> });
<ide> }
<ide>
<ide><path>test/parallel/test-stream-pipeline.js
<ide> const net = require('net');
<ide> run();
<ide> }
<ide>
<add>{
<add> // Check aborted signal without values
<add> const pipelinePromise = promisify(pipeline);
<add> async function run() {
<add> const ac = new AbortController();
<add> const { signal } = ac;
<add> async function* producer() {
<add> ac.abort();
<add> await Promise.resolve();
<add> yield '8';
<add> }
<add>
<add> const w = new Writable({
<add> write(chunk, encoding, callback) {
<add> callback();
<add> }
<add> });
<add> await pipelinePromise(producer, w, { signal });
<add> }
<add>
<add> assert.rejects(run, { name: 'AbortError' }).then(common.mustCall());
<add>}
<add>
<add>{
<add> // Check aborted signal after init.
<add> const pipelinePromise = promisify(pipeline);
<add> async function run() {
<add> const ac = new AbortController();
<add> const { signal } = ac;
<add> async function* producer() {
<add> yield '5';
<add> await Promise.resolve();
<add> ac.abort();
<add> await Promise.resolve();
<add> yield '8';
<add> }
<add>
<add> const w = new Writable({
<add> write(chunk, encoding, callback) {
<add> callback();
<add> }
<add> });
<add> await pipelinePromise(producer, w, { signal });
<add> }
<add>
<add> assert.rejects(run, { name: 'AbortError' }).then(common.mustCall());
<add>}
<add>
<add>{
<add> // Check pre-aborted signal
<add> const pipelinePromise = promisify(pipeline);
<add> async function run() {
<add> const ac = new AbortController();
<add> const { signal } = ac;
<add> ac.abort();
<add> async function* producer() {
<add> yield '5';
<add> await Promise.resolve();
<add> yield '8';
<add> }
<add>
<add> const w = new Writable({
<add> write(chunk, encoding, callback) {
<add> callback();
<add> }
<add> });
<add> await pipelinePromise(producer, w, { signal });
<add> }
<add>
<add> assert.rejects(run, { name: 'AbortError' }).then(common.mustCall());
<add>}
<add>
<ide> {
<ide> const read = new Readable({
<ide> read() {} | 3 |
Python | Python | fix esm lm head test | 1076d587b580bb0379037339fbb2bc796062192a | <ide><path>tests/models/esm/test_modeling_esm.py
<ide> def test_inference_masked_lm(self):
<ide> self.assertEqual(output.shape, expected_shape)
<ide>
<ide> expected_slice = torch.tensor(
<del> [[[15.0973, -6.6406, -1.1351], [-0.2209, -9.9622, 4.2109], [-1.6055, -10.0023, 1.5914]]]
<add> [[[8.9215, -10.5898, -6.4671], [-6.3967, -13.9114, -1.1212], [-7.7812, -13.9516, -3.7406]]]
<ide> )
<ide> self.assertTrue(torch.allclose(output[:, :3, :3], expected_slice, atol=1e-4))
<ide> | 1 |
Python | Python | fix the array api array.__setitem__ | e4b7205fbaece2b604b0ac2b11a586a9f7c6b3dd | <ide><path>numpy/_array_api/_array_object.py
<ide> def __setitem__(self, key: Union[int, slice, ellipsis, Tuple[Union[int, slice, e
<ide> # Note: Only indices required by the spec are allowed. See the
<ide> # docstring of _validate_index
<ide> key = self._validate_index(key, self.shape)
<del> res = self._array.__setitem__(key, asarray(value)._array)
<del> return self.__class__._new(res)
<add> self._array.__setitem__(key, asarray(value)._array)
<ide>
<ide> def __sub__(self: Array, other: Union[int, float, Array], /) -> Array:
<ide> """ | 1 |
Javascript | Javascript | ignore queryserver msgs on disconnection | f9f1dd92903cd7de5c2c9ba4d8640fb35c9c2bb7 | <ide><path>lib/cluster.js
<ide> function masterInit() {
<ide> }
<ide>
<ide> function queryServer(worker, message) {
<add> // Stop processing if worker already disconnecting
<add> if (worker.suicide)
<add> return;
<ide> var args = [message.address,
<ide> message.port,
<ide> message.addressType,
<ide><path>test/sequential/test-cluster-disconnect-leak.js
<add>'use strict';
<add>// Flags: --expose-internals
<add>
<add>const common = require('../common');
<add>const assert = require('assert');
<add>const net = require('net');
<add>const cluster = require('cluster');
<add>const handles = require('internal/cluster').handles;
<add>const os = require('os');
<add>
<add>if (common.isWindows) {
<add> console.log('1..0 # Skipped: This test does not apply to Windows.');
<add> return;
<add>}
<add>
<add>cluster.schedulingPolicy = cluster.SCHED_NONE;
<add>
<add>if (cluster.isMaster) {
<add> const cpus = os.cpus().length;
<add> const tries = cpus > 8 ? 128 : cpus * 16;
<add>
<add> const worker1 = cluster.fork();
<add> worker1.on('message', common.mustCall(() => {
<add> worker1.disconnect();
<add> for (let i = 0; i < tries; ++ i) {
<add> const w = cluster.fork();
<add> w.on('online', common.mustCall(w.disconnect));
<add> }
<add> }));
<add>
<add> cluster.on('exit', common.mustCall((worker, code) => {
<add> assert.strictEqual(code, 0, 'worker exited with error');
<add> }, tries + 1));
<add>
<add> process.on('exit', () => {
<add> assert.deepEqual(Object.keys(cluster.workers), []);
<add> assert.strictEqual(Object.keys(handles).length, 0);
<add> });
<add>
<add> return;
<add>}
<add>
<add>var server = net.createServer();
<add>
<add>server.listen(common.PORT, function() {
<add> process.send('listening');
<add>}); | 2 |
PHP | PHP | remove hacky code | 6f338115b40d2bb5119c1d33d36d899632e91e07 | <ide><path>lib/Cake/Network/Http/Client.php
<ide> */
<ide> namespace Cake\Network\Http;
<ide>
<add>use Cake\Error;
<ide> use Cake\Network\Http\Request;
<ide> use Cake\Network\Http\Response;
<ide> use Cake\Utility\Hash;
<ide> protected function _typeHeaders($type) {
<ide> 'Content-Type' => $type
<ide> ];
<ide> }
<del> // Hacky as hell but I'll clean it up I promise
<del> $reflection = new \ReflectionClass('Cake\Network\Response');
<del> $properties = $reflection->getDefaultProperties();
<del> if (isset($properties['_mimeTypes'][$type])) {
<del> $mimeTypes = $properties['_mimeTypes'][$type];
<del> $mimeType = is_array($mimeTypes) ? current($mimeTypes) : $mimeTypes;
<del> return [
<del> 'Accept' => $mimeType,
<del> 'Content-Type' => $mimeType,
<del> ];
<add> $typeMap = [
<add> 'json' => 'application/json',
<add> 'xml' => 'application/xml',
<add> ];
<add> if (!isset($typeMap[$type])) {
<add> throw new Error\Exception(__d('cake_dev', 'Unknown type alias.'));
<ide> }
<del> return [];
<add> return [
<add> 'Accept' => $typeMap[$type],
<add> 'Content-Type' => $typeMap[$type],
<add> ];
<ide> }
<ide>
<ide> }
<ide><path>lib/Cake/Test/TestCase/Network/Http/ClientTest.php
<ide> public function testPostWithTypeKey($type, $mime) {
<ide> ]);
<ide> $http->post('/projects/add', $data, ['type' => $type]);
<ide> }
<add>
<add>/**
<add> * Test that exceptions are raised on invalid types.
<add> *
<add> * @expectedException Cake\Error\Exception
<add> * @return void
<add> */
<add> public function testExceptionOnUnknownType() {
<add> $mock = $this->getMock('Cake\Network\Http\Adapter\Stream', ['send']);
<add> $mock->expects($this->never())
<add> ->method('send');
<add>
<add> $http = new Client([
<add> 'host' => 'cakephp.org',
<add> 'adapter' => $mock
<add> ]);
<add> $http->post('/projects/add', 'it works', ['type' => 'invalid']);
<add> }
<ide> } | 2 |
Python | Python | fix the check | a9b84ce7571d3557839b73229bd8069b8412b2d6 | <ide><path>libcloud/storage/base.py
<ide> def _validate_start_and_end_bytes(self, start_bytes, end_bytes=None):
<ide> if start_bytes < 0:
<ide> raise ValueError('start_bytes must be greater than 0')
<ide>
<del> if end_bytes:
<add> if end_bytes is not None:
<ide> if start_bytes > end_bytes:
<ide> raise ValueError('start_bytes must be smaller than end_bytes')
<ide> elif start_bytes == end_bytes: | 1 |
Ruby | Ruby | use correct variable | 5b0b1ee8fda1cd086653992f812f96c62fb3c24b | <ide><path>railties/test/application/dbconsole_test.rb
<ide> def test_use_value_defined_in_environment_file_in_database_yml
<ide> spawn_dbconsole(replica)
<ide> assert_output("sqlite>", primary)
<ide> ensure
<del> master.puts ".exit"
<add> primary.puts ".exit"
<ide> end
<ide>
<ide> def test_respect_environment_option
<ide> def test_respect_environment_option
<ide> spawn_dbconsole(replica, "-e production")
<ide> assert_output("sqlite>", primary)
<ide>
<del> master.puts "pragma database_list;"
<del> assert_output("production.sqlite3", master)
<add> primary.puts "pragma database_list;"
<add> assert_output("production.sqlite3", primary)
<ide> ensure
<del> master.puts ".exit"
<add> primary.puts ".exit"
<ide> end
<ide>
<ide> private
<ide><path>railties/test/engine/commands_test.rb
<ide> def test_console_command_work_inside_engine
<ide> spawn_command("console", replica)
<ide> assert_output(">", primary)
<ide> ensure
<del> master.puts "quit"
<add> primary.puts "quit"
<ide> end
<ide>
<ide> def test_dbconsole_command_work_inside_engine
<ide> def test_dbconsole_command_work_inside_engine
<ide> spawn_command("dbconsole", replica)
<ide> assert_output("sqlite>", primary)
<ide> ensure
<del> master.puts ".exit"
<add> primary.puts ".exit"
<ide> end
<ide>
<ide> def test_server_command_work_inside_engine | 2 |
Go | Go | move xino stuff to /dev/shm | f67ea78cce83114998390c16305a6869c72f5100 | <ide><path>image.go
<ide> func MountAUFS(ro []string, rw string, target string) error {
<ide> }
<ide> branches := fmt.Sprintf("br:%v:%v", rwBranch, roBranches)
<ide>
<add> branches += ",xino=/dev/shm/aufs.xino"
<add>
<ide> //if error, try to load aufs kernel module
<ide> if err := mount("none", target, "aufs", 0, branches); err != nil {
<ide> log.Printf("Kernel does not support AUFS, trying to load the AUFS module with modprobe...") | 1 |
Java | Java | try the debug executor before the release executor | b8621f5d303442ab78dc5d745cfc86a941d4737c | <ide><path>ReactAndroid/src/main/java/com/facebook/hermes/reactexecutor/HermesExecutor.java
<ide> public class HermesExecutor extends JavaScriptExecutor {
<ide> // libhermes must be loaded explicitly to invoke its JNI_OnLoad.
<ide> SoLoader.loadLibrary("hermes");
<ide> try {
<del> SoLoader.loadLibrary("hermes-executor-release");
<del> mode_ = "Release";
<del> } catch (UnsatisfiedLinkError e) {
<ide> SoLoader.loadLibrary("hermes-executor-debug");
<ide> mode_ = "Debug";
<add> } catch (UnsatisfiedLinkError e) {
<add> SoLoader.loadLibrary("hermes-executor-release");
<add> mode_ = "Release";
<ide> }
<ide> }
<ide> | 1 |
PHP | PHP | remove cookie reading in startup | 0b0d83f261d3d1dd9068841a2e6161a2b10e3633 | <ide><path>lib/Cake/Controller/Component/CookieComponent.php
<ide> public function startup(Controller $controller) {
<ide> $this->_expire($this->time);
<ide>
<ide> $this->_values[$this->name] = array();
<del> if (isset($_COOKIE[$this->name])) {
<del> $this->_values[$this->name] = $this->_decrypt($_COOKIE[$this->name]);
<del> }
<ide> }
<ide>
<ide> /** | 1 |
Python | Python | fix error on urllib | b5098079d84a2472d4d8552d8c7867c39b46da88 | <ide><path>spacy/cli/download.py
<ide>
<ide> from .link import link
<ide> from ..util import prints, get_package_path
<del>from ..compat import url_open, url_error
<add>from ..compat import url_read, HTTPError
<ide> from .. import about
<ide>
<ide>
<ide> def download(model, direct=False):
<ide>
<ide> def get_json(url, desc):
<ide> try:
<del> r = url_open(url)
<del> except url_error as e:
<add> data = url_read(url)
<add> except HTTPError as e:
<ide> msg = ("Couldn't fetch %s. Please find a model for your spaCy "
<ide> "installation (v%s), and download it manually.")
<ide> prints(msg % (desc, about.__version__), about.__docs_models__,
<ide> title="Server error (%d: %s)" % (e.code, e.reason), exits=1)
<del> return ujson.load(r)
<add> return ujson.loads(data)
<ide>
<ide>
<ide> def get_compatibility():
<ide><path>spacy/cli/validate.py
<ide> import sys
<ide> import ujson
<ide>
<del>from ..compat import path2str, locale_escape, url_open, url_error
<add>from ..compat import path2str, locale_escape, url_read, HTTPError
<ide> from ..util import prints, get_data_path, read_json
<ide> from .. import about
<ide>
<ide> def validate():
<ide> with the installed models. Should be run after `pip install -U spacy`.
<ide> """
<ide> try:
<del> r = url_open(about.__compatibility__)
<del> except url_error as e:
<add> data = url_read(about.__compatibility__)
<add> except HTTPError as e:
<ide> prints("Couldn't fetch compatibility table.",
<ide> title="Server error (%d: %s)" % (e.code, e.reason), exits=1)
<del> compat = ujson.load(r)['spacy']
<add> compat = ujson.loads(data)['spacy']
<ide> current_compat = compat.get(about.__version__)
<ide> if not current_compat:
<ide> prints(about.__compatibility__, exits=1,
<ide><path>spacy/compat.py
<ide> import urllib2 as urllib
<ide>
<ide> try:
<del> from urllib.error import HTTPError as url_error
<add> from urllib.error import HTTPError
<ide> except ImportError:
<del> from urllib2 import HTTPError as url_error
<add> from urllib2 import HTTPError
<ide>
<ide> pickle = pickle
<ide> copy_reg = copy_reg
<ide> CudaStream = CudaStream
<ide> cupy = cupy
<ide> copy_array = copy_array
<ide> urllib = urllib
<del>url_error = url_error
<ide> izip = getattr(itertools, 'izip', zip)
<ide>
<ide> is_windows = sys.platform.startswith('win')
<ide> input_ = raw_input # noqa: F821
<ide> json_dumps = lambda data: ujson.dumps(data, indent=2, escape_forward_slashes=False).decode('utf8')
<ide> path2str = lambda path: str(path).decode('utf8')
<del> url_open = lambda url: urllib.urlopen(url)
<add> url_open = urllib.urlopen
<ide>
<ide> elif is_python3:
<ide> bytes_ = bytes
<ide> input_ = input
<ide> json_dumps = lambda data: ujson.dumps(data, indent=2, escape_forward_slashes=False)
<ide> path2str = lambda path: str(path)
<del> url_open = lambda url: urllib.request.urlopen(url)
<add> url_open = urllib.request.urlopen
<add>
<add>
<add>def url_read(url):
<add> file_ = url_open(url)
<add> code = file_.getcode()
<add> if code != 200:
<add> raise HTTPError(url, code, "Cannot GET url", [], file_)
<add> data = file_.read()
<add> return data
<ide>
<ide>
<ide> def b_to_str(b_str): | 3 |
Javascript | Javascript | add explicit tests for intermediate components | 1e42c1833cf50f2450be35758470f8b84d7a60a2 | <ide><path>src/renderers/shared/fiber/__tests__/ReactIncremental-test.js
<ide> describe('ReactIncremental', () => {
<ide> state = {x: 0};
<ide> render() {
<ide> statefulInst = this;
<del> return [
<del> <ShowLocaleClass />,
<del> <ShowLocaleFn />,
<del> ];
<add> return this.props.children;
<add> }
<add> }
<add>
<add> function IndirectionFn(props, context) {
<add> ops.push('IndirectionFn ' + JSON.stringify(context));
<add> return props.children;
<add> }
<add>
<add> class IndirectionClass extends React.Component {
<add> render() {
<add> ops.push('IndirectionClass ' + JSON.stringify(this.context));
<add> return this.props.children;
<ide> }
<ide> }
<ide>
<ide> ops.length = 0;
<ide> ReactNoop.render(
<ide> <Intl locale="fr">
<del> <Stateful />
<add> <IndirectionFn>
<add> <IndirectionClass>
<add> <Stateful>
<add> <ShowLocaleClass />
<add> <ShowLocaleFn />
<add> </Stateful>
<add> </IndirectionClass>
<add> </IndirectionFn>
<ide> </Intl>
<ide> );
<ide> ReactNoop.flush();
<ide> expect(ops).toEqual([
<ide> 'Intl:read null',
<ide> 'Intl:provide {"locale":"fr"}',
<add> 'IndirectionFn null',
<add> 'IndirectionClass null',
<ide> 'ShowLocaleClass:read {"locale":"fr"}',
<ide> 'ShowLocaleFn:read {"locale":"fr"}',
<ide> ]);
<ide>
<ide> ops.length = 0;
<ide> statefulInst.setState({x: 1});
<ide> ReactNoop.flush();
<del> expect(ops).toEqual([
<del> // Intl was memoized so we did not need to
<del> // either render it or recompute its context.
<del> 'ShowLocaleClass:read {"locale":"fr"}',
<del> 'ShowLocaleFn:read {"locale":"fr"}',
<del> ]);
<add> // All work has been memoized because setState()
<add> // happened below the context and could not have affected it.
<add> expect(ops).toEqual([]);
<ide> });
<ide>
<ide> it('reads context when setState is above the provider', () => {
<ide> describe('ReactIncremental', () => {
<ide> locale: React.PropTypes.string,
<ide> };
<ide>
<add> function IndirectionFn(props, context) {
<add> ops.push('IndirectionFn ' + JSON.stringify(context));
<add> return props.children;
<add> }
<add>
<add> class IndirectionClass extends React.Component {
<add> render() {
<add> ops.push('IndirectionClass ' + JSON.stringify(this.context));
<add> return this.props.children;
<add> }
<add> }
<add>
<ide> class Stateful extends React.Component {
<ide> state = {locale: 'fr'};
<ide> render() {
<ide> statefulInst = this;
<ide> return (
<ide> <Intl locale={this.state.locale}>
<del> <ShowLocaleClass />
<del> <ShowLocaleFn />
<add> {this.props.children}
<ide> </Intl>
<ide> );
<ide> }
<ide> }
<ide>
<ide> ops.length = 0;
<del> ReactNoop.render(<Stateful />);
<add> ReactNoop.render(
<add> <Stateful>
<add> <IndirectionFn>
<add> <IndirectionClass>
<add> <ShowLocaleClass />
<add> <ShowLocaleFn />
<add> </IndirectionClass>
<add> </IndirectionFn>
<add> </Stateful>
<add> );
<ide> ReactNoop.flush();
<ide> expect(ops).toEqual([
<ide> 'Intl:read null',
<ide> 'Intl:provide {"locale":"fr"}',
<add> 'IndirectionFn null',
<add> 'IndirectionClass null',
<ide> 'ShowLocaleClass:read {"locale":"fr"}',
<ide> 'ShowLocaleFn:read {"locale":"fr"}',
<ide> ]);
<ide> describe('ReactIncremental', () => {
<ide> // its child context.
<ide> 'Intl:read null',
<ide> 'Intl:provide {"locale":"gr"}',
<add> // TODO: it's unfortunate that we can't reuse work on
<add> // these components even though they don't depend on context.
<add> 'IndirectionFn null',
<add> 'IndirectionClass null',
<add> // These components depend on context:
<ide> 'ShowLocaleClass:read {"locale":"gr"}',
<ide> 'ShowLocaleFn:read {"locale":"gr"}',
<ide> ]); | 1 |
Javascript | Javascript | remove some redundant lines | ae8c6dd5347797f2279639a5bc644e9b6632bbbb | <ide><path>packages/react-reconciler/src/ReactFiberScheduler.js
<ide> function renderRoot(root: FiberRoot, isYieldy: boolean): void {
<ide> resetStack();
<ide> nextRoot = root;
<ide> nextRenderExpirationTime = expirationTime;
<del> nextLatestTimeoutMs = -1;
<del> nextRenderDidError = false;
<ide> nextUnitOfWork = createWorkInProgress(
<ide> nextRoot.current,
<ide> null,
<ide> function performWorkOnRoot(
<ide> // This root is already complete. We can commit it.
<ide> completeRoot(root, finishedWork, expirationTime);
<ide> } else {
<del> root.finishedWork = null;
<ide> renderRoot(root, false);
<ide> finishedWork = root.finishedWork;
<ide> if (finishedWork !== null) {
<ide> function performWorkOnRoot(
<ide> // This root is already complete. We can commit it.
<ide> completeRoot(root, finishedWork, expirationTime);
<ide> } else {
<del> root.finishedWork = null;
<ide> renderRoot(root, true);
<ide> finishedWork = root.finishedWork;
<ide> if (finishedWork !== null) { | 1 |
Java | Java | fix dispatcherservlet warnings | c4d98278e94d94187e429d33400b7425b128a577 | <ide><path>org.springframework.web.servlet/src/main/java/org/springframework/web/servlet/DispatcherServlet.java
<ide> * @see org.springframework.web.servlet.mvc.Controller
<ide> * @see org.springframework.web.context.ContextLoaderListener
<ide> */
<add>@SuppressWarnings("serial")
<ide> public class DispatcherServlet extends FrameworkServlet {
<ide>
<ide> /** Well-known name for the MultipartResolver object in the bean factory for this namespace. */
<ide> protected <T> List<T> getDefaultStrategies(ApplicationContext context, Class<T>
<ide> List<T> strategies = new ArrayList<T>(classNames.length);
<ide> for (String className : classNames) {
<ide> try {
<del> Class clazz = ClassUtils.forName(className, DispatcherServlet.class.getClassLoader());
<add> Class<?> clazz = ClassUtils.forName(className, DispatcherServlet.class.getClassLoader());
<ide> Object strategy = createDefaultStrategy(context, clazz);
<ide> strategies.add((T) strategy);
<ide> }
<ide> protected void doService(HttpServletRequest request, HttpServletResponse respons
<ide> if (WebUtils.isIncludeRequest(request)) {
<ide> logger.debug("Taking snapshot of request attributes before include");
<ide> attributesSnapshot = new HashMap<String, Object>();
<del> Enumeration attrNames = request.getAttributeNames();
<add> Enumeration<?> attrNames = request.getAttributeNames();
<ide> while (attrNames.hasMoreElements()) {
<ide> String attrName = (String) attrNames.nextElement();
<ide> if (this.cleanupAfterInclude || attrName.startsWith("org.springframework.web.servlet")) {
<ide> private void triggerAfterCompletion(HandlerExecutionChain mappedHandler,
<ide> * @param request current HTTP request
<ide> * @param attributesSnapshot the snapshot of the request attributes before the include
<ide> */
<del> private void restoreAttributesAfterInclude(HttpServletRequest request, Map attributesSnapshot) {
<add> private void restoreAttributesAfterInclude(HttpServletRequest request, Map<?,?> attributesSnapshot) {
<ide> logger.debug("Restoring snapshot of request attributes after include");
<ide>
<ide> // Need to copy into separate Collection here, to avoid side effects
<ide> // on the Enumeration when removing attributes.
<ide> Set<String> attrsToCheck = new HashSet<String>();
<del> Enumeration attrNames = request.getAttributeNames();
<add> Enumeration<?> attrNames = request.getAttributeNames();
<ide> while (attrNames.hasMoreElements()) {
<ide> String attrName = (String) attrNames.nextElement();
<ide> if (this.cleanupAfterInclude || attrName.startsWith("org.springframework.web.servlet")) { | 1 |
PHP | PHP | add forcerefresh option to describe | bdd3225b419daa13c30faf966cb067ce999bacf8 | <ide><path>src/Console/Command/OrmCacheShell.php
<ide> public function build($name = null) {
<ide> }
<ide> foreach ($tables as $table) {
<ide> $this->_io->verbose('Building metadata cache for ' . $table);
<del> $schema->describe($table);
<add> $schema->describe($table, ['forceRefresh' => true]);
<ide> }
<ide> $this->out('<success>Cache build complete</success>');
<ide> }
<ide><path>src/Database/Schema/Collection.php
<ide> public function listTables() {
<ide> * Caching will be applied if `cacheMetadata` key is present in the Connection
<ide> * configuration options. Defaults to _cake_model_ when true.
<ide> *
<add> * ### Options
<add> *
<add> * - `forceRefresh` - Set to true to force rebuilding the cached metadata.
<add> * Defaults to false.
<add> *
<ide> * @param string $name The name of the table to describe.
<add> * @param array $options The options to use, see above.
<ide> * @return \Cake\Database\Schema\Table Object with column metadata.
<ide> * @throws \Cake\Database\Exception when table cannot be described.
<ide> */
<del> public function describe($name) {
<add> public function describe($name, array $options) {
<add> $options += ['forceRefresh' => false];
<ide> $cacheConfig = $this->cacheMetadata();
<del> if ($cacheConfig) {
<del> $cacheKey = $this->cacheKey($name);
<add> $cacheKey = $this->cacheKey($name);
<add>
<add> if (!empty($cacheConfig) && !$options['forceRefresh']) {
<ide> $cached = Cache::read($cacheKey, $cacheConfig);
<ide> if ($cached !== false) {
<ide> return $cached; | 2 |
Javascript | Javascript | replace set.prototype with setprototype primordial | e56840368b1851fb2d0ab6e80f1e2fd73c94d4fa | <ide><path>lib/internal/process/per_thread.js
<ide> const {
<ide> ObjectFreeze,
<ide> ObjectGetOwnPropertyDescriptors,
<ide> RegExpPrototypeTest,
<add> SetPrototype,
<ide> SetPrototypeHas,
<ide> StringPrototypeReplace,
<ide> } = primordials;
<ide> function buildAllowedFlags() {
<ide> // each object.
<ide> const nodeFlags = ObjectDefineProperties(
<ide> new Set(allowedNodeEnvironmentFlags.map(trimLeadingDashes)),
<del> ObjectGetOwnPropertyDescriptors(Set.prototype)
<add> ObjectGetOwnPropertyDescriptors(SetPrototype)
<ide> );
<ide>
<ide> class NodeEnvironmentFlagsSet extends Set { | 1 |
Text | Text | fix documentation of `token.ancestors` | c323789721d36dd9912fa20b9196f01d98581f0e | <ide><path>website/docs/api/token.md
<ide> dependency tree.
<ide>
<ide> ## Token.ancestors {#ancestors tag="property" model="parser"}
<ide>
<del>The rightmost token of this token's syntactic descendants.
<add>A sequence of the token's syntactic ancestors (parents, grandparents, etc).
<ide>
<ide> > #### Example
<ide> > | 1 |
Ruby | Ruby | introduce a callback for forked children | 78b9580e5f3208c7048659de24f2220693afb23c | <ide><path>activesupport/lib/active_support.rb
<ide> module ActiveSupport
<ide> autoload :Executor
<ide> autoload :FileUpdateChecker
<ide> autoload :EventedFileUpdateChecker
<add> autoload :ForkTracker
<ide> autoload :LogSubscriber
<ide> autoload :Notifications
<ide> autoload :Reloader
<ide><path>activesupport/lib/active_support/fork_tracker.rb
<add># frozen_string_literal: true
<add>
<add>module ActiveSupport
<add> module ForkTracker # :nodoc:
<add> module CoreExt
<add> def fork(*)
<add> if block_given?
<add> super do
<add> ForkTracker.check!
<add> yield
<add> end
<add> else
<add> unless pid = super
<add> ForkTracker.check!
<add> end
<add> pid
<add> end
<add> end
<add> end
<add>
<add> @pid = Process.pid
<add> @callbacks = []
<add>
<add> class << self
<add> def check!
<add> if @pid != Process.pid
<add> @callbacks.each(&:call)
<add> @pid = Process.pid
<add> end
<add> end
<add>
<add> def hook!
<add> ::Object.prepend(CoreExt)
<add> ::Kernel.singleton_class.prepend(CoreExt)
<add> ::Process.singleton_class.prepend(CoreExt)
<add> end
<add>
<add> def after_fork(&block)
<add> @callbacks << block
<add> block
<add> end
<add>
<add> def unregister(callback)
<add> @callbacks.delete(callback)
<add> end
<add> end
<add> end
<add>end
<add>
<add>ActiveSupport::ForkTracker.hook!
<ide><path>activesupport/test/fork_tracker_test.rb
<add># frozen_string_literal: true
<add>
<add>require "abstract_unit"
<add>
<add>class ForkTrackerTest < ActiveSupport::TestCase
<add> def test_object_fork
<add> read, write = IO.pipe
<add> called = false
<add>
<add> handler = ActiveSupport::ForkTracker.after_fork do
<add> called = true
<add> write.write "forked"
<add> end
<add>
<add> pid = fork do
<add> read.close
<add> write.close
<add> exit!
<add> end
<add>
<add> write.close
<add>
<add> Process.waitpid(pid)
<add> assert_equal "forked", read.read
<add> read.close
<add>
<add> assert_not called
<add> ensure
<add> ActiveSupport::ForkTracker.unregister(handler)
<add> end
<add>
<add> def test_object_fork_without_block
<add> read, write = IO.pipe
<add> called = false
<add>
<add> handler = ActiveSupport::ForkTracker.after_fork do
<add> called = true
<add> write.write "forked"
<add> end
<add>
<add> if pid = fork
<add> write.close
<add> Process.waitpid(pid)
<add> assert_equal "forked", read.read
<add> read.close
<add> assert_not called
<add> else
<add> read.close
<add> write.close
<add> exit!
<add> end
<add> ensure
<add> ActiveSupport::ForkTracker.unregister(handler)
<add> end
<add>
<add> def test_process_fork
<add> read, write = IO.pipe
<add> called = false
<add>
<add> handler = ActiveSupport::ForkTracker.after_fork do
<add> called = true
<add> write.write "forked"
<add> end
<add>
<add> pid = Process.fork do
<add> read.close
<add> write.close
<add> exit!
<add> end
<add>
<add> write.close
<add>
<add> Process.waitpid(pid)
<add> assert_equal "forked", read.read
<add> read.close
<add> assert_not called
<add> ensure
<add> ActiveSupport::ForkTracker.unregister(handler)
<add> end
<add>
<add> def test_process_fork_without_block
<add> read, write = IO.pipe
<add> called = false
<add>
<add> handler = ActiveSupport::ForkTracker.after_fork do
<add> called = true
<add> write.write "forked"
<add> end
<add>
<add> if pid = Process.fork
<add> write.close
<add> Process.waitpid(pid)
<add> assert_equal "forked", read.read
<add> read.close
<add> assert_not called
<add> else
<add> read.close
<add> write.close
<add> exit!
<add> end
<add> ensure
<add> ActiveSupport::ForkTracker.unregister(handler)
<add> end
<add>
<add> def test_kernel_fork
<add> read, write = IO.pipe
<add> called = false
<add>
<add> handler = ActiveSupport::ForkTracker.after_fork do
<add> called = true
<add> write.write "forked"
<add> end
<add>
<add> pid = Kernel.fork do
<add> read.close
<add> write.close
<add> exit!
<add> end
<add>
<add> write.close
<add>
<add> Process.waitpid(pid)
<add> assert_equal "forked", read.read
<add> read.close
<add> assert_not called
<add> ensure
<add> ActiveSupport::ForkTracker.unregister(handler)
<add> end
<add>
<add> def test_kernel_fork_without_block
<add> read, write = IO.pipe
<add> called = false
<add>
<add> handler = ActiveSupport::ForkTracker.after_fork do
<add> called = true
<add> write.write "forked"
<add> end
<add>
<add> if pid = Kernel.fork
<add> write.close
<add> Process.waitpid(pid)
<add> assert_equal "forked", read.read
<add> read.close
<add> assert_not called
<add> else
<add> read.close
<add> write.close
<add> exit!
<add> end
<add> ensure
<add> ActiveSupport::ForkTracker.unregister(handler)
<add> end
<add>
<add> def test_check
<add> count = 0
<add> handler = ActiveSupport::ForkTracker.after_fork { count += 1 }
<add>
<add> assert_no_difference -> { count } do
<add> 3.times { ActiveSupport::ForkTracker.check! }
<add> end
<add>
<add> Process.stub(:pid, Process.pid + 1) do
<add> assert_difference -> { count }, +1 do
<add> 3.times { ActiveSupport::ForkTracker.check! }
<add> end
<add> end
<add>
<add> assert_difference -> { count }, +1 do
<add> 3.times { ActiveSupport::ForkTracker.check! }
<add> end
<add> ensure
<add> ActiveSupport::ForkTracker.unregister(handler)
<add> end
<add>end | 3 |
Javascript | Javascript | fix duplicate property | 81c66c8d8549a458518a6534ae3e81c5c496fe85 | <ide><path>src/scales/scale.linear.js
<ide> };
<ide>
<ide> var LinearScale = Chart.Element.extend({
<del> calculateRange: helpers.noop, // overridden in the chart. Will set min and max as properties of the scale for later use
<ide> isHorizontal: function() {
<ide> return this.options.position == "top" || this.options.position == "bottom";
<ide> }, | 1 |
Python | Python | update version string | 4223c8e235337fbb2935eb0e6c78eab50b158609 | <ide><path>libcloud/__init__.py
<ide>
<ide> __all__ = ["__version__", "enable_debug"]
<ide>
<del>__version__ = "0.5.0"
<add>__version__ = "0.5.1"
<ide>
<ide> def enable_debug(fo):
<ide> """ | 1 |
Python | Python | join function typeerror fixed | e172e12182868f70d39ff27b307663d6fe9422e1 | <ide><path>examples/kaggle_otto_nn.py
<ide> def preprocess_labels(y, encoder=None, categorical=True):
<ide> def make_submission(y_prob, ids, encoder, fname):
<ide> with open(fname, 'w') as f:
<ide> f.write('id,')
<del> f.write(','.join(encoder.classes_))
<add> f.write(','.join([str(i) for i in encoder.classes_]))
<ide> f.write('\n')
<ide> for i, probs in zip(ids, y_prob):
<ide> probas = ','.join([i] + [str(p) for p in probs.tolist()]) | 1 |
Javascript | Javascript | improve typings for platform.select on ios | b60b70f7ce28c72ff81df1f1a42bd2cc6907c342 | <ide><path>Libraries/Utilities/Platform.ios.js
<ide>
<ide> import NativePlatformConstantsIOS from './NativePlatformConstantsIOS';
<ide>
<del>export type PlatformSelectSpec<D, N, I> = {
<del> default?: D,
<del> native?: N,
<del> ios?: I,
<add>export type PlatformSelectSpec<T> = {
<add> default?: T,
<add> native?: T,
<add> ios?: T,
<ide> ...
<ide> };
<ide>
<ide> const Platform = {
<ide> }
<ide> return false;
<ide> },
<del> select: <D, N, I>(spec: PlatformSelectSpec<D, N, I>): D | N | I =>
<add> select: <T>(spec: PlatformSelectSpec<T>): T =>
<ide> // $FlowFixMe[incompatible-return]
<ide> 'ios' in spec ? spec.ios : 'native' in spec ? spec.native : spec.default,
<ide> }; | 1 |
PHP | PHP | implement the inputinterface where it is needed | 08920aa043c1827088f7d76b6f75a87b45697368 | <ide><path>src/View/Input/Checkbox.php
<ide> */
<ide> namespace Cake\View\Input;
<ide>
<add>use Cake\View\Input\InputInterface;
<add>
<ide> /**
<ide> * Input widget for creating checkbox widgets.
<ide> */
<del>class Checkbox {
<add>class Checkbox implements InputInterface {
<ide>
<ide> /**
<ide> * Template instance.
<ide> public function __construct($templates) {
<ide> * @param array $data The data to create a checkbox with.
<ide> * @return string Generated HTML string.
<ide> */
<del> public function render($data) {
<add> public function render(array $data) {
<ide> $data += [
<ide> 'name' => '',
<ide> 'value' => 1,
<ide><path>src/View/Input/Label.php
<ide> */
<ide> namespace Cake\View\Input;
<ide>
<add>use Cake\View\Input\InputInterface;
<add>
<ide> /**
<ide> * Form 'widget' for creating labels.
<ide> *
<ide> * Generally this element is used by other widgets,
<ide> * and FormHelper itself.
<ide> */
<del>class Label {
<add>class Label implements InputInterface {
<ide>
<ide> /**
<ide> * Templates
<ide> public function __construct($templates) {
<ide> * @param array $data
<ide> * @return string
<ide> */
<del> public function render($data) {
<add> public function render(array $data) {
<ide> $data += [
<ide> 'text' => '',
<ide> 'input' => '',
<ide><path>src/View/Input/MultiCheckbox.php
<ide> namespace Cake\View\Input;
<ide>
<ide> use Cake\Utility\Inflector;
<add>use Cake\View\Input\InputInterface;
<ide>
<ide> /**
<ide> * Input widget class for generating multiple checkboxes.
<ide> *
<ide> */
<del>class MultiCheckbox {
<add>class MultiCheckbox implements InputInterface {
<ide>
<ide> /**
<ide> * Template instance to use.
<ide> public function __construct($templates, $label) {
<ide> * @param array $data
<ide> * @return string
<ide> */
<del> public function render($data) {
<add> public function render(array $data) {
<ide> $data += [
<ide> 'name' => '',
<ide> 'escape' => true,
<ide><path>src/View/Input/Radio.php
<ide> namespace Cake\View\Input;
<ide>
<ide> use Cake\Utility\Inflector;
<add>use Cake\View\InputInterface;
<ide> use Traversable;
<ide>
<ide> /**
<ide> * This class is intended as an internal implementation detail
<ide> * of Cake\View\Helper\FormHelper and is not intended for direct use.
<ide> */
<del>class Radio {
<add>class Radio implements InputInterface {
<ide>
<ide> /**
<ide> * Template instance.
<ide> public function __construct($templates, $label) {
<ide> * @param array $data The data to build radio buttons with.
<ide> * @return string
<ide> */
<del> public function render($data) {
<add> public function render(array $data) {
<ide> $data += [
<ide> 'name' => '',
<ide> 'options' => [],
<ide><path>src/View/Input/SelectBox.php
<ide> */
<ide> namespace Cake\View\Input;
<ide>
<add>use Cake\View\Input\InputInterface;
<ide> use Traversable;
<ide>
<ide> /**
<ide> * This class is intended as an internal implementation detail
<ide> * of Cake\View\Helper\FormHelper and is not intended for direct use.
<ide> */
<del>class SelectBox {
<add>class SelectBox implements InputInterface {
<ide>
<ide> /**
<ide> * Template instance.
<ide> public function __construct($templates) {
<ide> * @return string A generated select box.
<ide> * @throws \RuntimeException when the name attribute is empty.
<ide> */
<del> public function render($data) {
<add> public function render(array $data) {
<ide> $data += [
<ide> 'name' => '',
<ide> 'empty' => false,
<ide><path>src/View/Input/Text.php
<ide> */
<ide> namespace Cake\View\Input;
<ide>
<add>use Cake\View\Input\InputInterface;
<add>
<ide> /**
<ide> * Basic input class.
<ide> *
<ide> * This input class can be used to render basic simple
<ide> * input elements like hidden, text, email, tel and other
<ide> * types.
<ide> */
<del>class Text {
<add>class Text implements InputInterface {
<ide>
<ide> /**
<ide> * StringTemplate instance.
<ide> public function __construct($templates) {
<ide> * @param array $data The data to build an input with.
<ide> * @return string
<ide> */
<del> public function render($data) {
<add> public function render(array $data) {
<ide> $data += [
<ide> 'name' => '',
<ide> 'val' => null, | 6 |
Javascript | Javascript | clarify return value for path method | 0bd2efb99456f4e54a3ac4f736b5fb144e8d9858 | <ide><path>src/ng/location.js
<ide> var locationPrototype = {
<ide> * ```
<ide> *
<ide> * @param {(string|number)=} path New path
<del> * @return {string} path
<add> * @return {(string|object)} path if called with no parameters, or `$location` if called with a parameter
<ide> */
<ide> path: locationGetterSetter('$$path', function(path) {
<ide> path = path !== null ? path.toString() : ''; | 1 |
Go | Go | use fewer modprobes | 6b5dc55e93d2f9ff6afab08f3fa707f860ea3540 | <ide><path>libnetwork/iptables/iptables.go
<ide> func (e ChainError) Error() string {
<ide> }
<ide>
<ide> func probe() {
<del> if out, err := exec.Command("modprobe", "-va", "nf_nat").CombinedOutput(); err != nil {
<del> logrus.Warnf("Running modprobe nf_nat failed with message: `%s`, error: %v", strings.TrimSpace(string(out)), err)
<add> path, err := exec.LookPath("iptables")
<add> if err != nil {
<add> logrus.Warnf("Failed to find iptables: %v", err)
<add> return
<ide> }
<del> if out, err := exec.Command("modprobe", "-va", "xt_conntrack").CombinedOutput(); err != nil {
<del> logrus.Warnf("Running modprobe xt_conntrack failed with message: `%s`, error: %v", strings.TrimSpace(string(out)), err)
<add> if out, err := exec.Command(path, "--wait", "-t", "nat", "-L", "-n").CombinedOutput(); err != nil {
<add> logrus.Warnf("Running iptables --wait -t nat -L -n failed with message: `%s`, error: %v", strings.TrimSpace(string(out)), err)
<ide> }
<ide> }
<ide>
<ide><path>libnetwork/ns/init_linux.go
<ide> func NlHandle() *netlink.Handle {
<ide> func getSupportedNlFamilies() []int {
<ide> fams := []int{syscall.NETLINK_ROUTE}
<ide> // NETLINK_XFRM test
<del> if err := loadXfrmModules(); err != nil {
<del> if checkXfrmSocket() != nil {
<del> logrus.Warnf("Could not load necessary modules for IPSEC rules: %v", err)
<del> } else {
<del> fams = append(fams, syscall.NETLINK_XFRM)
<del> }
<add> if err := checkXfrmSocket(); err != nil {
<add> logrus.Warnf("Could not load necessary modules for IPSEC rules: %v", err)
<ide> } else {
<ide> fams = append(fams, syscall.NETLINK_XFRM)
<ide> }
<ide> func getSupportedNlFamilies() []int {
<ide> return fams
<ide> }
<ide>
<del>func loadXfrmModules() error {
<del> if out, err := exec.Command("modprobe", "-va", "xfrm_user").CombinedOutput(); err != nil {
<del> return fmt.Errorf("Running modprobe xfrm_user failed with message: `%s`, error: %v", strings.TrimSpace(string(out)), err)
<del> }
<del> if out, err := exec.Command("modprobe", "-va", "xfrm_algo").CombinedOutput(); err != nil {
<del> return fmt.Errorf("Running modprobe xfrm_algo failed with message: `%s`, error: %v", strings.TrimSpace(string(out)), err)
<del> }
<del> return nil
<del>}
<del>
<ide> // API check on required xfrm modules (xfrm_user, xfrm_algo)
<ide> func checkXfrmSocket() error {
<ide> fd, err := syscall.Socket(syscall.AF_NETLINK, syscall.SOCK_RAW, syscall.NETLINK_XFRM) | 2 |
Javascript | Javascript | add a missing parenthese | 53355c692aa7473a552b621cde0b6f0c5a681994 | <ide><path>Libraries/StyleSheet/StyleSheet.js
<ide> module.exports = {
<ide> * }
<ide> * });
<ide> *
<del> * StyleSheet.flatten([styles.listItem, styles.selectedListItem]
<add> * StyleSheet.flatten([styles.listItem, styles.selectedListItem])
<ide> * // returns { flex: 1, fontSize: 16, color: 'green' }
<ide> * ```
<ide> * Alternative use: | 1 |
Python | Python | fix tests for topological sort | 7a6cd96af92aed066b7309c221a50804c5fcbf4d | <ide><path>tests/models.py
<ide> def test_dag_topological_sort(self):
<ide> topological_list = dag.topological_sort()
<ide> logging.info(topological_list)
<ide>
<del> self.assertTrue(topological_list[0] == op5 or topological_list[0] == op4)
<del> self.assertTrue(topological_list[1] == op4 or topological_list[1] == op5)
<del> self.assertTrue(topological_list[2] == op1 or topological_list[2] == op2)
<del> self.assertTrue(topological_list[3] == op1 or topological_list[3] == op2)
<add> set1 = [op4, op5]
<add> self.assertTrue(topological_list[0] in set1)
<add> set1.remove(topological_list[0])
<add>
<add> set2 = [op1, op2]
<add> set2.extend(set1)
<add> self.assertTrue(topological_list[1] in set2)
<add> set2.remove(topological_list[1])
<add>
<add> self.assertTrue(topological_list[2] in set2)
<add> set2.remove(topological_list[2])
<add>
<add> self.assertTrue(topological_list[3] in set2)
<add>
<ide> self.assertTrue(topological_list[4] == op3)
<ide>
<ide> dag = DAG( | 1 |
PHP | PHP | fix cs error | f7d0cef611a9b80e9553ffea89c36ded056e2aa4 | <ide><path>src/basics.php
<ide> function __dxn($domain, $context, $singular, $plural, $count, $args = null)
<ide> *
<ide> * @return string Error message.
<ide> */
<del> function json_last_error_msg() {
<add> function json_last_error_msg()
<add> {
<ide> static $errors = array(
<ide> JSON_ERROR_NONE => '',
<ide> JSON_ERROR_DEPTH => 'Maximum stack depth exceeded', | 1 |
Javascript | Javascript | remove author comments | d28c286c183d6db719240cd214b42c6338d1b326 | <ide><path>examples/jsm/loaders/3DMLoader.js
<del>/**
<del> * @author Luis Fraguada / https://github.com/fraguada
<del> */
<del>
<ide> import {
<ide> BufferGeometryLoader,
<ide> FileLoader, | 1 |
Ruby | Ruby | catch the rangeerror closer to the cause | 0601a9f9aa99bfda26578ab86533bb215f1ee463 | <ide><path>activerecord/lib/active_record/associations/association.rb
<ide> def find_target
<ide> end
<ide>
<ide> binds = AssociationScope.get_bind_values(owner, reflection.chain)
<del> sc.execute(binds, klass.connection) { |record| set_inverse_instance(record) } || []
<add> sc.execute(binds, klass.connection) { |record| set_inverse_instance(record) }
<ide> end
<ide>
<ide> # The scope for this association.
<ide><path>activerecord/lib/active_record/connection_adapters/abstract/database_statements.rb
<ide> def select_all(arel, name = nil, binds = [], preparable: nil)
<ide> else
<ide> select(sql, name, binds)
<ide> end
<add> rescue ::RangeError
<add> ActiveRecord::Result.new([], [])
<ide> end
<ide>
<ide> # Returns a record hash with the column names as keys and column values
<ide><path>activerecord/lib/active_record/core.rb
<ide> def find(*ids) # :nodoc:
<ide> where(key => params.bind).limit(1)
<ide> }
<ide>
<del> record = statement.execute([id], connection)&.first
<del> unless record
<del> raise RecordNotFound.new("Couldn't find #{name} with '#{key}'=#{id}", name, key, id)
<del> end
<del> record
<add> statement.execute([id], connection).first ||
<add> raise(RecordNotFound.new("Couldn't find #{name} with '#{key}'=#{id}", name, key, id))
<ide> end
<ide>
<ide> def find_by(*args) # :nodoc:
<ide> def find_by(*args) # :nodoc:
<ide> }
<ide>
<ide> begin
<del> statement.execute(values, connection)&.first
<add> statement.execute(values, connection).first
<ide> rescue TypeError
<ide> raise ActiveRecord::StatementInvalid
<ide> end
<ide><path>activerecord/lib/active_record/statement_cache.rb
<ide> def execute(params, connection, &block)
<ide> sql = query_builder.sql_for bind_values, connection
<ide>
<ide> klass.find_by_sql(sql, bind_values, preparable: true, &block)
<del> rescue ::RangeError
<del> nil
<ide> end
<ide>
<ide> def self.unsupported_value?(value) | 4 |
PHP | PHP | fix tests under error namespace | 6eeed3713e33ccd6e064251590e3461c7659fa61 | <ide><path>src/Error/ExceptionRenderer.php
<ide> public function render()
<ide> if ($unwrapped instanceof CakeException && $isDebug) {
<ide> $this->controller->set($unwrapped->getAttributes());
<ide> }
<del> $this->controller->response = $response;
<add> $this->controller->setResponse($response);
<ide>
<ide> return $this->_outputMessage($template);
<ide> }
<ide> protected function _customMethod($method, $exception)
<ide> $result = call_user_func([$this, $method], $exception);
<ide> $this->_shutdown();
<ide> if (is_string($result)) {
<del> $result = $this->controller->response->withStringBody($result);
<add> $result = $this->controller->getResponse()->withStringBody($result);
<ide> }
<ide>
<ide> return $result;
<ide> protected function _outputMessageSafe($template)
<ide> ->setTemplatePath('Error');
<ide> $view = $this->controller->createView('View');
<ide>
<del> $this->controller->response = $this->controller->response
<add> $response = $this->controller->getResponse()
<ide> ->withType('html')
<ide> ->withStringBody($view->render($template, 'error'));
<add> $this->controller->setResponse($response);
<ide>
<del> return $this->controller->response;
<add> return $response;
<ide> }
<ide>
<ide> /**
<ide> protected function _shutdown()
<ide> $eventManager->on($filter);
<ide> }
<ide> $args = [
<del> 'request' => $this->controller->request,
<del> 'response' => $this->controller->response
<add> 'request' => $this->controller->getRequest(),
<add> 'response' => $this->controller->getResponse(),
<ide> ];
<ide> $result = $dispatcher->dispatchEvent('Dispatcher.afterDispatch', $args);
<ide>
<ide><path>tests/TestCase/Error/DebuggerTest.php
<ide> public function testTrimPath()
<ide> public function testExportVar()
<ide> {
<ide> $Controller = new Controller();
<del> $Controller->helpers = ['Html', 'Form'];
<add> $Controller->viewBuilder()->setHelpers(['Html', 'Form']);
<ide> $View = $Controller->createView();
<ide> $View->int = 2;
<ide> $View->float = 1.333; | 2 |
Javascript | Javascript | ignore events bubbling through disabled elements | 8d9025ca50603e5404c6b50f7fa6c87046ee24c1 | <ide><path>src/event.js
<ide> jQuery.event = {
<ide> event.delegateTarget = this;
<ide>
<ide> // Determine handlers that should run if there are delegated events
<del> // Avoid disabled elements in IE (#6911) and non-left-click bubbling in Firefox (#3861)
<del> if ( delegateCount && event.target.disabled !== true && !(event.button && event.type === "click") ) {
<add> // Avoid non-left-click bubbling in Firefox (#3861)
<add> if ( delegateCount && !(event.button && event.type === "click") ) {
<ide>
<ide> // Pregenerate a single jQuery object for reuse with .is()
<ide> jqcur = jQuery(this);
<ide> jqcur.context = this.ownerDocument || this;
<ide>
<ide> for ( cur = event.target; cur != this; cur = cur.parentNode || this ) {
<del> selMatch = {};
<del> matches = [];
<del> jqcur[0] = cur;
<del> for ( i = 0; i < delegateCount; i++ ) {
<del> handleObj = handlers[ i ];
<del> sel = handleObj.selector;
<del>
<del> if ( selMatch[ sel ] === undefined ) {
<del> selMatch[ sel ] = (
<del> handleObj.quick ? quickIs( cur, handleObj.quick ) : jqcur.is( sel )
<del> );
<add>
<add> // Don't process events on disabled elements (#6911, #8165)
<add> if ( cur.disabled !== true ) {
<add> selMatch = {};
<add> matches = [];
<add> jqcur[0] = cur;
<add> for ( i = 0; i < delegateCount; i++ ) {
<add> handleObj = handlers[ i ];
<add> sel = handleObj.selector;
<add>
<add> if ( selMatch[ sel ] === undefined ) {
<add> selMatch[ sel ] = (
<add> handleObj.quick ? quickIs( cur, handleObj.quick ) : jqcur.is( sel )
<add> );
<add> }
<add> if ( selMatch[ sel ] ) {
<add> matches.push( handleObj );
<add> }
<ide> }
<del> if ( selMatch[ sel ] ) {
<del> matches.push( handleObj );
<add> if ( matches.length ) {
<add> handlerQueue.push({ elem: cur, matches: matches });
<ide> }
<ide> }
<del> if ( matches.length ) {
<del> handlerQueue.push({ elem: cur, matches: matches });
<del> }
<ide> }
<ide> }
<ide>
<ide><path>test/unit/event.js
<ide> test("Delegated events in SVG (#10791)", function() {
<ide> svg.remove();
<ide> });
<ide>
<del>test("Delegated events in forms (#10844; #11145)", function() {
<del> expect(2);
<add>test("Delegated events in forms (#10844; #11145; #8165)", function() {
<add> expect(3);
<ide>
<ide> // Aliases names like "id" cause havoc
<ide> var form = jQuery(
<ide> test("Delegated events in forms (#10844; #11145)", function() {
<ide> .end()
<ide> .off("submit");
<ide>
<add> form
<add> .append( '<button id="nestyDisabledBtn"><span>Zing</span></button>' )
<add> .on( "click", "#nestyDisabledBtn", function() {
<add> ok( true, "enabled/disabled button with nesty elements" );
<add> })
<add> .find( "span" ).trigger( "click" ).end() // yep
<add> .find( "#nestyDisabledBtn" ).prop( "disabled", true ).end()
<add> .find( "span" ).trigger( "click" ).end() // nope
<add> .off( "click" );
<add>
<ide> form.remove();
<ide> });
<ide> | 2 |
Python | Python | fix top k generation for k != 0 | 4b4b07927256a11a4b296c97db198d67c2545fdb | <ide><path>examples/run_gpt2.py
<ide> logger = logging.getLogger(__name__)
<ide>
<ide> def top_k_logits(logits, k):
<add> """
<add> Masks everything but the k top entries as -infinity (1e10).
<add> Used to mask logits such that e^-infinity -> 0 won't contribute to the
<add> sum of the denominator.
<add> """
<ide> if k == 0:
<ide> return logits
<del> values, _ = torch.topk(logits, k)
<del> min_values = values[:, -1]
<del> return torch.where(logits < min_values, torch.ones_like(logits, dtype=logits.dtype) * -1e10, logits)
<add> else:
<add> values = torch.topk(logits, k)[0]
<add> batch_mins = values[:, -1].view(-1, 1).expand_as(logits)
<add> return torch.where(logits < batch_mins, torch.ones_like(logits) * -1e10, logits)
<ide>
<ide> def sample_sequence(model, length, start_token=None, batch_size=None, context=None, temperature=1, top_k=0, device='cuda', sample=True):
<ide> if start_token is None: | 1 |
Go | Go | add line-numbers to asserts | b80e0309d220268a2b9e6aec5bb05d7af330e591 | <ide><path>builder/dockerfile/shell/lex_test.go
<ide> func TestShellParser4EnvVars(t *testing.T) {
<ide> if expected == "error" {
<ide> assert.Check(t, is.ErrorContains(err, ""), "input: %q, result: %q", source, newWord)
<ide> } else {
<del> assert.Check(t, err)
<del> assert.Check(t, is.Equal(newWord, expected))
<add> assert.Check(t, err, "at line %d of %s", lineCount, fn)
<add> assert.Check(t, is.Equal(newWord, expected), "at line %d of %s", lineCount, fn)
<ide> }
<ide> }
<ide> } | 1 |
Ruby | Ruby | remove unused variable | 5fac725a4a1d9afb4402e442ed6a7099a594be73 | <ide><path>activerecord/test/cases/relation_test.rb
<ide> def test_relation_merging_with_merged_joins_as_symbols
<ide> end
<ide>
<ide> def test_relation_merging_with_merged_symbol_joins_keeps_inner_joins
<del> queries = capture_sql { authors_with_commented_posts = Author.joins(:posts).merge(Post.joins(:comments)).to_a }
<add> queries = capture_sql { Author.joins(:posts).merge(Post.joins(:comments)).to_a }
<ide>
<ide> nb_inner_join = queries.sum { |sql| sql.scan(/INNER\s+JOIN/i).size }
<ide> assert_equal 2, nb_inner_join, "Wrong amount of INNER JOIN in query" | 1 |
Text | Text | use newer help out for 'docker' and 'docker run' | effa5e2c18f27b81fc46d1a925d04e6af5695459 | <ide><path>docs/sources/reference/commandline/cli.md
<ide> expect an integer, and they can only be specified once.
<ide> A self-sufficient runtime for linux containers.
<ide>
<ide> Options:
<del> --api-enable-cors=false Enable CORS headers in the remote API
<del> -b, --bridge="" Attach containers to a network bridge
<del> --bip="" Specify network bridge IP
<del> -D, --debug=false Enable debug mode
<del> -d, --daemon=false Enable daemon mode
<del> --dns=[] DNS server to use
<del> --dns-search=[] DNS search domains to use
<del> -e, --exec-driver="native" Exec driver to use
<del> --fixed-cidr="" IPv4 subnet for fixed IPs
<del> --fixed-cidr-v6="" IPv6 subnet for fixed IPs
<del> -G, --group="docker" Group for the unix socket
<del> -g, --graph="/var/lib/docker" Root of the Docker runtime
<del> -H, --host=[] Daemon socket(s) to use or connect to
<del> --icc=true Enable inter-container communication
<del> --insecure-registry=[] Enable insecure registry communication
<del> --ip=0.0.0.0 Default IP when binding container ports
<del> --ip-forward=true Enable net.ipv4.ip_forward
<del> --ip-masq=true Enable IP masquerading
<del> --iptables=true Enable addition of iptables rules
<del> --ipv6=false Enable IPv6 networking
<del> -l, --log-level="info" Set the logging level
<del> --label=[] Set key=value labels to the daemon
<del> --mtu=0 Set the containers network MTU
<del> -p, --pidfile="/var/run/docker.pid" Path to use for daemon PID file
<del> --registry-mirror=[] Preferred Docker registry mirror
<del> -s, --storage-driver="" Storage driver to use
<del> --selinux-enabled=false Enable selinux support
<del> --storage-opt=[] Set storage driver options
<del> --tls=false Use TLS; implied by --tlsverify flag
<del> --tlscacert="/home/sven/.docker/ca.pem" Trust certs signed only by this CA
<del> --tlscert="/home/sven/.docker/cert.pem" Path to TLS certificate file
<del> --tlskey="/home/sven/.docker/key.pem" Path to TLS key file
<del> --tlsverify=false Use TLS and verify the remote
<del> -v, --version=false Print version information and quit
<add> --api-enable-cors=false Enable CORS headers in the remote API
<add> -b, --bridge="" Attach containers to a network bridge
<add> --bip="" Specify network bridge IP
<add> -D, --debug=false Enable debug mode
<add> -d, --daemon=false Enable daemon mode
<add> --dns=[] DNS server to use
<add> --dns-search=[] DNS search domains to use
<add> -e, --exec-driver="native" Exec driver to use
<add> --fixed-cidr="" IPv4 subnet for fixed IPs
<add> --fixed-cidr-v6="" IPv6 subnet for fixed IPs
<add> -G, --group="docker" Group for the unix socket
<add> -g, --graph="/var/lib/docker" Root of the Docker runtime
<add> -H, --host=[] Daemon socket(s) to use or connect to
<add> -h, --help=false Print usage
<add> --icc=true Enable inter-container communication
<add> --insecure-registry=[] Enable insecure registry communication
<add> --ip=0.0.0.0 Default IP when binding container ports
<add> --ip-forward=true Enable net.ipv4.ip_forward
<add> --ip-masq=true Enable IP masquerading
<add> --iptables=true Enable addition of iptables rules
<add> --ipv6=false Enable IPv6 networking
<add> -l, --log-level="info" Set the logging level
<add> --label=[] Set key=value labels to the daemon
<add> --mtu=0 Set the containers network MTU
<add> -p, --pidfile="/var/run/docker.pid" Path to use for daemon PID file
<add> --registry-mirror=[] Preferred Docker registry mirror
<add> -s, --storage-driver="" Storage driver to use
<add> --selinux-enabled=false Enable selinux support
<add> --storage-opt=[] Set storage driver options
<add> --tls=false Use TLS; implied by --tlsverify flag
<add> --tlscacert="~/.docker/ca.pem" Trust certs signed only by this CA
<add> --tlscert="~/.docker/cert.pem" Path to TLS certificate file
<add> --tlskey="~/.docker/key.pem" Path to TLS key file
<add> --tlsverify=false Use TLS and verify the remote
<add> -v, --version=false Print version information and quit
<ide>
<ide> Options with [] may be specified multiple times.
<ide>
<ide> share executable memory between devices. Use `docker -d -s btrfs -g /mnt/btrfs_p
<ide>
<ide> The `overlay` is a very fast union filesystem. It is now merged in the main
<ide> Linux kernel as of [3.18.0](https://lkml.org/lkml/2014/10/26/137).
<del>Call `docker -d -s overlay` to use it.
<del>> **Note:**
<add>Call `docker -d -s overlay` to use it.
<add>> **Note:**
<ide> > It is currently unsupported on `btrfs` or any Copy on Write filesystem
<ide> > and should only be used over `ext4` partitions.
<ide>
<ide> Currently supported options are:
<ide> are inherently "sparse", so a 10G device which is mostly empty doesn't use
<ide> 10 GB of space on the pool. However, the filesystem will use more space for
<ide> the empty case the larger the device is.
<del>
<add>
<ide> **Warning**: This value affects the system-wide "base" empty filesystem
<ide> that may already be initialized and inherited by pulled images. Typically,
<ide> a change to this value will require additional steps to take effect:
<del>
<add>
<ide> $ sudo service docker stop
<ide> $ sudo rm -rf /var/lib/docker
<ide> $ sudo service docker start
<ide> verification failed (i.e., wrong CA).
<ide> By default, Docker assumes all, but local (see local registries below), registries are secure.
<ide> Communicating with an insecure registry is not possible if Docker assumes that registry is secure.
<ide> In order to communicate with an insecure registry, the Docker daemon requires `--insecure-registry`
<del>in one of the following two forms:
<add>in one of the following two forms:
<ide>
<ide> * `--insecure-registry myregistry:5000` tells the Docker daemon that myregistry:5000 should be considered insecure.
<ide> * `--insecure-registry 10.1.0.0/16` tells the Docker daemon that all registries whose domain resolve to an IP address is part
<ide> attaching to a tty-enabled container (i.e.: launched with `-t`).
<ide> $ echo $?
<ide> 0
<ide> $ docker ps -a | grep topdemo
<del> 7998ac8581f9 ubuntu:14.04 "/usr/bin/top -b" 38 seconds ago Exited (0) 21 seconds ago topdemo
<add> 7998ac8581f9 ubuntu:14.04 "/usr/bin/top -b" 38 seconds ago Exited (0) 21 seconds ago topdemo
<ide>
<ide> And in this second example, you can see the exit code returned by the `bash` process
<ide> is returned by the `docker attach` command to its caller too:
<ide> instructions instead of `Dockerfile`.
<ide> $ sudo docker build -f dockerfiles/Dockerfile.debug -t myapp_debug .
<ide> $ sudo docker build -f dockerfiles/Dockerfile.prod -t myapp_prod .
<ide>
<del>The above commands will build the current build context (as specified by
<del>the `.`) twice, once using a debug version of a `Dockerfile` and once using
<add>The above commands will build the current build context (as specified by
<add>the `.`) twice, once using a debug version of a `Dockerfile` and once using
<ide> a production version.
<ide>
<ide> $ cd /home/me/myapp/some/dir/really/deep
<ide> $ sudo docker build -f /home/me/myapp/dockerfiles/debug /home/me/myapp
<ide> $ sudo docker build -f ../../../../dockerfiles/debug /home/me/myapp
<ide>
<ide> These two `docker build` commands do the exact same thing. They both
<del>use the contents of the `debug` file instead of looking for a `Dockerfile`
<del>and will use `/home/me/myapp` as the root of the build context. Note that
<del>`debug` is in the directory structure of the build context, regardless of how
<add>use the contents of the `debug` file instead of looking for a `Dockerfile`
<add>and will use `/home/me/myapp` as the root of the build context. Note that
<add>`debug` is in the directory structure of the build context, regardless of how
<ide> you refer to it on the command line.
<ide>
<ide> > **Note:** `docker build` will return a `no such file or directory` error
<ide> For example:
<ide>
<ide> > **Note:**
<ide> > `docker export` does not export the contents of volumes associated with the
<del>> container. If a volume is mounted on top of an existing directory in the
<del>> container, `docker export` will export the contents of the *underlying*
<add>> container. If a volume is mounted on top of an existing directory in the
<add>> container, `docker export` will export the contents of the *underlying*
<ide> > directory, not the contents of the volume.
<ide> >
<ide> > Refer to [Backup, restore, or migrate data volumes](/userguide/dockervolumes/#backup-restore-or-migrate-data-volumes)
<ide> for further details.
<ide>
<ide> rename a existing container to a NEW_NAME
<ide>
<del>The `docker rename` command allows the container to be renamed to a different name.
<add>The `docker rename` command allows the container to be renamed to a different name.
<ide>
<ide> ## ps
<ide>
<ide> removed before the image is removed.
<ide> --cidfile="" Write the container ID to the file
<ide> --cpuset="" CPUs in which to allow execution (0-3, 0,1)
<ide> -d, --detach=false Run container in background and print container ID
<del> --device=[] Add a host device to the container (e.g. --device=/dev/sdc:/dev/xvdc:rwm)
<add> --device=[] Add a host device to the container
<ide> --dns=[] Set custom DNS servers
<del> --dns-search=[] Set custom DNS search domains (Use --dns-search=. if you don't wish to set the search domain)
<add> --dns-search=[] Set custom DNS search domains
<ide> -e, --env=[] Set environment variables
<ide> --entrypoint="" Overwrite the default ENTRYPOINT of the image
<del> --env-file=[] Read in a line delimited file of environment variables
<del> --expose=[] Expose a port or a range of ports (e.g. --expose=3300-3310) from the container without publishing it to your host
<add> --env-file=[] Read in a file of environment variables
<add> --expose=[] Expose a port or a range of ports
<ide> -h, --hostname="" Container host name
<add> --help=false Print usage
<ide> -i, --interactive=false Keep STDIN open even if not attached
<del> --ipc="" Default is to create a private IPC namespace (POSIX SysV IPC) for the container
<del> 'container:<name|id>': reuses another container shared memory, semaphores and message queues
<del> 'host': use the host shared memory,semaphores and message queues inside the container. Note: the host mode gives the container full access to local shared memory and is therefore considered insecure.
<del> --link=[] Add link to another container in the form of name:alias
<add> --ipc="" IPC namespace to use
<add> --link=[] Add link to another container
<ide> --lxc-conf=[] Add custom lxc options
<ide> -m, --memory="" Memory limit
<del> -memory-swap="" Total memory (memory+swap), '-1' to disable swap
<ide> --mac-address="" Container MAC address (e.g. 92:d0:c6:0a:29:33)
<add> --memory-swap="" Total memory (memory + swap), '-1' to disable swap
<ide> --name="" Assign a name to the container
<ide> --net="bridge" Set the Network mode for the container
<del> 'bridge': creates a new network stack for the container on the docker bridge
<del> 'none': no networking for this container
<del> 'container:<name|id>': reuses another container network stack
<del> 'host': use the host network stack inside the container. Note: the host mode gives the container full access to local system services such as D-bus and is therefore considered insecure.
<del> -P, --publish-all=false Publish all exposed ports to random ports on the host interfaces
<del> -p, --publish=[] Publish a container's port to the host
<del> format: ip:hostPort:containerPort | ip::containerPort | hostPort:containerPort | containerPort
<del> Both hostPort and containerPort can be specified as a range of ports.
<del> When specifying ranges for both, the number of container ports in the range must match the number of host ports in the range. (e.g., `-p 1234-1236:1234-1236/tcp`)
<del> (use 'docker port' to see the actual mapping)
<del> --pid=host 'host': use the host PID namespace inside the container. Note: the host mode gives the container full access to local system services such as D-bus and is therefore considered insecure.
<add> -P, --publish-all=false Publish all exposed ports to random ports
<add> -p, --publish=[] Publish a container's port(s) to the host
<add> --pid="" PID namespace to use
<ide> --privileged=false Give extended privileges to this container
<del> --read-only=false Mount the container's root filesystem as read only
<del> --restart="" Restart policy to apply when a container exits (no, on-failure[:max-retry], always)
<add> --read-only=false Mount the container's root filesystem as read only
<add> --restart="" Restart policy to apply when a container exits
<ide> --rm=false Automatically remove the container when it exits
<ide> --security-opt=[] Security Options
<ide> --sig-proxy=true Proxy received signals to the process
<ide> -t, --tty=false Allocate a pseudo-TTY
<del> -u, --user="" Username or UID
<del> -v, --volume=[] Bind mount a volume (e.g., from the host: -v /host:/container, from Docker: -v /container)
<add> -u, --user="" Username or UID (format: <name|uid>[:<group|gid>])
<add> -v, --volume=[] Bind mount a volume
<ide> --volumes-from=[] Mount volumes from the specified container(s)
<ide> -w, --workdir="" Working directory inside the container
<ide>
<ide> folder before starting your container.
<ide>
<ide> $ sudo docker run --read-only -v /icanwrite busybox touch /icanwrite here
<ide>
<del>Volumes can be used in combination with `--read-only` to control where
<add>Volumes can be used in combination with `--read-only` to control where
<ide> a container writes files. The `--read-only` flag mounts the container's root
<ide> filesystem as read only prohibiting writes to locations other than the
<ide> specified volumes for the container.
<ide> ports in Docker.
<ide> This sets environmental variables in the container. For illustration all three
<ide> flags are shown here. Where `-e`, `--env` take an environment variable and
<ide> value, or if no `=` is provided, then that variable's current value is passed
<del>through (i.e. `$MYVAR1` from the host is set to `$MYVAR1` in the container).
<add>through (i.e. `$MYVAR1` from the host is set to `$MYVAR1` in the container).
<ide> When no `=` is provided and that variable is not defined in the client's
<ide> environment then that variable will be removed from the container's list of
<ide> environment variables.
<ide> See [*Find Public Images on Docker Hub*](
<ide> /userguide/dockerrepos/#searching-for-images) for
<ide> more details on finding shared images from the command line.
<ide>
<del>> **Note:**
<del>> Search queries will only return up to 25 results
<add>> **Note:**
<add>> Search queries will only return up to 25 results
<ide>
<ide> ## start
<ide>
<ide> Running `docker stats` on multiple containers
<ide> redis2 0.07% 2.746 MiB/64 MiB 4.29% 1.266 KiB/648 B
<ide>
<ide>
<del>The `docker stats` command will only return a live stream of data for running
<add>The `docker stats` command will only return a live stream of data for running
<ide> containers. Stopped containers will not return any data.
<ide>
<ide> > **Note:** | 1 |
Ruby | Ruby | recover `changes_applied` performance " | 5fcbdcfb574c731841be12764c50d9587b58345f | <ide><path>activemodel/lib/active_model/attribute_mutation_tracker.rb
<ide> def change_to_attribute(attr_name)
<ide> end
<ide> end
<ide>
<del> def changed_attribute_names
<del> attr_names.select { |attr| changed?(attr) }
<del> end
<del>
<ide> def any_changes?
<ide> attr_names.any? { |attr| changed?(attr) }
<ide> end
<ide> def forget_change(*)
<ide>
<ide> def original_value(*)
<ide> end
<add>
<add> def force_change(*)
<add> end
<ide> end
<ide> end
<ide><path>activemodel/lib/active_model/dirty.rb
<ide> require "active_support/hash_with_indifferent_access"
<ide> require "active_support/core_ext/object/duplicable"
<ide> require "active_model/attribute_mutation_tracker"
<del>require "active_model/attribute_set"
<ide>
<ide> module ActiveModel
<ide> # == Active \Model \Dirty
<ide> def initialize_dup(other) # :nodoc:
<ide> end
<ide>
<ide> def changes_applied # :nodoc:
<del> _prepare_changes
<add> @previously_changed = changes
<ide> @mutations_before_last_save = mutations_from_database
<add> @attributes_changed_by_setter = ActiveSupport::HashWithIndifferentAccess.new
<ide> forget_attribute_assignments
<ide> @mutations_from_database = nil
<ide> end
<ide> def changes_applied # :nodoc:
<ide> # person.name = 'bob'
<ide> # person.changed? # => true
<ide> def changed?
<del> mutations_from_database.any_changes?
<add> changed_attributes.present?
<ide> end
<ide>
<ide> # Returns an array with the name of the attributes with unsaved changes.
<ide> def changed?
<ide> # person.name = 'bob'
<ide> # person.changed # => ["name"]
<ide> def changed
<del> mutations_from_database.changed_attribute_names
<add> changed_attributes.keys
<ide> end
<ide>
<ide> # Handles <tt>*_changed?</tt> for +method_missing+.
<ide> def attribute_changed?(attr, from: OPTION_NOT_GIVEN, to: OPTION_NOT_GIVEN) # :nodoc:
<del> !!mutations_from_database.changed?(attr) &&
<add> !!changes_include?(attr) &&
<ide> (to == OPTION_NOT_GIVEN || to == _read_attribute(attr)) &&
<del> (from == OPTION_NOT_GIVEN || from == attribute_was(attr))
<add> (from == OPTION_NOT_GIVEN || from == changed_attributes[attr])
<ide> end
<ide>
<ide> # Handles <tt>*_was</tt> for +method_missing+.
<ide> def attribute_was(attr) # :nodoc:
<del> mutations_from_database.original_value(attr)
<add> attribute_changed?(attr) ? changed_attributes[attr] : _read_attribute(attr)
<ide> end
<ide>
<ide> # Handles <tt>*_previously_changed?</tt> for +method_missing+.
<ide> def attribute_previously_changed?(attr) #:nodoc:
<del> mutations_before_last_save.changed?(attr)
<add> previous_changes_include?(attr)
<ide> end
<ide>
<ide> # Restore all previous data of the provided attributes.
<ide> def restore_attributes(attributes = changed)
<ide>
<ide> # Clears all dirty data: current changes and previous changes.
<ide> def clear_changes_information
<add> @previously_changed = ActiveSupport::HashWithIndifferentAccess.new
<ide> @mutations_before_last_save = nil
<add> @attributes_changed_by_setter = ActiveSupport::HashWithIndifferentAccess.new
<ide> forget_attribute_assignments
<ide> @mutations_from_database = nil
<ide> end
<ide>
<ide> def clear_attribute_changes(attr_names)
<add> attributes_changed_by_setter.except!(*attr_names)
<ide> attr_names.each do |attr_name|
<ide> clear_attribute_change(attr_name)
<ide> end
<ide> def clear_attribute_changes(attr_names)
<ide> # person.name = 'robert'
<ide> # person.changed_attributes # => {"name" => "bob"}
<ide> def changed_attributes
<del> mutations_from_database.changed_values.freeze
<add> # This should only be set by methods which will call changed_attributes
<add> # multiple times when it is known that the computed value cannot change.
<add> if defined?(@cached_changed_attributes)
<add> @cached_changed_attributes
<add> else
<add> attributes_changed_by_setter.reverse_merge(mutations_from_database.changed_values).freeze
<add> end
<ide> end
<ide>
<ide> # Returns a hash of changed attributes indicating their original
<ide> def changed_attributes
<ide> # person.name = 'bob'
<ide> # person.changes # => { "name" => ["bill", "bob"] }
<ide> def changes
<del> _prepare_changes
<del> mutations_from_database.changes
<add> cache_changed_attributes do
<add> ActiveSupport::HashWithIndifferentAccess[changed.map { |attr| [attr, attribute_change(attr)] }]
<add> end
<ide> end
<ide>
<ide> # Returns a hash of attributes that were changed before the model was saved.
<ide> def changes
<ide> # person.save
<ide> # person.previous_changes # => {"name" => ["bob", "robert"]}
<ide> def previous_changes
<del> mutations_before_last_save.changes
<add> @previously_changed ||= ActiveSupport::HashWithIndifferentAccess.new
<add> @previously_changed.merge(mutations_before_last_save.changes)
<ide> end
<ide>
<ide> def attribute_changed_in_place?(attr_name) # :nodoc:
<ide> def mutations_from_database
<ide> unless defined?(@mutations_from_database)
<ide> @mutations_from_database = nil
<ide> end
<del>
<del> unless defined?(@attributes)
<del> @_pseudo_attributes = true
<del> @attributes = AttributeSet.new(
<del> Hash.new { |h, attr|
<del> h[attr] = Attribute.with_cast_value(attr, _clone_attribute(attr), Type.default_value)
<del> }
<del> )
<add> @mutations_from_database ||= if defined?(@attributes)
<add> ActiveModel::AttributeMutationTracker.new(@attributes)
<add> else
<add> NullMutationTracker.instance
<ide> end
<del>
<del> @mutations_from_database ||= ActiveModel::AttributeMutationTracker.new(@attributes)
<ide> end
<ide>
<ide> def forget_attribute_assignments
<ide> def mutations_before_last_save
<ide> @mutations_before_last_save ||= ActiveModel::NullMutationTracker.instance
<ide> end
<ide>
<add> def cache_changed_attributes
<add> @cached_changed_attributes = changed_attributes
<add> yield
<add> ensure
<add> clear_changed_attributes_cache
<add> end
<add>
<add> def clear_changed_attributes_cache
<add> remove_instance_variable(:@cached_changed_attributes) if defined?(@cached_changed_attributes)
<add> end
<add>
<add> # Returns +true+ if attr_name is changed, +false+ otherwise.
<add> def changes_include?(attr_name)
<add> attributes_changed_by_setter.include?(attr_name) || mutations_from_database.changed?(attr_name)
<add> end
<add> alias attribute_changed_by_setter? changes_include?
<add>
<add> # Returns +true+ if attr_name were changed before the model was saved,
<add> # +false+ otherwise.
<add> def previous_changes_include?(attr_name)
<add> previous_changes.include?(attr_name)
<add> end
<add>
<ide> # Handles <tt>*_change</tt> for +method_missing+.
<ide> def attribute_change(attr)
<del> [attribute_was(attr), _read_attribute(attr)] if attribute_changed?(attr)
<add> [changed_attributes[attr], _read_attribute(attr)] if attribute_changed?(attr)
<ide> end
<ide>
<ide> # Handles <tt>*_previous_change</tt> for +method_missing+.
<ide> def attribute_previous_change(attr)
<del> mutations_before_last_save.change_to_attribute(attr)
<add> previous_changes[attr] if attribute_previously_changed?(attr)
<ide> end
<ide>
<ide> # Handles <tt>*_will_change!</tt> for +method_missing+.
<ide> def attribute_will_change!(attr)
<del> attr = attr.to_s
<del> mutations_from_database.force_change(attr).tap do
<del> @attributes[attr] if defined?(@_pseudo_attributes)
<add> unless attribute_changed?(attr)
<add> begin
<add> value = _read_attribute(attr)
<add> value = value.duplicable? ? value.clone : value
<add> rescue TypeError, NoMethodError
<add> end
<add>
<add> set_attribute_was(attr, value)
<ide> end
<add> mutations_from_database.force_change(attr)
<ide> end
<ide>
<ide> # Handles <tt>restore_*!</tt> for +method_missing+.
<ide> def restore_attribute!(attr)
<ide> if attribute_changed?(attr)
<del> __send__("#{attr}=", attribute_was(attr))
<add> __send__("#{attr}=", changed_attributes[attr])
<ide> clear_attribute_changes([attr])
<ide> end
<ide> end
<ide>
<del> def _prepare_changes
<del> if defined?(@_pseudo_attributes)
<del> changed.each do |attr|
<del> @attributes.write_from_user(attr, _read_attribute(attr))
<del> end
<del> end
<add> def attributes_changed_by_setter
<add> @attributes_changed_by_setter ||= ActiveSupport::HashWithIndifferentAccess.new
<ide> end
<ide>
<del> def _clone_attribute(attr)
<del> value = _read_attribute(attr)
<del> value.duplicable? ? value.clone : value
<del> rescue TypeError, NoMethodError
<del> value
<add> # Force an attribute to have a particular "before" value
<add> def set_attribute_was(attr, old_value)
<add> attributes_changed_by_setter[attr] = old_value
<ide> end
<ide> end
<ide> end
<ide><path>activerecord/lib/active_record/attribute_methods/dirty.rb
<ide> module Dirty
<ide> # <tt>reload</tt> the record and clears changed attributes.
<ide> def reload(*)
<ide> super.tap do
<add> @previously_changed = ActiveSupport::HashWithIndifferentAccess.new
<ide> @mutations_before_last_save = nil
<add> @attributes_changed_by_setter = ActiveSupport::HashWithIndifferentAccess.new
<ide> @mutations_from_database = nil
<ide> end
<ide> end
<ide> def changes_to_save
<ide>
<ide> # Alias for +changed+
<ide> def changed_attribute_names_to_save
<del> mutations_from_database.changed_attribute_names
<add> changes_to_save.keys
<ide> end
<ide>
<ide> # Alias for +changed_attributes+
<ide> def attributes_in_database
<del> mutations_from_database.changed_values
<add> changes_to_save.transform_values(&:first)
<ide> end
<ide>
<ide> private
<ide><path>activerecord/lib/active_record/persistence.rb
<ide> def becomes(klass)
<ide> became.send(:initialize)
<ide> became.instance_variable_set("@attributes", @attributes)
<ide> became.instance_variable_set("@mutations_from_database", @mutations_from_database) if defined?(@mutations_from_database)
<add> became.instance_variable_set("@changed_attributes", attributes_changed_by_setter)
<ide> became.instance_variable_set("@new_record", new_record?)
<ide> became.instance_variable_set("@destroyed", destroyed?)
<ide> became.errors.copy!(errors) | 4 |
Text | Text | add german language link inside certifications | d57da28c4fd8b3223daf17ae7737fe726ed45c18 | <ide><path>docs/index.md
<ide> Certifications are already live in some major world languages like below:
<ide> - [Portuguese (Português)](https://www.freecodecamp.org/portuguese/learn)
<ide> - [Ukrainian (Українська)](https://www.freecodecamp.org/ukrainian/learn)
<ide> - [Japanese (日本語)](https://www.freecodecamp.org/japanese/learn)
<add>- [German (Deutsch)](https://www.freecodecamp.org/german/learn)
<ide>
<ide> We encourage you to read the [announcement here](https://www.freecodecamp.org/news/help-translate-freecodecamp-language/) and share it with your friends to get them excited about this.
<ide> | 1 |
PHP | PHP | add test case | 3bc4d43079222bcaf930fa6b5c87040bbc9f76f5 | <ide><path>src/View/Helper/UrlHelper.php
<ide> public function script($path, array $options = [])
<ide> *
<ide> * Depending on options passed provides full URL with domain name. Also calls
<ide> * `Helper::assetTimestamp()` to add timestamp to local files.
<add> *
<add> * ### Options:
<add> *
<add> * - `fullBase` Boolean true or a string (e.g. https://example) to
<add> * return full URL with protocol and domain name.
<add> * - `pathPrefix` Path prefix for relative URLs
<add> * - `ext` Asset extension to append
<add> * - `plugin` False value will prevent parsing path as a plugin
<add> * - `timestamp` Overrides the value of `Asset.timestamp` in Configure.
<add> * Set to false to skip timestamp generation.
<add> * Set to true to apply timestamps when debug is true. Set to 'force' to always
<add> * enable timestamping regardless of debug value.
<ide> *
<ide> * @param string|array $path Path string or URL array
<del> * @param array $options Options array. Possible keys:
<del> * `fullBase` Return full URL with domain name. Bool or string.
<del> * `pathPrefix` Path prefix for relative URLs
<del> * `ext` Asset extension to append
<del> * `plugin` False value will prevent parsing path as a plugin
<del> * `timestamp` Overrides the value of `Asset.timestamp` in Configure.
<del> * Set to false to skip timestamp generation.
<del> * Set to true to apply timestamps when debug is true. Set to 'force' to always
<del> * enable timestamping regardless of debug value.
<add> * @param array $options Options array.
<ide> * @return string Generated URL
<ide> */
<ide> public function assetUrl($path, array $options = [])
<ide><path>tests/TestCase/View/Helper/UrlHelperTest.php
<ide> public function testAssetUrlPlugin()
<ide> $this->removePlugins(['TestPlugin']);
<ide> }
<ide>
<add> /**
<add> * Tests assetUrl() with full base URL.
<add> *
<add> * @return void
<add> */
<add> public function testAssetUrlFullBase()
<add> {
<add> $result = $this->Helper->assetUrl('img/foo.jpg', ['fullBase' => true]);
<add> $this->assertEquals(Router::fullBaseUrl() . '/img/foo.jpg', $result);
<add>
<add> $result = $this->Helper->assetUrl('img/foo.jpg', ['fullBase' => 'https://xyz/']);
<add> $this->assertEquals('https://xyz/img/foo.jpg', $result);
<add> }
<add>
<ide> /**
<ide> * test assetUrl and Asset.timestamp = force
<ide> *
<ide> public function testAssetTimestampPluginsAndThemes()
<ide> $result = $this->Helper->assetTimestamp('/test_theme/js/non_existant.js');
<ide> $this->assertRegExp('#/test_theme/js/non_existant.js$#', $result, 'No error on missing file');
<ide> }
<del>
<add>
<ide> /**
<ide> * test script()
<ide> * | 2 |
PHP | PHP | fix morphto lazy eager loading | ef2dc2e7ccbcb3d074e81c552e35df36a6fc25e6 | <ide><path>src/Illuminate/Database/Eloquent/Builder.php
<ide> public function getRelation($name)
<ide> // and error prone. We don't want constraints because we add eager ones.
<ide> $relation = Relation::noConstraints(function () use ($name) {
<ide> try {
<del> return $this->getModel()->{$name}();
<add> return $this->getModel()->newInstance()->$name();
<ide> } catch (BadMethodCallException $e) {
<ide> throw RelationNotFoundException::make($this->getModel(), $name);
<ide> }
<ide><path>tests/Database/DatabaseEloquentBuilderTest.php
<ide> public function testGetRelationProperlySetsNestedRelationships()
<ide> {
<ide> $builder = $this->getBuilder();
<ide> $builder->setModel($this->getMockModel());
<del> $builder->getModel()->shouldReceive('orders')->once()->andReturn($relation = m::mock('stdClass'));
<add> $builder->getModel()->shouldReceive('newInstance->orders')->once()->andReturn($relation = m::mock('stdClass'));
<ide> $relationQuery = m::mock('stdClass');
<ide> $relation->shouldReceive('getQuery')->andReturn($relationQuery);
<ide> $relationQuery->shouldReceive('with')->once()->with(['lines' => null, 'lines.details' => null]);
<ide> public function testGetRelationProperlySetsNestedRelationshipsWithSimilarNames()
<ide> {
<ide> $builder = $this->getBuilder();
<ide> $builder->setModel($this->getMockModel());
<del> $builder->getModel()->shouldReceive('orders')->once()->andReturn($relation = m::mock('stdClass'));
<del> $builder->getModel()->shouldReceive('ordersGroups')->once()->andReturn($groupsRelation = m::mock('stdClass'));
<add> $builder->getModel()->shouldReceive('newInstance->orders')->once()->andReturn($relation = m::mock('stdClass'));
<add> $builder->getModel()->shouldReceive('newInstance->ordersGroups')->once()->andReturn($groupsRelation = m::mock('stdClass'));
<ide>
<ide> $relationQuery = m::mock('stdClass');
<ide> $relation->shouldReceive('getQuery')->andReturn($relationQuery);
<ide><path>tests/Integration/Database/EloquentMorphToLazyEagerLoadingTest.php
<add><?php
<add>
<add>namespace Illuminate\Tests\Integration\Database\EloquentMorphToLazyEagerLoadingTest;
<add>
<add>use Illuminate\Support\Facades\Schema;
<add>use Illuminate\Database\Eloquent\Model;
<add>use Illuminate\Database\Schema\Blueprint;
<add>use Illuminate\Tests\Integration\Database\DatabaseTestCase;
<add>
<add>/**
<add> * @group integration
<add> */
<add>class EloquentMorphToLazyEagerLoadingTest extends DatabaseTestCase
<add>{
<add> public function setUp()
<add> {
<add> parent::setUp();
<add>
<add> Schema::create('users', function (Blueprint $table) {
<add> $table->increments('id');
<add> });
<add>
<add> Schema::create('posts', function (Blueprint $table) {
<add> $table->increments('post_id');
<add> $table->unsignedInteger('user_id');
<add> });
<add>
<add> Schema::create('videos', function (Blueprint $table) {
<add> $table->increments('video_id');
<add> });
<add>
<add> Schema::create('comments', function (Blueprint $table) {
<add> $table->increments('id');
<add> $table->string('commentable_type');
<add> $table->integer('commentable_id');
<add> });
<add>
<add> $user = User::create();
<add>
<add> $post = tap((new Post)->user()->associate($user))->save();
<add>
<add> $video = Video::create();
<add>
<add> (new Comment)->commentable()->associate($post)->save();
<add> (new Comment)->commentable()->associate($video)->save();
<add> }
<add>
<add> public function test_lazy_eager_loading()
<add> {
<add> $comments = Comment::all();
<add>
<add> \DB::enableQueryLog();
<add>
<add> $comments->load('commentable');
<add>
<add> $this->assertCount(3, \DB::getQueryLog());
<add> $this->assertTrue($comments[0]->relationLoaded('commentable'));
<add> $this->assertTrue($comments[0]->commentable->relationLoaded('user'));
<add> $this->assertTrue($comments[1]->relationLoaded('commentable'));
<add> }
<add>}
<add>
<add>class Comment extends Model
<add>{
<add> public $timestamps = false;
<add>
<add> public function commentable()
<add> {
<add> return $this->morphTo();
<add> }
<add>}
<add>
<add>class Post extends Model
<add>{
<add> public $timestamps = false;
<add> protected $primaryKey = 'post_id';
<add> protected $with = ['user'];
<add>
<add> public function user()
<add> {
<add> return $this->belongsTo(User::class);
<add> }
<add>}
<add>
<add>class User extends Model
<add>{
<add> public $timestamps = false;
<add>}
<add>
<add>class Video extends Model
<add>{
<add> public $timestamps = false;
<add> protected $primaryKey = 'video_id';
<add>} | 3 |
Ruby | Ruby | use default location when possible | 4a39070c268b6dba063b938d8663c70f60311230 | <ide><path>Library/Homebrew/os/mac/xquartz.rb
<ide> module Mac
<ide> module XQuartz
<ide> module_function
<ide>
<add> # TODO: confirm this path when you have internet
<add> DEFAULT_BUNDLE_PATH = Pathname.new("Applications/Utilities/XQuartz.app").freeze
<ide> FORGE_BUNDLE_ID = "org.macosforge.xquartz.X11".freeze
<ide> APPLE_BUNDLE_ID = "org.x.X11".freeze
<ide> FORGE_PKG_ID = "org.macosforge.xquartz.pkg".freeze
<ide> def latest_version
<ide> end
<ide>
<ide> def bundle_path
<add> # Use the default location if it exists.
<add> return DEFAULT_BUNDLE_PATH if DEFAULT_BUNDLE_PATH.exist?
<add>
<add> # Ask Spotlight where XQuartz is. If the user didn't install XQuartz
<add> # in the conventional place, this is our only option.
<ide> MacOS.app_with_bundle_id(FORGE_BUNDLE_ID, APPLE_BUNDLE_ID)
<ide> end
<ide> | 1 |
PHP | PHP | fix indentation and add __construct() docblocks | 89d4b58073160c57e339188e8479adea98bdc834 | <ide><path>lib/Cake/View/Helper/NumberHelper.php
<ide> */
<ide> class NumberHelper extends AppHelper {
<ide>
<del> /** CakeNumber instance
<del> */
<add>/** CakeNumber instance
<add> */
<ide> protected $_engine = null;
<ide>
<del> /* Default Constructor
<del> *
<del> * @param View $View The View this helper is being attached to.
<del> * @param array $settings Configuration settings for the helper
<del> */
<add>/* Default Constructor
<add> *
<add> * ### Settings:
<add> *
<add> * - `engine` Class name to use to replace CakeNumber functionality
<add> * The class needs to be placed in the `Utility` directory.
<add> *
<add> * @param View $View The View this helper is being attached to.
<add> * @param array $settings Configuration settings for the helper
<add> */
<ide> function __construct(View $View, $settings = array()) {
<ide> $settings = Set::merge(array('engine' => 'CakeNumber'), $settings);
<ide> parent::__construct($View, $settings);
<ide> function __construct(View $View, $settings = array()) {
<ide> }
<ide> }
<ide>
<del> /**
<del> * Call methods from CakeNumber utility class
<del> */
<add>/**
<add> * Call methods from CakeNumber utility class
<add> */
<ide> public function __call($method, $params) {
<ide> return call_user_func_array(array($this->_engine, $method), $params);
<ide> }
<ide><path>lib/Cake/View/Helper/TextHelper.php
<ide> class TextHelper extends AppHelper {
<ide> /**
<ide> * Constructor
<ide> *
<add> * ### Settings:
<add> *
<add> * - `engine` Class name to use to replace String functionality.
<add> * The class needs to be placed in the `Utility` directory.
<add> *
<ide> * @param View $View the view object the helper is attached to.
<ide> * @param array $settings Settings array Settings array
<ide> */
<ide> public function __construct(View $View, $settings = array()) {
<ide> }
<ide> }
<ide>
<del> /**
<del> * Call methods from String utility class
<del> */
<add>/**
<add> * Call methods from String utility class
<add> */
<ide> public function __call($method, $params) {
<ide> return call_user_func_array(array($this->_engine, $method), $params);
<ide> }
<ide><path>lib/Cake/View/Helper/TimeHelper.php
<ide> class TimeHelper extends AppHelper {
<ide> /**
<ide> * Constructor
<ide> *
<add> * ### Settings:
<add> *
<add> * - `engine` Class name to use to replace CakeTime functionality
<add> * The class needs to be placed in the `Utility` directory.
<add> *
<ide> * @param View $View the view object the helper is attached to.
<ide> * @param array $settings Settings array Settings array
<ide> */
<ide> public function __get($name) {
<ide> return null;
<ide> }
<ide>
<del> /**
<del> * Call methods from CakeTime utility class
<del> */
<add>/**
<add> * Call methods from CakeTime utility class
<add> */
<ide> public function __call($method, $params) {
<ide> return call_user_func_array(array($this->_engine, $method), $params);
<ide> } | 3 |
Text | Text | fix minor typo [ci skip] | 1ac67a5ed60040755c96b0bc8a4ec7f44d581765 | <ide><path>guides/source/association_basics.md
<ide> The `belongs_to` association creates a one-to-one match with another model. In d
<ide>
<ide> #### Methods Added by `belongs_to`
<ide>
<del>When you declare a `belongs_to` association, the declaring class automatically gains five methods related to the association:
<add>When you declare a `belongs_to` association, the declaring class automatically gains 6 methods related to the association:
<ide>
<ide> * `association`
<ide> * `association=(associate)`
<ide> The `has_one` association creates a one-to-one match with another model. In data
<ide>
<ide> #### Methods Added by `has_one`
<ide>
<del>When you declare a `has_one` association, the declaring class automatically gains five methods related to the association:
<add>When you declare a `has_one` association, the declaring class automatically gains 6 methods related to the association:
<ide>
<ide> * `association`
<ide> * `association=(associate)`
<ide> The `has_many` association creates a one-to-many relationship with another model
<ide>
<ide> #### Methods Added by `has_many`
<ide>
<del>When you declare a `has_many` association, the declaring class automatically gains 16 methods related to the association:
<add>When you declare a `has_many` association, the declaring class automatically gains 17 methods related to the association:
<ide>
<ide> * `collection`
<ide> * `collection<<(object, ...)`
<ide> The `has_and_belongs_to_many` association creates a many-to-many relationship wi
<ide>
<ide> #### Methods Added by `has_and_belongs_to_many`
<ide>
<del>When you declare a `has_and_belongs_to_many` association, the declaring class automatically gains 16 methods related to the association:
<add>When you declare a `has_and_belongs_to_many` association, the declaring class automatically gains 17 methods related to the association:
<ide>
<ide> * `collection`
<ide> * `collection<<(object, ...)` | 1 |
Java | Java | fix compiler warnings | 13239a0c3d5bf226998f2da9eb66014118972a26 | <ide><path>spring-aop/src/main/java/org/springframework/aop/framework/ProxyFactory.java
<ide> /*
<del> * Copyright 2002-2009 the original author or authors.
<add> * Copyright 2002-2012 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide> public ProxyFactory(Class proxyInterface, TargetSource targetSource) {
<ide> * (if necessary for proxy creation).
<ide> * @return the proxy object
<ide> */
<del> @SuppressWarnings("unchecked")
<ide> public Object getProxy() {
<ide> return createAopProxy().getProxy();
<ide> }
<ide> public Object getProxy() {
<ide> * (or <code>null</code> for the low-level proxy facility's default)
<ide> * @return the proxy object
<ide> */
<del> @SuppressWarnings("unchecked")
<ide> public Object getProxy(ClassLoader classLoader) {
<ide> return createAopProxy().getProxy(classLoader);
<ide> }
<ide><path>spring-beans/src/main/java/org/springframework/beans/factory/config/DestructionAwareAttributeHolder.java
<ide> /*
<del> * Copyright 2002-2011 the original author or authors.
<add> * Copyright 2002-2012 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide> public Map<String, Object> getAttributeMap() {
<ide> * the name of the attribute to be returned
<ide> * @return the attribute value or <code>null</code> if not available
<ide> */
<del> @SuppressWarnings("unchecked")
<ide> public Object getAttribute(String name) {
<ide> return attributes.get(name);
<ide> }
<ide> public Object getAttribute(String name) {
<ide> * @return any previously object stored under the same name, if any,
<ide> * <code>null</code> otherwise
<ide> */
<del> @SuppressWarnings("unchecked")
<ide> public Object setAttribute(String name, Object value) {
<ide> return attributes.put(name, value);
<ide> }
<ide> public Object setAttribute(String name, Object value) {
<ide> * @return the removed object, or <code>null</code> if no object was present
<ide> * @see #registerDestructionCallback
<ide> */
<del> @SuppressWarnings("unchecked")
<ide> public Object removeAttribute(String name) {
<ide> Object value = attributes.remove(name);
<ide>
<ide><path>spring-context-support/src/main/java/org/springframework/scheduling/quartz/JobDetailFactoryBean.java
<ide> public void setApplicationContextJobDataKey(String applicationContextJobDataKey)
<ide> }
<ide>
<ide>
<del> @SuppressWarnings("unchecked")
<ide> public void afterPropertiesSet() {
<ide> if (this.name == null) {
<ide> this.name = this.beanName;
<ide> public void afterPropertiesSet() {
<ide> this.jobDetail = jdi;
<ide> */
<ide>
<del> Class jobDetailClass;
<add> Class<?> jobDetailClass;
<ide> try {
<ide> jobDetailClass = getClass().getClassLoader().loadClass("org.quartz.impl.JobDetailImpl");
<ide> }
<ide><path>spring-context/src/main/java/org/springframework/context/event/SourceFilteringListener.java
<ide> /*
<del> * Copyright 2002-2009 the original author or authors.
<add> * Copyright 2002-2012 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide> public int getOrder() {
<ide> * <p>The default implementation invokes the specified delegate, if any.
<ide> * @param event the event to process (matching the specified source)
<ide> */
<del> @SuppressWarnings("unchecked")
<ide> protected void onApplicationEventInternal(ApplicationEvent event) {
<ide> if (this.delegate == null) {
<ide> throw new IllegalStateException(
<ide><path>spring-context/src/main/java/org/springframework/instrument/classloading/websphere/WebSphereClassLoaderAdapter.java
<ide> /*
<del> * Copyright 2002-2011 the original author or authors.
<add> * Copyright 2002-2012 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide> public void addTransformer(ClassFileTransformer transformer) {
<ide> }
<ide> }
<ide>
<del> @SuppressWarnings("unchecked")
<ide> public ClassLoader getThrowawayClassLoader() {
<ide> try {
<ide> ClassLoader loader = (ClassLoader) cloneConstructor.newInstance(getClassLoader());
<ide> // clear out the transformers (copied as well)
<del> List list = (List) transformerList.get(loader);
<add> List<?> list = (List<?>) transformerList.get(loader);
<ide> list.clear();
<ide> return loader;
<ide> }
<ide><path>spring-context/src/test/java/org/springframework/context/support/TestProxyFactoryBean.java
<ide> /*
<del> * Copyright 2002-2011 the original author or authors.
<add> * Copyright 2002-2012 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide> import org.springframework.beans.factory.BeanFactory;
<ide> import org.springframework.beans.factory.BeanFactoryAware;
<ide>
<del>@SuppressWarnings({ "serial", "deprecation" })
<add>@SuppressWarnings("serial")
<ide> public class TestProxyFactoryBean extends AbstractSingletonProxyFactoryBean implements BeanFactoryAware {
<ide>
<ide> @Override
<ide><path>spring-core/src/main/java/org/springframework/core/LocalVariableTableParameterNameDiscoverer.java
<ide> /*
<del> * Copyright 2002-2010 the original author or authors.
<add> * Copyright 2002-2012 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide> public String[] getParameterNames(Method method) {
<ide> return null;
<ide> }
<ide>
<del> @SuppressWarnings("unchecked")
<ide> public String[] getParameterNames(Constructor ctor) {
<ide> Class<?> declaringClass = ctor.getDeclaringClass();
<ide> Map<Member, String[]> map = this.parameterNamesCache.get(declaringClass);
<ide><path>spring-core/src/main/java/org/springframework/core/MethodParameter.java
<ide> public Annotation[] getMethodAnnotations() {
<ide> * @param annotationType the annotation type to look for
<ide> * @return the annotation object, or <code>null</code> if not found
<ide> */
<del> @SuppressWarnings("unchecked")
<ide> public <T extends Annotation> T getMethodAnnotation(Class<T> annotationType) {
<ide> return getAnnotatedElement().getAnnotation(annotationType);
<ide> }
<ide><path>spring-jdbc/src/main/java/org/springframework/jdbc/core/ColumnMapRowMapper.java
<ide> /*
<del> * Copyright 2002-2009 the original author or authors.
<add> * Copyright 2002-2012 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide> public Map<String, Object> mapRow(ResultSet rs, int rowNum) throws SQLException
<ide> * @return the new Map instance
<ide> * @see org.springframework.util.LinkedCaseInsensitiveMap
<ide> */
<del> @SuppressWarnings("unchecked")
<ide> protected Map<String, Object> createColumnMap(int columnCount) {
<ide> return new LinkedCaseInsensitiveMap<Object>(columnCount);
<ide> }
<ide><path>spring-web/src/main/java/org/springframework/web/context/request/ServletRequestAttributes.java
<ide> /*
<del> * Copyright 2002-2009 the original author or authors.
<add> * Copyright 2002-2012 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide> public void removeAttribute(String name, int scope) {
<ide> }
<ide> }
<ide>
<del> @SuppressWarnings("unchecked")
<ide> public String[] getAttributeNames(int scope) {
<ide> if (scope == SCOPE_REQUEST) {
<ide> if (!isRequestActive()) {
<ide><path>spring-web/src/main/java/org/springframework/web/context/request/ServletWebRequest.java
<ide> /*
<del> * Copyright 2002-2011 the original author or authors.
<add> * Copyright 2002-2012 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide> public Object getNativeResponse() {
<ide> return getResponse();
<ide> }
<ide>
<del> @SuppressWarnings("unchecked")
<ide> public <T> T getNativeRequest(Class<T> requiredType) {
<ide> return WebUtils.getNativeRequest(getRequest(), requiredType);
<ide> }
<ide>
<del> @SuppressWarnings("unchecked")
<ide> public <T> T getNativeResponse(Class<T> requiredType) {
<ide> return WebUtils.getNativeResponse(getResponse(), requiredType);
<ide> }
<ide> public String getHeader(String headerName) {
<ide> return getRequest().getHeader(headerName);
<ide> }
<ide>
<del> @SuppressWarnings("unchecked")
<ide> public String[] getHeaderValues(String headerName) {
<ide> String[] headerValues = StringUtils.toStringArray(getRequest().getHeaders(headerName));
<ide> return (!ObjectUtils.isEmpty(headerValues) ? headerValues : null);
<ide> }
<ide>
<del> @SuppressWarnings("unchecked")
<ide> public Iterator<String> getHeaderNames() {
<ide> return CollectionUtils.toIterator(getRequest().getHeaderNames());
<ide> }
<ide> public String[] getParameterValues(String paramName) {
<ide> return getRequest().getParameterValues(paramName);
<ide> }
<ide>
<del> @SuppressWarnings("unchecked")
<ide> public Iterator<String> getParameterNames() {
<ide> return CollectionUtils.toIterator(getRequest().getParameterNames());
<ide> }
<ide>
<del> @SuppressWarnings("unchecked")
<ide> public Map<String, String[]> getParameterMap() {
<ide> return getRequest().getParameterMap();
<ide> } | 11 |
PHP | PHP | clarify the type of response we're looking for | 2fc313b8b1505d7c6ca16c52a1c33cf8aed7fc7a | <ide><path>app/Exceptions/Handler.php
<ide> public function report(Exception $e)
<ide> }
<ide>
<ide> /**
<del> * Render an exception into a response.
<add> * Render an exception into an HTTP response.
<ide> *
<ide> * @param \Illuminate\Http\Request $request
<ide> * @param \Exception $e | 1 |
PHP | PHP | add numeric validator for float fields by default | bcafde6212a8614d69e9c399197166c67eb0b0d1 | <ide><path>lib/Cake/Console/Command/Task/ModelTask.php
<ide> public function fieldValidation($fieldName, $metaData, $primaryKey = 'id') {
<ide> $guess = $methods['notEmpty'];
<ide> } elseif ($metaData['type'] === 'integer') {
<ide> $guess = $methods['numeric'];
<add> } elseif ($metaData['type'] === 'float') {
<add> $guess = $methods['numeric'];
<ide> } elseif ($metaData['type'] === 'boolean') {
<ide> $guess = $methods['boolean'];
<ide> } elseif ($metaData['type'] === 'date') { | 1 |
PHP | PHP | add a space between 2 lines | 38eb84b57c5cb81a1beb70ba6421635a78408174 | <ide><path>src/Illuminate/Support/Collection.php
<ide> public function lists($value, $key = null)
<ide> else
<ide> {
<ide> $itemKey = is_object($item) ? $item->{$key} : $item[$key];
<add>
<ide> $results[$itemKey] = $itemValue;
<ide> }
<ide> } | 1 |
Mixed | Go | remove deprecated cli flags | 7929888214741c4ab194c44e0b14ac08aca06556 | <ide><path>api/client/attach.go
<ide> import (
<ide> // Usage: docker attach [OPTIONS] CONTAINER
<ide> func (cli *DockerCli) CmdAttach(args ...string) error {
<ide> cmd := Cli.Subcmd("attach", []string{"CONTAINER"}, Cli.DockerCommands["attach"].Description, true)
<del> noStdin := cmd.Bool([]string{"#nostdin", "-no-stdin"}, false, "Do not attach STDIN")
<del> proxy := cmd.Bool([]string{"#sig-proxy", "-sig-proxy"}, true, "Proxy all received signals to the process")
<add> noStdin := cmd.Bool([]string{"-no-stdin"}, false, "Do not attach STDIN")
<add> proxy := cmd.Bool([]string{"-sig-proxy"}, true, "Proxy all received signals to the process")
<ide>
<ide> cmd.Require(flag.Exact, 1)
<ide>
<ide><path>api/client/build.go
<ide> func (cli *DockerCli) CmdBuild(args ...string) error {
<ide> flTags := opts.NewListOpts(validateTag)
<ide> cmd.Var(&flTags, []string{"t", "-tag"}, "Name and optionally a tag in the 'name:tag' format")
<ide> suppressOutput := cmd.Bool([]string{"q", "-quiet"}, false, "Suppress the verbose output generated by the containers")
<del> noCache := cmd.Bool([]string{"#no-cache", "-no-cache"}, false, "Do not use cache when building the image")
<del> rm := cmd.Bool([]string{"#rm", "-rm"}, true, "Remove intermediate containers after a successful build")
<add> noCache := cmd.Bool([]string{"-no-cache"}, false, "Do not use cache when building the image")
<add> rm := cmd.Bool([]string{"-rm"}, true, "Remove intermediate containers after a successful build")
<ide> forceRm := cmd.Bool([]string{"-force-rm"}, false, "Always remove intermediate containers")
<ide> pull := cmd.Bool([]string{"-pull"}, false, "Always attempt to pull a newer version of the image")
<ide> dockerfileName := cmd.String([]string{"f", "-file"}, "", "Name of the Dockerfile (Default is 'PATH/Dockerfile')")
<ide><path>api/client/commit.go
<ide> func (cli *DockerCli) CmdCommit(args ...string) error {
<ide> cmd := Cli.Subcmd("commit", []string{"CONTAINER [REPOSITORY[:TAG]]"}, Cli.DockerCommands["commit"].Description, true)
<ide> flPause := cmd.Bool([]string{"p", "-pause"}, true, "Pause container during commit")
<ide> flComment := cmd.String([]string{"m", "-message"}, "", "Commit message")
<del> flAuthor := cmd.String([]string{"a", "#author", "-author"}, "", "Author (e.g., \"John Hannibal Smith <[email protected]>\")")
<add> flAuthor := cmd.String([]string{"a", "-author"}, "", "Author (e.g., \"John Hannibal Smith <[email protected]>\")")
<ide> flChanges := opts.NewListOpts(nil)
<ide> cmd.Var(&flChanges, []string{"c", "-change"}, "Apply Dockerfile instruction to the created image")
<ide> // FIXME: --run is deprecated, it will be replaced with inline Dockerfile commands.
<del> flConfig := cmd.String([]string{"#run", "#-run"}, "", "This option is deprecated and will be removed in a future version in favor of inline Dockerfile-compatible commands")
<add> flConfig := cmd.String([]string{"#-run"}, "", "This option is deprecated and will be removed in a future version in favor of inline Dockerfile-compatible commands")
<ide> cmd.Require(flag.Max, 2)
<ide> cmd.Require(flag.Min, 1)
<ide>
<ide><path>api/client/events.go
<ide> import (
<ide> // Usage: docker events [OPTIONS]
<ide> func (cli *DockerCli) CmdEvents(args ...string) error {
<ide> cmd := Cli.Subcmd("events", nil, Cli.DockerCommands["events"].Description, true)
<del> since := cmd.String([]string{"#since", "-since"}, "", "Show all events created since timestamp")
<add> since := cmd.String([]string{"-since"}, "", "Show all events created since timestamp")
<ide> until := cmd.String([]string{"-until"}, "", "Stream events until this timestamp")
<ide> flFilter := opts.NewListOpts(nil)
<ide> cmd.Var(&flFilter, []string{"f", "-filter"}, "Filter output based on conditions provided")
<ide><path>api/client/history.go
<ide> func (cli *DockerCli) CmdHistory(args ...string) error {
<ide> cmd := Cli.Subcmd("history", []string{"IMAGE"}, Cli.DockerCommands["history"].Description, true)
<ide> human := cmd.Bool([]string{"H", "-human"}, true, "Print sizes and dates in human readable format")
<ide> quiet := cmd.Bool([]string{"q", "-quiet"}, false, "Only show numeric IDs")
<del> noTrunc := cmd.Bool([]string{"#notrunc", "-no-trunc"}, false, "Don't truncate output")
<add> noTrunc := cmd.Bool([]string{"-no-trunc"}, false, "Don't truncate output")
<ide> cmd.Require(flag.Exact, 1)
<ide>
<ide> cmd.ParseFlags(args, true)
<ide><path>api/client/images.go
<ide> func (cli *DockerCli) CmdImages(args ...string) error {
<ide> cmd := Cli.Subcmd("images", []string{"[REPOSITORY[:TAG]]"}, Cli.DockerCommands["images"].Description, true)
<ide> quiet := cmd.Bool([]string{"q", "-quiet"}, false, "Only show numeric IDs")
<ide> all := cmd.Bool([]string{"a", "-all"}, false, "Show all images (default hides intermediate images)")
<del> noTrunc := cmd.Bool([]string{"#notrunc", "-no-trunc"}, false, "Don't truncate output")
<add> noTrunc := cmd.Bool([]string{"-no-trunc"}, false, "Don't truncate output")
<ide> showDigests := cmd.Bool([]string{"-digests"}, false, "Show digests")
<ide>
<ide> flFilter := opts.NewListOpts(nil)
<ide><path>api/client/inspect.go
<ide> var funcMap = template.FuncMap{
<ide> // Usage: docker inspect [OPTIONS] CONTAINER|IMAGE [CONTAINER|IMAGE...]
<ide> func (cli *DockerCli) CmdInspect(args ...string) error {
<ide> cmd := Cli.Subcmd("inspect", []string{"CONTAINER|IMAGE [CONTAINER|IMAGE...]"}, Cli.DockerCommands["inspect"].Description, true)
<del> tmplStr := cmd.String([]string{"f", "#format", "-format"}, "", "Format the output using the given go template")
<add> tmplStr := cmd.String([]string{"f", "-format"}, "", "Format the output using the given go template")
<ide> inspectType := cmd.String([]string{"-type"}, "", "Return JSON for specified type, (e.g image or container)")
<ide> size := cmd.Bool([]string{"s", "-size"}, false, "Display total file sizes if the type is container")
<ide> cmd.Require(flag.Min, 1)
<ide><path>api/client/ps.go
<ide> func (cli *DockerCli) CmdPs(args ...string) error {
<ide> quiet = cmd.Bool([]string{"q", "-quiet"}, false, "Only display numeric IDs")
<ide> size = cmd.Bool([]string{"s", "-size"}, false, "Display total file sizes")
<ide> all = cmd.Bool([]string{"a", "-all"}, false, "Show all containers (default shows just running)")
<del> noTrunc = cmd.Bool([]string{"#notrunc", "-no-trunc"}, false, "Don't truncate output")
<add> noTrunc = cmd.Bool([]string{"-no-trunc"}, false, "Don't truncate output")
<ide> nLatest = cmd.Bool([]string{"l", "-latest"}, false, "Show the latest created container, include non-running")
<del> since = cmd.String([]string{"#sinceId", "#-since-id", "-since"}, "", "Show created since Id or Name, include non-running")
<del> before = cmd.String([]string{"#beforeId", "#-before-id", "-before"}, "", "Show only container created before Id or Name")
<add> since = cmd.String([]string{"-since"}, "", "Show created since Id or Name, include non-running")
<add> before = cmd.String([]string{"-before"}, "", "Show only container created before Id or Name")
<ide> last = cmd.Int([]string{"n"}, -1, "Show n last created containers, include non-running")
<ide> format = cmd.String([]string{"-format"}, "", "Pretty-print containers using a Go template")
<ide> flFilter = opts.NewListOpts(nil)
<ide><path>api/client/rm.go
<ide> import (
<ide> func (cli *DockerCli) CmdRm(args ...string) error {
<ide> cmd := Cli.Subcmd("rm", []string{"CONTAINER [CONTAINER...]"}, Cli.DockerCommands["rm"].Description, true)
<ide> v := cmd.Bool([]string{"v", "-volumes"}, false, "Remove the volumes associated with the container")
<del> link := cmd.Bool([]string{"l", "#link", "-link"}, false, "Remove the specified link")
<add> link := cmd.Bool([]string{"l", "-link"}, false, "Remove the specified link")
<ide> force := cmd.Bool([]string{"f", "-force"}, false, "Force the removal of a running container (uses SIGKILL)")
<ide> cmd.Require(flag.Min, 1)
<ide>
<ide><path>api/client/search.go
<ide> func (r ByStars) Less(i, j int) bool { return r[i].StarCount < r[j].StarCount }
<ide> // Usage: docker search [OPTIONS] TERM
<ide> func (cli *DockerCli) CmdSearch(args ...string) error {
<ide> cmd := Cli.Subcmd("search", []string{"TERM"}, Cli.DockerCommands["search"].Description, true)
<del> noTrunc := cmd.Bool([]string{"#notrunc", "-no-trunc"}, false, "Don't truncate output")
<del> trusted := cmd.Bool([]string{"#t", "#trusted", "#-trusted"}, false, "Only show trusted builds")
<add> noTrunc := cmd.Bool([]string{"-no-trunc"}, false, "Don't truncate output")
<ide> automated := cmd.Bool([]string{"-automated"}, false, "Only show automated builds")
<del> stars := cmd.Uint([]string{"s", "#stars", "-stars"}, 0, "Only displays with at least x stars")
<add> stars := cmd.Uint([]string{"s", "-stars"}, 0, "Only displays with at least x stars")
<ide> cmd.Require(flag.Exact, 1)
<ide>
<ide> cmd.ParseFlags(args, true)
<ide> func (cli *DockerCli) CmdSearch(args ...string) error {
<ide> w := tabwriter.NewWriter(cli.out, 10, 1, 3, ' ', 0)
<ide> fmt.Fprintf(w, "NAME\tDESCRIPTION\tSTARS\tOFFICIAL\tAUTOMATED\n")
<ide> for _, res := range results {
<del> if (*automated && !res.IsAutomated) || (int(*stars) > res.StarCount) || (*trusted && !res.IsTrusted) {
<add> if (*automated && !res.IsAutomated) || (int(*stars) > res.StarCount) {
<ide> continue
<ide> }
<ide> desc := strings.Replace(res.Description, "\n", " ", -1)
<ide><path>api/client/tag.go
<ide> import (
<ide> // Usage: docker tag [OPTIONS] IMAGE[:TAG] [REGISTRYHOST/][USERNAME/]NAME[:TAG]
<ide> func (cli *DockerCli) CmdTag(args ...string) error {
<ide> cmd := Cli.Subcmd("tag", []string{"IMAGE[:TAG] [REGISTRYHOST/][USERNAME/]NAME[:TAG]"}, Cli.DockerCommands["tag"].Description, true)
<del> force := cmd.Bool([]string{"f", "#force", "-force"}, false, "Force")
<add> force := cmd.Bool([]string{"f", "-force"}, false, "Force")
<ide> cmd.Require(flag.Exact, 2)
<ide>
<ide> cmd.ParseFlags(args, true)
<ide><path>docker/daemon.go
<ide> import (
<ide> const daemonUsage = " docker daemon [ --help | ... ]\n"
<ide>
<ide> var (
<del> flDaemon = flag.Bool([]string{"#d", "#-daemon"}, false, "Enable daemon mode (deprecated; use docker daemon)")
<ide> daemonCli cli.Handler = NewDaemonCli()
<ide> )
<ide>
<del>// TODO: remove once `-d` is retired
<del>func handleGlobalDaemonFlag() {
<del> // This block makes sure that if the deprecated daemon flag `--daemon` is absent,
<del> // then all daemon-specific flags are absent as well.
<del> if !*flDaemon && daemonFlags != nil {
<del> flag.CommandLine.Visit(func(fl *flag.Flag) {
<del> for _, name := range fl.Names {
<del> name := strings.TrimPrefix(name, "#")
<del> if daemonFlags.Lookup(name) != nil {
<del> // daemon flag was NOT specified, but daemon-specific flags were
<del> // so let's error out
<del> fmt.Fprintf(os.Stderr, "docker: the daemon flag '-%s' must follow the 'docker daemon' command.\n", name)
<del> os.Exit(1)
<del> }
<del> }
<del> })
<del> }
<del>
<del> if *flDaemon {
<del> daemonCli.(*DaemonCli).CmdDaemon(flag.Args()...)
<del> os.Exit(0)
<del> }
<del>}
<del>
<ide> func presentInHelp(usage string) string { return usage }
<ide> func absentFromHelp(string) string { return "" }
<ide>
<ide> func (cli *DaemonCli) CmdDaemon(args ...string) error {
<ide> // warn from uuid package when running the daemon
<ide> uuid.Loggerf = logrus.Warnf
<ide>
<del> if *flDaemon {
<del> // allow legacy forms `docker -D -d` and `docker -d -D`
<del> logrus.Warn("please use 'docker daemon' instead.")
<del> } else if !commonFlags.FlagSet.IsEmpty() || !clientFlags.FlagSet.IsEmpty() {
<add> if !commonFlags.FlagSet.IsEmpty() || !clientFlags.FlagSet.IsEmpty() {
<ide> // deny `docker -D daemon`
<ide> illegalFlag := getGlobalFlag()
<ide> fmt.Fprintf(os.Stderr, "invalid flag '-%s'.\nSee 'docker daemon --help'.\n", illegalFlag.Names[0])
<ide><path>docker/daemon_none.go
<ide> const daemonUsage = ""
<ide>
<ide> var daemonCli cli.Handler
<ide>
<del>// TODO: remove once `-d` is retired
<del>func handleGlobalDaemonFlag() {}
<del>
<ide> // notifySystem sends a message to the host when the server is ready to be used
<ide> func notifySystem() {
<ide> }
<ide><path>docker/docker.go
<ide> func main() {
<ide> return
<ide> }
<ide>
<del> // TODO: remove once `-d` is retired
<del> handleGlobalDaemonFlag()
<ide> clientCli := client.NewDockerCli(stdin, stdout, stderr, clientFlags)
<ide>
<ide> c := cli.New(clientCli, daemonCli)
<ide><path>docs/misc/deprecated.md
<ide> are deprecated and replaced with double-dash options (`--opt`):
<ide> docker ps -sinceId
<ide> docker rm -link
<ide> docker run -cidfile
<del> docker run -cpuset
<ide> docker run -dns
<ide> docker run -entrypoint
<ide> docker run -expose
<ide> are deprecated and replaced with double-dash options (`--opt`):
<ide>
<ide> The following double-dash options are deprecated and have no replacement:
<ide>
<add> docker run --cpuset
<ide> docker run --networking
<ide> docker ps --since-id
<ide> docker ps --before-id
<ide><path>integration-cli/docker_api_inspect_unix_test.go
<ide> func (s *DockerSuite) TestInspectApiCpusetInConfigPre120(c *check.C) {
<ide> testRequires(c, cgroupCpuset)
<ide>
<ide> name := "cpusetinconfig-pre120"
<del> dockerCmd(c, "run", "--name", name, "--cpuset", "0-1", "busybox", "true")
<add> dockerCmd(c, "run", "--name", name, "--cpuset-cpus", "0-1", "busybox", "true")
<ide>
<ide> status, body, err := sockRequest("GET", fmt.Sprintf("/v1.19/containers/%s/json", name), nil)
<ide> c.Assert(status, check.Equals, http.StatusOK)
<ide><path>integration-cli/docker_cli_daemon_test.go
<ide> func (s *DockerDaemonSuite) TestDaemonLogLevelWrong(c *check.C) {
<ide> c.Assert(s.d.Start("--log-level=bogus"), check.NotNil, check.Commentf("Daemon shouldn't start with wrong log level"))
<ide> }
<ide>
<del>func (s *DockerSuite) TestDaemonStartWithBackwardCompatibility(c *check.C) {
<del>
<del> var validCommandArgs = [][]string{
<del> {"--selinux-enabled", "-l", "info"},
<del> {"--insecure-registry", "daemon"},
<del> }
<del>
<del> var invalidCommandArgs = [][]string{
<del> {"--selinux-enabled", "--storage-opt"},
<del> {"-D", "-b"},
<del> {"--config", "/tmp"},
<del> }
<del>
<del> for _, args := range validCommandArgs {
<del> d := NewDaemon(c)
<del> d.Command = "--daemon"
<del> if err := d.Start(args...); err != nil {
<del> c.Fatalf("Daemon should have started successfully with --daemon %v: %v", args, err)
<del> }
<del> d.Stop()
<del> }
<del>
<del> for _, args := range invalidCommandArgs {
<del> d := NewDaemon(c)
<del> if err := d.Start(args...); err == nil {
<del> d.Stop()
<del> c.Fatalf("Daemon should have failed to start with %v", args)
<del> }
<del> }
<del>}
<del>
<ide> func (s *DockerSuite) TestDaemonStartWithDaemonCommand(c *check.C) {
<ide>
<ide> type kind int
<ide><path>integration-cli/docker_cli_ps_test.go
<ide> import (
<ide> "os"
<ide> "os/exec"
<ide> "path/filepath"
<add> "sort"
<ide> "strconv"
<ide> "strings"
<ide> "time"
<ide>
<ide> "github.com/docker/docker/pkg/integration/checker"
<del> "github.com/go-check/check"
<del> "sort"
<del>
<ide> "github.com/docker/docker/pkg/stringid"
<add> "github.com/go-check/check"
<ide> )
<ide>
<ide> func (s *DockerSuite) TestPsListContainersBase(c *check.C) {
<ide><path>integration-cli/docker_cli_run_test.go
<ide> func (s *DockerSuite) TestRunWithoutNetworking(c *check.C) {
<ide> if exitCode != 1 {
<ide> c.Errorf("--net=none should've disabled the network; the container shouldn't have been able to ping 8.8.8.8")
<ide> }
<del>
<del> // And then with the short form -n
<del> out, exitCode, err = dockerCmdWithError("run", "-n=false", image, "ping", count, "1", "8.8.8.8")
<del> if err != nil && exitCode != 1 {
<del> c.Fatal(out, err)
<del> }
<del> if exitCode != 1 {
<del> c.Errorf("-n=false should've disabled the network; the container shouldn't have been able to ping 8.8.8.8")
<del> }
<ide> }
<ide>
<ide> //test --link use container name to link target
<ide> func (s *DockerSuite) TestRunLinksContainerWithContainerId(c *check.C) {
<ide>
<ide> // Issue 9677.
<ide> func (s *DockerSuite) TestRunWithDaemonFlags(c *check.C) {
<del> out, _, err := dockerCmdWithError("--exec-opt", "foo=bar", "run", "-i", "-t", "busybox", "true")
<add> out, _, err := dockerCmdWithError("--exec-opt", "foo=bar", "run", "-i", "busybox", "true")
<ide> if err != nil {
<del> if !strings.Contains(out, "must follow the 'docker daemon' command") && // daemon
<del> !strings.Contains(out, "flag provided but not defined: --exec-opt") { // no daemon (client-only)
<add> if !strings.Contains(out, "flag provided but not defined: --exec-opt") { // no daemon (client-only)
<ide> c.Fatal(err, out)
<ide> }
<ide> }
<ide><path>integration-cli/docker_cli_run_unix_test.go
<ide> func (s *DockerSuite) TestRunEchoStdoutWithCPUSharesAndMemoryLimit(c *check.C) {
<ide> c.Assert(out, checker.Equals, "test\n", check.Commentf("container should've printed 'test'"))
<ide> }
<ide>
<del>func (s *DockerSuite) TestRunWithCpuset(c *check.C) {
<del> testRequires(c, cgroupCpuset)
<del> dockerCmd(c, "run", "--cpuset", "0", "busybox", "true")
<del>}
<del>
<ide> func (s *DockerSuite) TestRunWithCpusetCpus(c *check.C) {
<ide> testRequires(c, cgroupCpuset)
<ide> dockerCmd(c, "run", "--cpuset-cpus", "0", "busybox", "true")
<ide><path>runconfig/parse.go
<ide> func Parse(cmd *flag.FlagSet, args []string) (*Config, *HostConfig, *flag.FlagSe
<ide> flSecurityOpt = opts.NewListOpts(nil)
<ide> flLabelsFile = opts.NewListOpts(nil)
<ide> flLoggingOpts = opts.NewListOpts(nil)
<del> flNetwork = cmd.Bool([]string{"#n", "#-networking"}, true, "Enable networking for this container")
<del> flPrivileged = cmd.Bool([]string{"#privileged", "-privileged"}, false, "Give extended privileges to this container")
<add> flPrivileged = cmd.Bool([]string{"-privileged"}, false, "Give extended privileges to this container")
<ide> flPidMode = cmd.String([]string{"-pid"}, "", "PID namespace to use")
<ide> flUTSMode = cmd.String([]string{"-uts"}, "", "UTS namespace to use")
<ide> flPublishAll = cmd.Bool([]string{"P", "-publish-all"}, false, "Publish all exposed ports to random ports")
<ide> flStdin = cmd.Bool([]string{"i", "-interactive"}, false, "Keep STDIN open even if not attached")
<ide> flTty = cmd.Bool([]string{"t", "-tty"}, false, "Allocate a pseudo-TTY")
<ide> flOomKillDisable = cmd.Bool([]string{"-oom-kill-disable"}, false, "Disable OOM Killer")
<del> flContainerIDFile = cmd.String([]string{"#cidfile", "-cidfile"}, "", "Write the container ID to the file")
<del> flEntrypoint = cmd.String([]string{"#entrypoint", "-entrypoint"}, "", "Overwrite the default ENTRYPOINT of the image")
<add> flContainerIDFile = cmd.String([]string{"-cidfile"}, "", "Write the container ID to the file")
<add> flEntrypoint = cmd.String([]string{"-entrypoint"}, "", "Overwrite the default ENTRYPOINT of the image")
<ide> flHostname = cmd.String([]string{"h", "-hostname"}, "", "Container host name")
<ide> flMemoryString = cmd.String([]string{"m", "-memory"}, "", "Memory limit")
<ide> flMemoryReservation = cmd.String([]string{"-memory-reservation"}, "", "Memory soft limit")
<ide> func Parse(cmd *flag.FlagSet, args []string) (*Config, *HostConfig, *flag.FlagSe
<ide> flCPUShares = cmd.Int64([]string{"#c", "-cpu-shares"}, 0, "CPU shares (relative weight)")
<ide> flCPUPeriod = cmd.Int64([]string{"-cpu-period"}, 0, "Limit CPU CFS (Completely Fair Scheduler) period")
<ide> flCPUQuota = cmd.Int64([]string{"-cpu-quota"}, 0, "Limit CPU CFS (Completely Fair Scheduler) quota")
<del> flCpusetCpus = cmd.String([]string{"#-cpuset", "-cpuset-cpus"}, "", "CPUs in which to allow execution (0-3, 0,1)")
<add> flCpusetCpus = cmd.String([]string{"-cpuset-cpus"}, "", "CPUs in which to allow execution (0-3, 0,1)")
<ide> flCpusetMems = cmd.String([]string{"-cpuset-mems"}, "", "MEMs in which to allow execution (0-3, 0,1)")
<ide> flBlkioWeight = cmd.Uint16([]string{"-blkio-weight"}, 0, "Block IO (relative weight), between 10 and 1000")
<ide> flSwappiness = cmd.Int64([]string{"-memory-swappiness"}, -1, "Tuning container memory swappiness (0 to 100)")
<ide> func Parse(cmd *flag.FlagSet, args []string) (*Config, *HostConfig, *flag.FlagSe
<ide> cmd.Var(&flAttach, []string{"a", "-attach"}, "Attach to STDIN, STDOUT or STDERR")
<ide> cmd.Var(&flBlkioWeightDevice, []string{"-blkio-weight-device"}, "Block IO weight (relative device weight)")
<ide> cmd.Var(&flVolumes, []string{"v", "-volume"}, "Bind mount a volume")
<del> cmd.Var(&flLinks, []string{"#link", "-link"}, "Add link to another container")
<add> cmd.Var(&flLinks, []string{"-link"}, "Add link to another container")
<ide> cmd.Var(&flDevices, []string{"-device"}, "Add a host device to the container")
<ide> cmd.Var(&flLabels, []string{"l", "-label"}, "Set meta data on a container")
<ide> cmd.Var(&flLabelsFile, []string{"-label-file"}, "Read in a line delimited file of labels")
<ide> cmd.Var(&flEnv, []string{"e", "-env"}, "Set environment variables")
<ide> cmd.Var(&flEnvFile, []string{"-env-file"}, "Read in a file of environment variables")
<ide> cmd.Var(&flPublish, []string{"p", "-publish"}, "Publish a container's port(s) to the host")
<del> cmd.Var(&flExpose, []string{"#expose", "-expose"}, "Expose a port or a range of ports")
<del> cmd.Var(&flDNS, []string{"#dns", "-dns"}, "Set custom DNS servers")
<add> cmd.Var(&flExpose, []string{"-expose"}, "Expose a port or a range of ports")
<add> cmd.Var(&flDNS, []string{"-dns"}, "Set custom DNS servers")
<ide> cmd.Var(&flDNSSearch, []string{"-dns-search"}, "Set custom DNS search domains")
<ide> cmd.Var(&flDNSOptions, []string{"-dns-opt"}, "Set DNS options")
<ide> cmd.Var(&flExtraHosts, []string{"-add-host"}, "Add a custom host-to-IP mapping (host:ip)")
<del> cmd.Var(&flVolumesFrom, []string{"#volumes-from", "-volumes-from"}, "Mount volumes from the specified container(s)")
<add> cmd.Var(&flVolumesFrom, []string{"-volumes-from"}, "Mount volumes from the specified container(s)")
<ide> cmd.Var(&flCapAdd, []string{"-cap-add"}, "Add Linux capabilities")
<ide> cmd.Var(&flCapDrop, []string{"-cap-drop"}, "Drop Linux capabilities")
<ide> cmd.Var(&flGroupAdd, []string{"-group-add"}, "Add additional groups to join")
<ide> func Parse(cmd *flag.FlagSet, args []string) (*Config, *HostConfig, *flag.FlagSe
<ide> }
<ide>
<ide> config := &Config{
<del> Hostname: hostname,
<del> Domainname: domainname,
<del> ExposedPorts: ports,
<del> User: *flUser,
<del> Tty: *flTty,
<del> NetworkDisabled: !*flNetwork,
<add> Hostname: hostname,
<add> Domainname: domainname,
<add> ExposedPorts: ports,
<add> User: *flUser,
<add> Tty: *flTty,
<add> // TODO: deprecated, it comes from -n, --networking
<add> // it's still needed internally to set the network to disabled
<add> // if e.g. bridge is none in daemon opts, and in inspect
<add> NetworkDisabled: false,
<ide> OpenStdin: *flStdin,
<ide> AttachStdin: attachStdin,
<ide> AttachStdout: attachStdout, | 21 |
Javascript | Javascript | convert the `preferences` to an es6 class | c5f9193777ad319903d9f5b560a62122f2dc8901 | <ide><path>web/app.js
<ide> import { PDFLinkService } from './pdf_link_service';
<ide> import { PDFOutlineViewer } from './pdf_outline_viewer';
<ide> import { PDFPresentationMode } from './pdf_presentation_mode';
<ide> import { PDFThumbnailViewer } from './pdf_thumbnail_viewer';
<del>import { Preferences } from './preferences';
<ide> import { SecondaryToolbar } from './secondary_toolbar';
<ide> import { Toolbar } from './toolbar';
<ide> import { ViewHistory } from './view_history';
<ide> var DefaultExternalServices = {
<ide> createDownloadManager: function () {
<ide> throw new Error('Not implemented: createDownloadManager');
<ide> },
<add> createPreferences() {
<add> throw new Error('Not implemented: createPreferences');
<add> },
<ide> supportsIntegratedFind: false,
<ide> supportsDocumentFonts: true,
<ide> supportsDocumentColors: true,
<ide> var PDFViewerApplication = {
<ide> store: null,
<ide> /** @type {DownloadManager} */
<ide> downloadManager: null,
<add> /** @type {Preferences} */
<add> preferences: null,
<ide> /** @type {Toolbar} */
<ide> toolbar: null,
<ide> /** @type {SecondaryToolbar} */
<ide> var PDFViewerApplication = {
<ide>
<ide> // called once when the document is loaded
<ide> initialize: function pdfViewInitialize(appConfig) {
<del> var self = this;
<del>
<del> Preferences.initialize();
<del> this.preferences = Preferences;
<add> this.preferences = this.externalServices.createPreferences();
<ide>
<ide> configure(PDFJS);
<ide> this.appConfig = appConfig;
<ide>
<del> return this._readPreferences().then(function () {
<del> return self._initializeViewerComponents();
<del> }).then(function () {
<add> return this._readPreferences().then(() => {
<add> return this._initializeViewerComponents();
<add> }).then(() => {
<ide> // Bind the various event handlers *after* the viewer has been
<ide> // initialized, to prevent errors if an event arrives too soon.
<del> self.bindEvents();
<del> self.bindWindowEvents();
<add> this.bindEvents();
<add> this.bindWindowEvents();
<ide>
<ide> if (typeof PDFJSDev === 'undefined' || PDFJSDev.test('GENERIC')) {
<ide> // For backwards compatibility, we dispatch the 'localized' event on
<ide> // the `eventBus` once the viewer has been initialized.
<del> localized.then(function () {
<del> self.eventBus.dispatch('localized');
<add> localized.then(() => {
<add> this.eventBus.dispatch('localized');
<ide> });
<ide> }
<ide>
<del> if (self.isViewerEmbedded && !PDFJS.isExternalLinkTargetSet()) {
<add> if (this.isViewerEmbedded && !PDFJS.isExternalLinkTargetSet()) {
<ide> // Prevent external links from "replacing" the viewer,
<ide> // when it's embedded in e.g. an iframe or an object.
<ide> PDFJS.externalLinkTarget = PDFJS.LinkTarget.TOP;
<ide> }
<ide>
<del> self.initialized = true;
<add> this.initialized = true;
<ide> });
<ide> },
<ide>
<ide> /**
<ide> * @private
<ide> */
<ide> _readPreferences: function () {
<del> var self = this;
<add> var { preferences, viewerPrefs, } = this;
<ide>
<ide> return Promise.all([
<del> Preferences.get('enableWebGL').then(function resolved(value) {
<add> preferences.get('enableWebGL').then(function resolved(value) {
<ide> PDFJS.disableWebGL = !value;
<ide> }),
<del> Preferences.get('sidebarViewOnLoad').then(function resolved(value) {
<del> self.viewerPrefs['sidebarViewOnLoad'] = value;
<add> preferences.get('sidebarViewOnLoad').then(function resolved(value) {
<add> viewerPrefs['sidebarViewOnLoad'] = value;
<ide> }),
<del> Preferences.get('pdfBugEnabled').then(function resolved(value) {
<del> self.viewerPrefs['pdfBugEnabled'] = value;
<add> preferences.get('pdfBugEnabled').then(function resolved(value) {
<add> viewerPrefs['pdfBugEnabled'] = value;
<ide> }),
<del> Preferences.get('showPreviousViewOnLoad').then(function resolved(value) {
<del> self.viewerPrefs['showPreviousViewOnLoad'] = value;
<add> preferences.get('showPreviousViewOnLoad').then(function resolved(value) {
<add> viewerPrefs['showPreviousViewOnLoad'] = value;
<ide> }),
<del> Preferences.get('defaultZoomValue').then(function resolved(value) {
<del> self.viewerPrefs['defaultZoomValue'] = value;
<add> preferences.get('defaultZoomValue').then(function resolved(value) {
<add> viewerPrefs['defaultZoomValue'] = value;
<ide> }),
<del> Preferences.get('enhanceTextSelection').then(function resolved(value) {
<del> self.viewerPrefs['enhanceTextSelection'] = value;
<add> preferences.get('enhanceTextSelection').then(function resolved(value) {
<add> viewerPrefs['enhanceTextSelection'] = value;
<ide> }),
<del> Preferences.get('disableTextLayer').then(function resolved(value) {
<add> preferences.get('disableTextLayer').then(function resolved(value) {
<ide> if (PDFJS.disableTextLayer === true) {
<ide> return;
<ide> }
<ide> PDFJS.disableTextLayer = value;
<ide> }),
<del> Preferences.get('disableRange').then(function resolved(value) {
<add> preferences.get('disableRange').then(function resolved(value) {
<ide> if (PDFJS.disableRange === true) {
<ide> return;
<ide> }
<ide> PDFJS.disableRange = value;
<ide> }),
<del> Preferences.get('disableStream').then(function resolved(value) {
<add> preferences.get('disableStream').then(function resolved(value) {
<ide> if (PDFJS.disableStream === true) {
<ide> return;
<ide> }
<ide> PDFJS.disableStream = value;
<ide> }),
<del> Preferences.get('disableAutoFetch').then(function resolved(value) {
<add> preferences.get('disableAutoFetch').then(function resolved(value) {
<ide> PDFJS.disableAutoFetch = value;
<ide> }),
<del> Preferences.get('disableFontFace').then(function resolved(value) {
<add> preferences.get('disableFontFace').then(function resolved(value) {
<ide> if (PDFJS.disableFontFace === true) {
<ide> return;
<ide> }
<ide> PDFJS.disableFontFace = value;
<ide> }),
<del> Preferences.get('useOnlyCssZoom').then(function resolved(value) {
<add> preferences.get('useOnlyCssZoom').then(function resolved(value) {
<ide> PDFJS.useOnlyCssZoom = value;
<ide> }),
<del> Preferences.get('externalLinkTarget').then(function resolved(value) {
<add> preferences.get('externalLinkTarget').then(function resolved(value) {
<ide> if (PDFJS.isExternalLinkTargetSet()) {
<ide> return;
<ide> }
<ide> PDFJS.externalLinkTarget = value;
<ide> }),
<del> Preferences.get('renderer').then(function resolved(value) {
<del> self.viewerPrefs['renderer'] = value;
<add> preferences.get('renderer').then(function resolved(value) {
<add> viewerPrefs['renderer'] = value;
<ide> }),
<del> Preferences.get('renderInteractiveForms').then(function resolved(value) {
<del> self.viewerPrefs['renderInteractiveForms'] = value;
<add> preferences.get('renderInteractiveForms').then(function resolved(value) {
<add> viewerPrefs['renderInteractiveForms'] = value;
<ide> }),
<del> Preferences.get('disablePageLabels').then(function resolved(value) {
<del> self.viewerPrefs['disablePageLabels'] = value;
<add> preferences.get('disablePageLabels').then(function resolved(value) {
<add> viewerPrefs['disablePageLabels'] = value;
<ide> }),
<del> Preferences.get('enablePrintAutoRotate').then(function resolved(value) {
<del> self.viewerPrefs['enablePrintAutoRotate'] = value;
<add> preferences.get('enablePrintAutoRotate').then(function resolved(value) {
<add> viewerPrefs['enablePrintAutoRotate'] = value;
<ide> }),
<ide> ]).catch(function (reason) { });
<ide> },
<ide> var PDFViewerApplication = {
<ide> var self = this;
<ide> var appConfig = this.appConfig;
<ide>
<del> return new Promise(function (resolve, reject) {
<add> return new Promise((resolve, reject) => {
<ide> var eventBus = appConfig.eventBus || getGlobalEventBus();
<ide> self.eventBus = eventBus;
<ide>
<ide> var PDFViewerApplication = {
<ide> self.overlayManager = OverlayManager;
<ide>
<ide> self.handTool = new HandTool({
<del> container: container,
<del> eventBus: eventBus,
<add> container,
<add> eventBus,
<add> preferences: this.preferences,
<ide> });
<ide>
<ide> self.pdfDocumentProperties =
<ide> var PDFViewerApplication = {
<ide> }
<ide> if (this.pdfLoadingTask) {
<ide> // We need to destroy already opened document.
<del> return this.close().then(function () {
<add> return this.close().then(() => {
<ide> // Reload the preferences if a document was previously opened.
<del> Preferences.reload();
<add> this.preferences.reload();
<ide> // ... and repeat the open() call.
<ide> return this.open(file, args);
<del> }.bind(this));
<add> });
<ide> }
<ide>
<ide> var parameters = Object.create(null), scale;
<ide><path>web/chromecom.js
<ide> /* globals chrome */
<ide>
<ide> import { DefaultExternalServices, PDFViewerApplication } from './app';
<add>import { BasePreferences } from './preferences';
<ide> import { DownloadManager } from './download_manager';
<ide> import { OverlayManager } from './overlay_manager';
<ide> import { PDFJS } from './pdfjs';
<del>import { Preferences } from './preferences';
<ide>
<ide> if (typeof PDFJSDev === 'undefined' || !PDFJSDev.test('CHROME')) {
<ide> throw new Error('Module "pdfjs-web/chromecom" shall not be used outside ' +
<ide> function setReferer(url, callback) {
<ide> // chrome.storage.local to chrome.storage.sync when needed.
<ide> var storageArea = chrome.storage.sync || chrome.storage.local;
<ide>
<del>Preferences._writeToStorage = function (prefObj) {
<del> return new Promise(function (resolve) {
<del> if (prefObj === Preferences.defaults) {
<del> var keysToRemove = Object.keys(Preferences.defaults);
<del> // If the storage is reset, remove the keys so that the values from
<del> // managed storage are applied again.
<del> storageArea.remove(keysToRemove, function() {
<del> resolve();
<del> });
<del> } else {
<del> storageArea.set(prefObj, function() {
<del> resolve();
<del> });
<del> }
<del> });
<del>};
<add>class ChromePreferences extends BasePreferences {
<add> _writeToStorage(prefObj) {
<add> return new Promise((resolve) => {
<add> if (prefObj === this.defaults) {
<add> var keysToRemove = Object.keys(this.defaults);
<add> // If the storage is reset, remove the keys so that the values from
<add> // managed storage are applied again.
<add> storageArea.remove(keysToRemove, function() {
<add> resolve();
<add> });
<add> } else {
<add> storageArea.set(prefObj, function() {
<add> resolve();
<add> });
<add> }
<add> });
<add> }
<ide>
<del>Preferences._readFromStorage = function (prefObj) {
<del> return new Promise(function (resolve) {
<del> if (chrome.storage.managed) {
<del> // Get preferences as set by the system administrator.
<del> // See extensions/chromium/preferences_schema.json for more information.
<del> // These preferences can be overridden by the user.
<del> chrome.storage.managed.get(Preferences.defaults, getPreferences);
<del> } else {
<del> // Managed storage not supported, e.g. in old Chromium versions.
<del> getPreferences(Preferences.defaults);
<del> }
<add> _readFromStorage(prefObj) {
<add> return new Promise((resolve) => {
<add> var getPreferences = (defaultPrefs) => {
<add> if (chrome.runtime.lastError) {
<add> // Managed storage not supported, e.g. in Opera.
<add> defaultPrefs = this.defaults;
<add> }
<add> storageArea.get(defaultPrefs, function(readPrefs) {
<add> resolve(readPrefs);
<add> });
<add> };
<ide>
<del> function getPreferences(defaultPrefs) {
<del> if (chrome.runtime.lastError) {
<del> // Managed storage not supported, e.g. in Opera.
<del> defaultPrefs = Preferences.defaults;
<add> if (chrome.storage.managed) {
<add> // Get preferences as set by the system administrator.
<add> // See extensions/chromium/preferences_schema.json for more information.
<add> // These preferences can be overridden by the user.
<add> chrome.storage.managed.get(this.defaults, getPreferences);
<add> } else {
<add> // Managed storage not supported, e.g. in old Chromium versions.
<add> getPreferences(this.defaults);
<ide> }
<del> storageArea.get(defaultPrefs, function(readPrefs) {
<del> resolve(readPrefs);
<del> });
<del> }
<del> });
<del>};
<add> });
<add> }
<add>}
<ide>
<ide> var ChromeExternalServices = Object.create(DefaultExternalServices);
<ide> ChromeExternalServices.initPassiveLoading = function (callbacks) {
<ide> ChromeExternalServices.initPassiveLoading = function (callbacks) {
<ide> ChromeExternalServices.createDownloadManager = function() {
<ide> return new DownloadManager();
<ide> };
<add>ChromeExternalServices.createPreferences = function() {
<add> return new ChromePreferences();
<add>};
<ide> PDFViewerApplication.externalServices = ChromeExternalServices;
<ide>
<ide> export {
<ide><path>web/firefoxcom.js
<ide> * limitations under the License.
<ide> */
<ide>
<del>import {
<del> createObjectURL, PDFDataRangeTransport, shadow
<del>} from './pdfjs';
<add>import { createObjectURL, PDFDataRangeTransport, shadow } from './pdfjs';
<add>import { BasePreferences } from './preferences';
<ide> import { PDFViewerApplication } from './app';
<del>import { Preferences } from './preferences';
<ide>
<ide> if (typeof PDFJSDev === 'undefined' ||
<ide> !PDFJSDev.test('FIREFOX || MOZCENTRAL')) {
<ide> var DownloadManager = (function DownloadManagerClosure() {
<ide> return DownloadManager;
<ide> })();
<ide>
<del>Preferences._writeToStorage = function (prefObj) {
<del> return new Promise(function (resolve) {
<del> FirefoxCom.request('setPreferences', prefObj, resolve);
<del> });
<del>};
<add>class FirefoxPreferences extends BasePreferences {
<add> _writeToStorage(prefObj) {
<add> return new Promise(function(resolve) {
<add> FirefoxCom.request('setPreferences', prefObj, resolve);
<add> });
<add> }
<ide>
<del>Preferences._readFromStorage = function (prefObj) {
<del> return new Promise(function (resolve) {
<del> FirefoxCom.request('getPreferences', prefObj, function (prefStr) {
<del> var readPrefs = JSON.parse(prefStr);
<del> resolve(readPrefs);
<add> _readFromStorage(prefObj) {
<add> return new Promise(function(resolve) {
<add> FirefoxCom.request('getPreferences', prefObj, function (prefStr) {
<add> var readPrefs = JSON.parse(prefStr);
<add> resolve(readPrefs);
<add> });
<ide> });
<del> });
<del>};
<add> }
<add>}
<ide>
<ide> (function listenFindEvents() {
<ide> var events = [
<ide> PDFViewerApplication.externalServices = {
<ide> return new DownloadManager();
<ide> },
<ide>
<add> createPreferences() {
<add> return new FirefoxPreferences();
<add> },
<add>
<ide> get supportsIntegratedFind() {
<ide> var support = FirefoxCom.requestSync('supportsIntegratedFind');
<ide> return shadow(this, 'supportsIntegratedFind', support);
<ide><path>web/genericcom.js
<ide> */
<ide>
<ide> import { DefaultExternalServices, PDFViewerApplication } from './app';
<add>import { BasePreferences } from './preferences';
<ide> import { DownloadManager } from './download_manager';
<ide>
<ide> if (typeof PDFJSDev !== 'undefined' && !PDFJSDev.test('GENERIC')) {
<ide> if (typeof PDFJSDev !== 'undefined' && !PDFJSDev.test('GENERIC')) {
<ide>
<ide> var GenericCom = {};
<ide>
<add>class GenericPreferences extends BasePreferences {
<add> _writeToStorage(prefObj) {
<add> return new Promise(function(resolve) {
<add> localStorage.setItem('pdfjs.preferences', JSON.stringify(prefObj));
<add> resolve();
<add> });
<add> }
<add>
<add> _readFromStorage(prefObj) {
<add> return new Promise(function(resolve) {
<add> var readPrefs = JSON.parse(localStorage.getItem('pdfjs.preferences'));
<add> resolve(readPrefs);
<add> });
<add> }
<add>}
<add>
<ide> var GenericExternalServices = Object.create(DefaultExternalServices);
<del>GenericExternalServices.createDownloadManager = function () {
<add>GenericExternalServices.createDownloadManager = function() {
<ide> return new DownloadManager();
<ide> };
<add>GenericExternalServices.createPreferences = function() {
<add> return new GenericPreferences();
<add>};
<ide> PDFViewerApplication.externalServices = GenericExternalServices;
<ide>
<ide> export {
<ide><path>web/hand_tool.js
<ide>
<ide> import { GrabToPan } from './grab_to_pan';
<ide> import { localized } from './ui_utils';
<del>import { Preferences } from './preferences';
<ide>
<ide> /**
<ide> * @typedef {Object} HandToolOptions
<ide> var HandTool = (function HandToolClosure() {
<ide> function HandTool(options) {
<ide> this.container = options.container;
<ide> this.eventBus = options.eventBus;
<add> var preferences = options.preferences;
<ide>
<ide> this.wasActive = false;
<ide>
<ide> var HandTool = (function HandToolClosure() {
<ide>
<ide> this.eventBus.on('togglehandtool', this.toggle.bind(this));
<ide>
<del> Promise.all([localized, Preferences.get('enableHandToolOnLoad')]).then(
<del> function resolved(values) {
<del> if (values[1] === true) {
<del> this.handTool.activate();
<del> }
<del> }.bind(this)).catch(function rejected(reason) { });
<add> Promise.all([localized,
<add> preferences.get('enableHandToolOnLoad')]).then((values) => {
<add> if (values[1] === true) {
<add> this.handTool.activate();
<add> }
<add> }).catch(function rejected(reason) { });
<ide>
<ide> this.eventBus.on('presentationmodechanged', function (e) {
<ide> if (e.switchInProgress) {
<ide><path>web/preferences.js
<ide> function getDefaultPreferences() {
<ide> try {
<ide> resolve(JSON.parse(xhr.responseText));
<ide> } catch (e) {
<del> console.error('Unable to load default preferences: ' + e);
<add> console.error(`Unable to load default preferences: ${e}`);
<ide> resolve({});
<ide> }
<ide> };
<ide> function cloneObj(obj) {
<ide> }
<ide>
<ide> /**
<del> * Preferences - Utility for storing persistent settings.
<add> * BasePreferences - Abstract base class for storing persistent settings.
<ide> * Used for settings that should be applied to all opened documents,
<ide> * or every time the viewer is loaded.
<ide> */
<del>var Preferences = {
<del> prefs: null,
<del> isInitializedPromiseResolved: false,
<del> initializedPromise: null,
<del>
<del> /**
<del> * Initialize and fetch the current preference values from storage.
<del> * @return {Promise} A promise that is resolved when the preferences
<del> * have been initialized.
<del> */
<del> initialize: function preferencesInitialize() {
<del> return this.initializedPromise = getDefaultPreferences().then(
<del> function (defaults) {
<add>class BasePreferences {
<add> constructor() {
<add> if (this.constructor === BasePreferences) {
<add> throw new Error('Cannot initialize BasePreferences.');
<add> }
<add> this.prefs = null;
<ide>
<add> this._initializedPromise = getDefaultPreferences().then((defaults) => {
<ide> Object.defineProperty(this, 'defaults', {
<ide> value: Object.freeze(defaults),
<ide> writable: false,
<ide> var Preferences = {
<ide>
<ide> this.prefs = cloneObj(defaults);
<ide> return this._readFromStorage(defaults);
<del> }.bind(this)).then(function(prefObj) {
<del> this.isInitializedPromiseResolved = true;
<add> }).then((prefObj) => {
<ide> if (prefObj) {
<ide> this.prefs = prefObj;
<ide> }
<del> }.bind(this));
<del> },
<add> });
<add> }
<ide>
<ide> /**
<ide> * Stub function for writing preferences to storage.
<del> * NOTE: This should be overridden by a build-specific function defined below.
<ide> * @param {Object} prefObj The preferences that should be written to storage.
<ide> * @return {Promise} A promise that is resolved when the preference values
<ide> * have been written.
<ide> */
<del> _writeToStorage: function preferences_writeToStorage(prefObj) {
<del> return Promise.resolve();
<del> },
<add> _writeToStorage(prefObj) {
<add> return Promise.reject(new Error('Not implemented: _writeToStorage'));
<add> }
<ide>
<ide> /**
<ide> * Stub function for reading preferences from storage.
<del> * NOTE: This should be overridden by a build-specific function defined below.
<ide> * @param {Object} prefObj The preferences that should be read from storage.
<ide> * @return {Promise} A promise that is resolved with an {Object} containing
<ide> * the preferences that have been read.
<ide> */
<del> _readFromStorage: function preferences_readFromStorage(prefObj) {
<del> return Promise.resolve();
<del> },
<add> _readFromStorage(prefObj) {
<add> return Promise.reject(new Error('Not implemented: _readFromStorage'));
<add> }
<ide>
<ide> /**
<ide> * Reset the preferences to their default values and update storage.
<ide> * @return {Promise} A promise that is resolved when the preference values
<ide> * have been reset.
<ide> */
<del> reset: function preferencesReset() {
<del> return this.initializedPromise.then(function() {
<add> reset() {
<add> return this._initializedPromise.then(() => {
<ide> this.prefs = cloneObj(this.defaults);
<ide> return this._writeToStorage(this.defaults);
<del> }.bind(this));
<del> },
<add> });
<add> }
<ide>
<ide> /**
<ide> * Replace the current preference values with the ones from storage.
<ide> * @return {Promise} A promise that is resolved when the preference values
<ide> * have been updated.
<ide> */
<del> reload: function preferencesReload() {
<del> return this.initializedPromise.then(function () {
<del> this._readFromStorage(this.defaults).then(function(prefObj) {
<del> if (prefObj) {
<del> this.prefs = prefObj;
<del> }
<del> }.bind(this));
<del> }.bind(this));
<del> },
<add> reload() {
<add> return this._initializedPromise.then(() => {
<add> return this._readFromStorage(this.defaults);
<add> }).then((prefObj) => {
<add> if (prefObj) {
<add> this.prefs = prefObj;
<add> }
<add> });
<add> }
<ide>
<ide> /**
<ide> * Set the value of a preference.
<ide> var Preferences = {
<ide> * @return {Promise} A promise that is resolved when the value has been set,
<ide> * provided that the preference exists and the types match.
<ide> */
<del> set: function preferencesSet(name, value) {
<del> return this.initializedPromise.then(function () {
<add> set(name, value) {
<add> return this._initializedPromise.then(() => {
<ide> if (this.defaults[name] === undefined) {
<del> throw new Error('preferencesSet: \'' + name + '\' is undefined.');
<add> throw new Error(`Set preference: "${name}" is undefined.`);
<ide> } else if (value === undefined) {
<del> throw new Error('preferencesSet: no value is specified.');
<add> throw new Error('Set preference: no value is specified.');
<ide> }
<ide> var valueType = typeof value;
<ide> var defaultType = typeof this.defaults[name];
<ide> var Preferences = {
<ide> if (valueType === 'number' && defaultType === 'string') {
<ide> value = value.toString();
<ide> } else {
<del> throw new Error('Preferences_set: \'' + value + '\' is a \"' +
<del> valueType + '\", expected \"' + defaultType + '\".');
<add> throw new Error(`Set preference: "${value}" is a ${valueType}, ` +
<add> `expected a ${defaultType}.`);
<ide> }
<ide> } else {
<ide> if (valueType === 'number' && (value | 0) !== value) {
<del> throw new Error('Preferences_set: \'' + value +
<del> '\' must be an \"integer\".');
<add> throw new Error(`Set preference: "${value}" must be an integer.`);
<ide> }
<ide> }
<ide> this.prefs[name] = value;
<ide> return this._writeToStorage(this.prefs);
<del> }.bind(this));
<del> },
<add> });
<add> }
<ide>
<ide> /**
<ide> * Get the value of a preference.
<ide> * @param {string} name The name of the preference whose value is requested.
<ide> * @return {Promise} A promise that is resolved with a {boolean|number|string}
<ide> * containing the value of the preference.
<ide> */
<del> get: function preferencesGet(name) {
<del> return this.initializedPromise.then(function () {
<add> get(name) {
<add> return this._initializedPromise.then(() => {
<ide> var defaultValue = this.defaults[name];
<ide>
<ide> if (defaultValue === undefined) {
<del> throw new Error('preferencesGet: \'' + name + '\' is undefined.');
<add> throw new Error(`Get preference: "${name}" is undefined.`);
<ide> } else {
<ide> var prefValue = this.prefs[name];
<ide>
<ide> var Preferences = {
<ide> }
<ide> }
<ide> return defaultValue;
<del> }.bind(this));
<del> }
<del>};
<del>
<del>if (typeof PDFJSDev === 'undefined' ||
<del> !PDFJSDev.test('FIREFOX || MOZCENTRAL || CHROME')) {
<del> Preferences._writeToStorage = function (prefObj) {
<del> return new Promise(function (resolve) {
<del> localStorage.setItem('pdfjs.preferences', JSON.stringify(prefObj));
<del> resolve();
<ide> });
<del> };
<del>
<del> Preferences._readFromStorage = function (prefObj) {
<del> return new Promise(function (resolve) {
<del> var readPrefs = JSON.parse(localStorage.getItem('pdfjs.preferences'));
<del> resolve(readPrefs);
<del> });
<del> };
<add> }
<ide> }
<ide>
<ide> export {
<del> Preferences,
<add> BasePreferences,
<ide> }; | 6 |
Javascript | Javascript | open two same tabs on firefox | 24f1923b1b55f142c39364c88a57b2a1b90d3972 | <ide><path>packages/react-devtools-extensions/popups/shared.js
<ide> document.addEventListener('DOMContentLoaded', function() {
<ide> const location = ln.href;
<ide> ln.onclick = function() {
<ide> chrome.tabs.create({active: true, url: location});
<add> return false;
<ide> };
<ide> })();
<ide> } | 1 |
Ruby | Ruby | fix tests for `cask audit` | c098ce5d16b3b9ca253ca07ffd44cf98cd7366f1 | <ide><path>Library/Homebrew/test/cask/audit_spec.rb
<ide> def include_msg?(messages, msg)
<ide> end
<ide> end
<ide>
<del> matcher :fail_with do |error_msg|
<add> matcher :fail_with do |message|
<ide> match do |audit|
<del> include_msg?(audit.errors, error_msg)
<add> include_msg?(audit.errors, message)
<ide> end
<ide> end
<ide>
<del> matcher :warn_with do |warning_msg|
<add> matcher :warn_with do |message|
<ide> match do |audit|
<del> include_msg?(audit.warnings, warning_msg)
<add> include_msg?(audit.warnings, message)
<ide> end
<ide> end
<ide>
<ide> def include_msg?(messages, msg)
<ide> it "implies `strict`" do
<ide> expect(audit).to be_strict
<ide> end
<add>
<add> it "implies `token_conflicts`" do
<add> expect(audit.token_conflicts?).to be true
<add> end
<ide> end
<ide>
<ide> context "when `online` is specified" do
<ide> def include_msg?(messages, msg)
<ide> expect(audit.download).to be_truthy
<ide> end
<ide> end
<del>
<del> context "when `strict` is specified" do
<del> let(:strict) { true }
<del>
<del> it "implies `token_conflicts`" do
<del> expect(audit.token_conflicts?).to be true
<del> end
<del> end
<ide> end
<ide>
<ide> describe "#result" do
<ide> def tmp_cask(name, text)
<ide> context "when cask token is not lowercase" do
<ide> let(:cask_token) { "Upper-Case" }
<ide>
<del> it "warns about lowercase" do
<del> expect(subject).to warn_with(/token is not lowercase/)
<add> it "fails" do
<add> expect(subject).to fail_with(/lowercase/)
<ide> end
<ide> end
<ide>
<ide> context "when cask token is not ascii" do
<ide> let(:cask_token) { "ascii⌘" }
<ide>
<del> it "warns about ascii" do
<del> expect(subject).to warn_with(/contains non-ascii characters/)
<add> it "fails" do
<add> expect(subject).to fail_with(/contains non-ascii characters/)
<ide> end
<ide> end
<ide>
<ide> context "when cask token has +" do
<ide> let(:cask_token) { "app++" }
<ide>
<del> it "warns about +" do
<del> expect(subject).to warn_with(/\+ should be replaced by -plus-/)
<add> it "fails" do
<add> expect(subject).to fail_with(/\+ should be replaced by -plus-/)
<ide> end
<ide> end
<ide>
<ide> context "when cask token has @" do
<ide> let(:cask_token) { "app@stuff" }
<ide>
<del> it "warns about +" do
<del> expect(subject).to warn_with(/@ should be replaced by -at-/)
<add> it "fails" do
<add> expect(subject).to fail_with(/@ should be replaced by -at-/)
<ide> end
<ide> end
<ide>
<ide> context "when cask token has whitespace" do
<ide> let(:cask_token) { "app stuff" }
<ide>
<del> it "warns about whitespace" do
<del> expect(subject).to warn_with(/whitespace should be replaced by hyphens/)
<add> it "fails" do
<add> expect(subject).to fail_with(/whitespace should be replaced by hyphens/)
<ide> end
<ide> end
<ide>
<ide> context "when cask token has underscores" do
<ide> let(:cask_token) { "app_stuff" }
<ide>
<del> it "warns about underscores" do
<del> expect(subject).to warn_with(/underscores should be replaced by hyphens/)
<add> it "fails" do
<add> expect(subject).to fail_with(/underscores should be replaced by hyphens/)
<ide> end
<ide> end
<ide>
<ide> context "when cask token has non-alphanumeric characters" do
<ide> let(:cask_token) { "app(stuff)" }
<ide>
<del> it "warns about non-alphanumeric characters" do
<del> expect(subject).to warn_with(/should only contain alphanumeric characters and hyphens/)
<add> it "fails" do
<add> expect(subject).to fail_with(/alphanumeric characters and hyphens/)
<ide> end
<ide> end
<ide>
<ide> context "when cask token has double hyphens" do
<ide> let(:cask_token) { "app--stuff" }
<ide>
<del> it "warns about double hyphens" do
<del> expect(subject).to warn_with(/should not contain double hyphens/)
<add> it "fails" do
<add> expect(subject).to fail_with(/should not contain double hyphens/)
<add> end
<add> end
<add>
<add> context "when cask token has leading hyphens" do
<add> let(:cask_token) { "-app" }
<add>
<add> it "fails" do
<add> expect(subject).to fail_with(/should not have leading or trailing hyphens/)
<ide> end
<ide> end
<ide>
<ide> context "when cask token has trailing hyphens" do
<ide> let(:cask_token) { "app-" }
<ide>
<del> it "warns about trailing hyphens" do
<del> expect(subject).to warn_with(/should not have leading or trailing hyphens/)
<add> it "fails" do
<add> expect(subject).to fail_with(/should not have leading or trailing hyphens/)
<ide> end
<ide> end
<ide> end
<ide>
<ide> describe "token bad words" do
<del> let(:strict) { true }
<add> let(:new_cask) { true }
<add> let(:online) { false }
<ide> let(:cask) do
<ide> tmp_cask cask_token.to_s, <<~RUBY
<ide> cask '#{cask_token}' do
<ide> version '1.0'
<ide> sha256 '8dd95daa037ac02455435446ec7bc737b34567afe9156af7d20b2a83805c1d8a'
<ide> url "https://brew.sh/"
<ide> name 'Audit'
<add> desc 'Cask for testing tokens'
<ide> homepage 'https://brew.sh/'
<ide> app 'Audit.app'
<ide> end
<ide> def tmp_cask(name, text)
<ide> context "when cask token contains .app" do
<ide> let(:cask_token) { "token.app" }
<ide>
<del> it "warns about .app" do
<del> expect(subject).to warn_with(/token contains .app/)
<add> it "fails" do
<add> expect(subject).to fail_with(/token contains .app/)
<ide> end
<ide> end
<ide>
<ide> context "when cask token contains version designation" do
<ide> let(:cask_token) { "token-beta" }
<ide>
<del> it "warns about version in token if the cask is from an official tap" do
<add> it "fails if the cask is from an official tap" do
<ide> allow(cask).to receive(:tap).and_return(Tap.fetch("homebrew/cask"))
<ide>
<del> expect(subject).to warn_with(/token contains version designation/)
<add> expect(subject).to fail_with(/token contains version designation/)
<ide> end
<ide>
<del> it "does not warn about version in token if the cask is from the `cask-versions` tap" do
<add> it "does not fail if the cask is from the `cask-versions` tap" do
<ide> allow(cask).to receive(:tap).and_return(Tap.fetch("homebrew/cask-versions"))
<ide>
<del> expect(subject).not_to warn_with(/token contains version designation/)
<add> expect(subject).to pass
<ide> end
<ide> end
<ide>
<ide> context "when cask token contains launcher" do
<ide> let(:cask_token) { "token-launcher" }
<ide>
<del> it "warns about launcher in token" do
<del> expect(subject).to warn_with(/token mentions launcher/)
<add> it "fails" do
<add> expect(subject).to fail_with(/token mentions launcher/)
<ide> end
<ide> end
<ide>
<ide> context "when cask token contains desktop" do
<ide> let(:cask_token) { "token-desktop" }
<ide>
<del> it "warns about desktop in token" do
<del> expect(subject).to warn_with(/token mentions desktop/)
<add> it "fails" do
<add> expect(subject).to fail_with(/token mentions desktop/)
<ide> end
<ide> end
<ide>
<ide> context "when cask token contains platform" do
<ide> let(:cask_token) { "token-osx" }
<ide>
<del> it "warns about platform in token" do
<del> expect(subject).to warn_with(/token mentions platform/)
<add> it "fails" do
<add> expect(subject).to fail_with(/token mentions platform/)
<ide> end
<ide> end
<ide>
<ide> context "when cask token contains architecture" do
<ide> let(:cask_token) { "token-x86" }
<ide>
<del> it "warns about architecture in token" do
<del> expect(subject).to warn_with(/token mentions architecture/)
<add> it "fails" do
<add> expect(subject).to fail_with(/token mentions architecture/)
<ide> end
<ide> end
<ide>
<ide> context "when cask token contains framework" do
<ide> let(:cask_token) { "token-java" }
<ide>
<del> it "warns about framework in token" do
<del> expect(subject).to warn_with(/cask token mentions framework/)
<add> it "fails" do
<add> expect(subject).to fail_with(/cask token mentions framework/)
<ide> end
<ide> end
<ide>
<ide> context "when cask token is framework" do
<ide> let(:cask_token) { "java" }
<ide>
<del> it "does not warn about framework" do
<del> expect(subject).not_to warn_with(/token contains version/)
<add> it "does not fail" do
<add> expect(subject).to pass
<ide> end
<ide> end
<ide> end
<ide> def tmp_cask(name, text)
<ide> end
<ide>
<ide> describe "pkg allow_untrusted checks" do
<del> let(:warning_msg) { "allow_untrusted is not permitted in official Homebrew Cask taps" }
<add> let(:message) { "allow_untrusted is not permitted in official Homebrew Cask taps" }
<ide>
<ide> context "when the Cask has no pkg stanza" do
<ide> let(:cask_token) { "basic-cask" }
<ide>
<del> it { is_expected.not_to warn_with(warning_msg) }
<add> it { is_expected.not_to fail_with(message) }
<ide> end
<ide>
<ide> context "when the Cask does not have allow_untrusted" do
<ide> let(:cask_token) { "with-uninstall-pkgutil" }
<ide>
<del> it { is_expected.not_to warn_with(warning_msg) }
<add> it { is_expected.not_to fail_with(message) }
<ide> end
<ide>
<ide> context "when the Cask has allow_untrusted" do
<ide> let(:cask_token) { "with-allow-untrusted" }
<ide>
<del> it { is_expected.to warn_with(warning_msg) }
<add> it { is_expected.to fail_with(message) }
<ide> end
<ide> end
<ide>
<ide> describe "when the Cask stanza requires uninstall" do
<del> let(:warning_msg) { "installer and pkg stanzas require an uninstall stanza" }
<add> let(:message) { "installer and pkg stanzas require an uninstall stanza" }
<ide>
<ide> context "when the Cask does not require an uninstall" do
<ide> let(:cask_token) { "basic-cask" }
<ide>
<del> it { is_expected.not_to warn_with(warning_msg) }
<add> it { is_expected.not_to fail_with(message) }
<ide> end
<ide>
<ide> context "when the pkg Cask has an uninstall" do
<ide> let(:cask_token) { "with-uninstall-pkgutil" }
<ide>
<del> it { is_expected.not_to warn_with(warning_msg) }
<add> it { is_expected.not_to fail_with(message) }
<ide> end
<ide>
<ide> context "when the installer Cask has an uninstall" do
<ide> let(:cask_token) { "installer-with-uninstall" }
<ide>
<del> it { is_expected.not_to warn_with(warning_msg) }
<add> it { is_expected.not_to fail_with(message) }
<ide> end
<ide>
<ide> context "when the installer Cask does not have an uninstall" do
<ide> let(:cask_token) { "with-installer-manual" }
<ide>
<del> it { is_expected.to warn_with(warning_msg) }
<add> it { is_expected.to fail_with(message) }
<ide> end
<ide>
<ide> context "when the pkg Cask does not have an uninstall" do
<ide> let(:cask_token) { "pkg-without-uninstall" }
<ide>
<del> it { is_expected.to warn_with(warning_msg) }
<add> it { is_expected.to fail_with(message) }
<ide> end
<ide> end
<ide>
<ide> describe "preflight stanza checks" do
<del> let(:warning_msg) { "only a single preflight stanza is allowed" }
<add> let(:message) { "only a single preflight stanza is allowed" }
<ide>
<ide> context "when the Cask has no preflight stanza" do
<ide> let(:cask_token) { "with-zap-rmdir" }
<ide>
<del> it { is_expected.not_to warn_with(warning_msg) }
<add> it { is_expected.not_to fail_with(message) }
<ide> end
<ide>
<ide> context "when the Cask has only one preflight stanza" do
<ide> let(:cask_token) { "with-preflight" }
<ide>
<del> it { is_expected.not_to warn_with(warning_msg) }
<add> it { is_expected.not_to fail_with(message) }
<ide> end
<ide>
<ide> context "when the Cask has multiple preflight stanzas" do
<ide> let(:cask_token) { "with-preflight-multi" }
<ide>
<del> it { is_expected.to warn_with(warning_msg) }
<add> it { is_expected.to fail_with(message) }
<ide> end
<ide> end
<ide>
<del> describe "uninstall_postflight stanza checks" do
<del> let(:warning_msg) { "only a single postflight stanza is allowed" }
<add> describe "postflight stanza checks" do
<add> let(:message) { "only a single postflight stanza is allowed" }
<ide>
<ide> context "when the Cask has no postflight stanza" do
<ide> let(:cask_token) { "with-zap-rmdir" }
<ide>
<del> it { is_expected.not_to warn_with(warning_msg) }
<add> it { is_expected.not_to fail_with(message) }
<ide> end
<ide>
<ide> context "when the Cask has only one postflight stanza" do
<ide> let(:cask_token) { "with-postflight" }
<ide>
<del> it { is_expected.not_to warn_with(warning_msg) }
<add> it { is_expected.not_to fail_with(message) }
<ide> end
<ide>
<ide> context "when the Cask has multiple postflight stanzas" do
<ide> let(:cask_token) { "with-postflight-multi" }
<ide>
<del> it { is_expected.to warn_with(warning_msg) }
<add> it { is_expected.to fail_with(message) }
<ide> end
<ide> end
<ide>
<ide> describe "uninstall stanza checks" do
<del> let(:warning_msg) { "only a single uninstall stanza is allowed" }
<add> let(:message) { "only a single uninstall stanza is allowed" }
<ide>
<ide> context "when the Cask has no uninstall stanza" do
<ide> let(:cask_token) { "with-zap-rmdir" }
<ide>
<del> it { is_expected.not_to warn_with(warning_msg) }
<add> it { is_expected.not_to fail_with(message) }
<ide> end
<ide>
<ide> context "when the Cask has only one uninstall stanza" do
<ide> let(:cask_token) { "with-uninstall-rmdir" }
<ide>
<del> it { is_expected.not_to warn_with(warning_msg) }
<add> it { is_expected.not_to fail_with(message) }
<ide> end
<ide>
<ide> context "when the Cask has multiple uninstall stanzas" do
<ide> let(:cask_token) { "with-uninstall-multi" }
<ide>
<del> it { is_expected.to warn_with(warning_msg) }
<add> it { is_expected.to fail_with(message) }
<ide> end
<ide> end
<ide>
<ide> describe "uninstall_preflight stanza checks" do
<del> let(:warning_msg) { "only a single uninstall_preflight stanza is allowed" }
<add> let(:message) { "only a single uninstall_preflight stanza is allowed" }
<ide>
<ide> context "when the Cask has no uninstall_preflight stanza" do
<ide> let(:cask_token) { "with-zap-rmdir" }
<ide>
<del> it { is_expected.not_to warn_with(warning_msg) }
<add> it { is_expected.not_to fail_with(message) }
<ide> end
<ide>
<ide> context "when the Cask has only one uninstall_preflight stanza" do
<ide> let(:cask_token) { "with-uninstall-preflight" }
<ide>
<del> it { is_expected.not_to warn_with(warning_msg) }
<add> it { is_expected.not_to fail_with(message) }
<ide> end
<ide>
<ide> context "when the Cask has multiple uninstall_preflight stanzas" do
<ide> let(:cask_token) { "with-uninstall-preflight-multi" }
<ide>
<del> it { is_expected.to warn_with(warning_msg) }
<add> it { is_expected.to fail_with(message) }
<ide> end
<ide> end
<ide>
<ide> describe "uninstall_postflight stanza checks" do
<del> let(:warning_msg) { "only a single uninstall_postflight stanza is allowed" }
<add> let(:message) { "only a single uninstall_postflight stanza is allowed" }
<ide>
<ide> context "when the Cask has no uninstall_postflight stanza" do
<ide> let(:cask_token) { "with-zap-rmdir" }
<ide>
<del> it { is_expected.not_to warn_with(warning_msg) }
<add> it { is_expected.not_to fail_with(message) }
<ide> end
<ide>
<ide> context "when the Cask has only one uninstall_postflight stanza" do
<ide> let(:cask_token) { "with-uninstall-postflight" }
<ide>
<del> it { is_expected.not_to warn_with(warning_msg) }
<add> it { is_expected.not_to fail_with(message) }
<ide> end
<ide>
<ide> context "when the Cask has multiple uninstall_postflight stanzas" do
<ide> let(:cask_token) { "with-uninstall-postflight-multi" }
<ide>
<del> it { is_expected.to warn_with(warning_msg) }
<add> it { is_expected.to fail_with(message) }
<ide> end
<ide> end
<ide>
<ide> describe "zap stanza checks" do
<del> let(:warning_msg) { "only a single zap stanza is allowed" }
<add> let(:message) { "only a single zap stanza is allowed" }
<ide>
<ide> context "when the Cask has no zap stanza" do
<ide> let(:cask_token) { "with-uninstall-rmdir" }
<ide>
<del> it { is_expected.not_to warn_with(warning_msg) }
<add> it { is_expected.not_to fail_with(message) }
<ide> end
<ide>
<ide> context "when the Cask has only one zap stanza" do
<ide> let(:cask_token) { "with-zap-rmdir" }
<ide>
<del> it { is_expected.not_to warn_with(warning_msg) }
<add> it { is_expected.not_to fail_with(message) }
<ide> end
<ide>
<ide> context "when the Cask has multiple zap stanzas" do
<ide> let(:cask_token) { "with-zap-multi" }
<ide>
<del> it { is_expected.to warn_with(warning_msg) }
<add> it { is_expected.to fail_with(message) }
<ide> end
<ide> end
<ide>
<ide> describe "version checks" do
<del> let(:error_msg) { "you should use version :latest instead of version 'latest'" }
<add> let(:message) { "you should use version :latest instead of version 'latest'" }
<ide>
<ide> context "when version is 'latest'" do
<ide> let(:cask_token) { "version-latest-string" }
<ide>
<del> it { is_expected.to fail_with(error_msg) }
<add> it { is_expected.to fail_with(message) }
<ide> end
<ide>
<ide> context "when version is :latest" do
<ide> let(:cask_token) { "version-latest-with-checksum" }
<ide>
<del> it { is_expected.not_to fail_with(error_msg) }
<add> it { is_expected.not_to fail_with(message) }
<ide> end
<ide> end
<ide>
<ide> def tmp_cask(name, text)
<ide> end
<ide>
<ide> describe "hosting with appcast checks" do
<del> let(:appcast_warning) { /please add an appcast/ }
<add> let(:message) { /please add an appcast/ }
<ide>
<ide> context "when the download does not use hosting with an appcast" do
<ide> let(:cask_token) { "basic-cask" }
<ide>
<del> it { is_expected.not_to warn_with(appcast_warning) }
<add> it { is_expected.not_to fail_with(message) }
<ide> end
<ide>
<ide> context "when the download uses GitHub releases and has an appcast" do
<ide> let(:cask_token) { "github-with-appcast" }
<ide>
<del> it { is_expected.not_to warn_with(appcast_warning) }
<add> it { is_expected.not_to fail_with(message) }
<ide> end
<ide>
<ide> context "when the download uses GitHub releases and does not have an appcast" do
<ide> let(:cask_token) { "github-without-appcast" }
<ide>
<del> it { is_expected.to warn_with(appcast_warning) }
<add> it { is_expected.to fail_with(message) }
<ide> end
<ide>
<ide> context "when the download is hosted on SourceForge and has an appcast" do
<ide> let(:cask_token) { "sourceforge-with-appcast" }
<ide>
<del> it { is_expected.not_to warn_with(appcast_warning) }
<add> it { is_expected.not_to fail_with(message) }
<ide> end
<ide>
<ide> context "when the download is hosted on SourceForge and does not have an appcast" do
<ide> let(:cask_token) { "sourceforge-correct-url-format" }
<ide>
<del> it { is_expected.to warn_with(appcast_warning) }
<add> it { is_expected.to fail_with(message) }
<ide> end
<ide>
<ide> context "when the download is hosted on DevMate and has an appcast" do
<ide> let(:cask_token) { "devmate-with-appcast" }
<ide>
<del> it { is_expected.not_to warn_with(appcast_warning) }
<add> it { is_expected.not_to fail_with(message) }
<ide> end
<ide>
<ide> context "when the download is hosted on DevMate and does not have an appcast" do
<ide> let(:cask_token) { "devmate-without-appcast" }
<ide>
<del> it { is_expected.to warn_with(appcast_warning) }
<add> it { is_expected.to fail_with(message) }
<ide> end
<ide>
<ide> context "when the download is hosted on HockeyApp and has an appcast" do
<ide> let(:cask_token) { "hockeyapp-with-appcast" }
<ide>
<del> it { is_expected.not_to warn_with(appcast_warning) }
<add> it { is_expected.not_to fail_with(message) }
<ide> end
<ide>
<ide> context "when the download is hosted on HockeyApp and does not have an appcast" do
<ide> let(:cask_token) { "hockeyapp-without-appcast" }
<ide>
<del> it { is_expected.to warn_with(appcast_warning) }
<add> it { is_expected.to fail_with(message) }
<ide> end
<ide> end
<ide>
<ide> describe "latest with appcast checks" do
<del> let(:warning_msg) { "Casks with an appcast should not use version :latest" }
<add> let(:message) { "Casks with an appcast should not use version :latest" }
<ide>
<ide> context "when the Cask is :latest and does not have an appcast" do
<ide> let(:cask_token) { "version-latest" }
<ide>
<del> it { is_expected.not_to warn_with(warning_msg) }
<add> it { is_expected.not_to fail_with(message) }
<ide> end
<ide>
<ide> context "when the Cask is versioned and has an appcast" do
<ide> let(:cask_token) { "with-appcast" }
<ide>
<del> it { is_expected.not_to warn_with(warning_msg) }
<add> it { is_expected.not_to fail_with(message) }
<ide> end
<ide>
<ide> context "when the Cask is :latest and has an appcast" do
<ide> let(:cask_token) { "latest-with-appcast" }
<ide>
<del> it { is_expected.to warn_with(warning_msg) }
<add> it { is_expected.to fail_with(message) }
<ide> end
<ide> end
<ide>
<ide> describe "denylist checks" do
<del> context "when the Cask isn't disallowed" do
<add> context "when the Cask is not on the denylist" do
<ide> let(:cask_token) { "adobe-air" }
<ide>
<ide> it { is_expected.to pass }
<ide> end
<ide>
<del> context "when the Cask is disallowed" do
<add> context "when the Cask is on the denylist" do
<ide> context "and it's in the official Homebrew tap" do
<ide> let(:cask_token) { "adobe-illustrator" }
<ide>
<ide> def tmp_cask(name, text)
<ide> end
<ide>
<ide> describe "latest with auto_updates checks" do
<del> let(:warning_msg) { "Casks with `version :latest` should not use `auto_updates`" }
<add> let(:message) { "Casks with `version :latest` should not use `auto_updates`" }
<ide>
<ide> context "when the Cask is :latest and does not have auto_updates" do
<ide> let(:cask_token) { "version-latest" }
<ide>
<del> it { is_expected.not_to warn_with(warning_msg) }
<add> it { is_expected.to pass }
<ide> end
<ide>
<ide> context "when the Cask is versioned and does not have auto_updates" do
<ide> let(:cask_token) { "basic-cask" }
<ide>
<del> it { is_expected.not_to warn_with(warning_msg) }
<add> it { is_expected.to pass }
<ide> end
<ide>
<ide> context "when the Cask is versioned and has auto_updates" do
<ide> let(:cask_token) { "auto-updates" }
<ide>
<del> it { is_expected.not_to warn_with(warning_msg) }
<add> it { is_expected.to pass }
<ide> end
<ide>
<ide> context "when the Cask is :latest and has auto_updates" do
<ide> let(:cask_token) { "latest-with-auto-updates" }
<ide>
<del> it { is_expected.to warn_with(warning_msg) }
<add> it { is_expected.to fail_with(message) }
<ide> end
<ide> end
<ide>
<ide> describe "preferred download URL formats" do
<del> let(:warning_msg) { /URL format incorrect/ }
<add> let(:message) { /URL format incorrect/ }
<ide>
<ide> context "with incorrect SourceForge URL format" do
<ide> let(:cask_token) { "sourceforge-incorrect-url-format" }
<ide>
<del> it { is_expected.to warn_with(warning_msg) }
<add> it { is_expected.to fail_with(message) }
<ide> end
<ide>
<ide> context "with correct SourceForge URL format" do
<ide> let(:cask_token) { "sourceforge-correct-url-format" }
<ide>
<del> it { is_expected.not_to warn_with(warning_msg) }
<add> it { is_expected.not_to fail_with(message) }
<ide> end
<ide>
<ide> context "with correct SourceForge URL format for version :latest" do
<ide> let(:cask_token) { "sourceforge-version-latest-correct-url-format" }
<ide>
<del> it { is_expected.not_to warn_with(warning_msg) }
<add> it { is_expected.not_to fail_with(message) }
<ide> end
<ide>
<ide> context "with incorrect OSDN URL format" do
<ide> let(:cask_token) { "osdn-incorrect-url-format" }
<ide>
<del> it { is_expected.to warn_with(warning_msg) }
<add> it { is_expected.to fail_with(message) }
<ide> end
<ide>
<ide> context "with correct OSDN URL format" do
<ide> let(:cask_token) { "osdn-correct-url-format" }
<ide>
<del> it { is_expected.not_to warn_with(warning_msg) }
<add> it { is_expected.not_to fail_with(message) }
<ide> end
<ide> end
<ide>
<ide> def tmp_cask(name, text)
<ide> context "when cask token does not conflict with a core formula" do
<ide> let(:formula_names) { %w[other-formula] }
<ide>
<del> it { is_expected.not_to warn_with(/possible duplicate/) }
<add> it { is_expected.to pass }
<ide> end
<ide> end
<ide>
<ide> def tmp_cask(name, text)
<ide> let(:cask) { Cask::CaskLoader.load(cask_token) }
<ide> let(:download_double) { instance_double(Cask::Download) }
<ide> let(:verify) { class_double(Cask::Verify).as_stubbed_const }
<del> let(:error_msg) { "Download Failed" }
<add> let(:message) { "Download Failed" }
<ide>
<ide> before do
<ide> allow(audit).to receive(:download).and_return(download_double)
<add> allow(audit).to receive(:check_https_availability)
<ide> end
<ide>
<ide> it "when download and verification succeed it does not fail" do
<ide> expect(download_double).to receive(:perform)
<ide> expect(verify).to receive(:all)
<del> expect(subject).not_to fail_with(/#{error_msg}/)
<add> expect(subject).to pass
<ide> end
<ide>
<del> it "when download fails it does not fail" do
<del> expect(download_double).to receive(:perform).and_raise(StandardError.new(error_msg))
<del> expect(subject).to fail_with(/#{error_msg}/)
<add> it "when download fails it fails" do
<add> expect(download_double).to receive(:perform).and_raise(StandardError.new(message))
<add> expect(subject).to fail_with(/#{message}/)
<ide> end
<ide>
<del> it "when verification fails it does not fail" do
<add> it "when verification fails it fails" do
<ide> expect(download_double).to receive(:perform)
<del> expect(verify).to receive(:all).and_raise(StandardError.new(error_msg))
<del> expect(subject).to fail_with(/#{error_msg}/)
<add> expect(verify).to receive(:all).and_raise(StandardError.new(message))
<add> expect(subject).to fail_with(/#{message}/)
<ide> end
<ide> end
<ide>
<ide> def tmp_cask(name, text)
<ide> context "when `new_cask` is true" do
<ide> let(:new_cask) { true }
<ide>
<del> it "warns" do
<del> expect(subject).to warn_with(/should have a description/)
<add> it "fails" do
<add> expect(subject).to fail_with(/should have a description/)
<ide> end
<ide> end
<ide>
<del> context "when `new_cask` is true" do
<add> context "when `new_cask` is false" do
<ide> let(:new_cask) { false }
<ide>
<del> it "does not warn" do
<del> expect(subject).not_to warn_with(/should have a description/)
<add> it "warns" do
<add> expect(subject).to warn_with(/should have a description/)
<ide> end
<ide> end
<ide> end
<ide> def tmp_cask(name, text)
<ide> RUBY
<ide> end
<ide>
<del> it "does not warn" do
<del> expect(subject).not_to warn_with(/should have a description/)
<add> it "passes" do
<add> expect(subject).to pass
<ide> end
<ide> end
<ide> end
<ide><path>Library/Homebrew/test/cask/cmd/cat_spec.rb
<ide> sha256 "8c62a2b791cf5f0da6066a0a4b6e85f62949cd60975da062df44adf887f4370b"
<ide>
<ide> url "https://brew.sh/TestCask.dmg"
<add> name "Basic Cask"
<add> desc "Cask for testing basic functionality"
<ide> homepage "https://brew.sh/"
<ide>
<ide> app "TestCask.app"
<ide><path>Library/Homebrew/test/support/fixtures/cask/Casks/adobe-air.rb
<ide> sha256 "8c62a2b791cf5f0da6066a0a4b6e85f62949cd60975da062df44adf887f4370b"
<ide>
<ide> url "https://brew.sh/TestCask.dmg"
<del> name "Adobe Air"
<add> name "Adobe AIR"
<add> desc "Cross-platform application runtime"
<ide> homepage "https://brew.sh/"
<ide>
<ide> app "TestCask.app"
<ide><path>Library/Homebrew/test/support/fixtures/cask/Casks/auto-updates.rb
<ide> sha256 "5633c3a0f2e572cbf021507dec78c50998b398c343232bdfc7e26221d0a5db4d"
<ide>
<ide> url "file://#{TEST_FIXTURE_DIR}/cask/MyFancyApp.zip"
<add> name "Auto-Updates"
<add> desc "Cask which auto-updates"
<ide> homepage "https://brew.sh/MyFancyApp"
<ide>
<ide> auto_updates true
<ide><path>Library/Homebrew/test/support/fixtures/cask/Casks/basic-cask.rb
<ide> sha256 "8c62a2b791cf5f0da6066a0a4b6e85f62949cd60975da062df44adf887f4370b"
<ide>
<ide> url "https://brew.sh/TestCask.dmg"
<add> name "Basic Cask"
<add> desc "Cask for testing basic functionality"
<ide> homepage "https://brew.sh/"
<ide>
<ide> app "TestCask.app"
<ide><path>Library/Homebrew/test/support/fixtures/cask/Casks/latest-with-auto-updates.rb
<ide> sha256 :no_check
<ide>
<ide> url "file://#{TEST_FIXTURE_DIR}/cask/caffeine.zip"
<add> name "Latest with Auto-Updates"
<add> desc "Unversioned cask which auto-updates"
<ide> homepage "https://brew.sh/latest-with-auto-updates"
<ide>
<ide> auto_updates true
<ide><path>Library/Homebrew/test/support/fixtures/cask/Casks/pkg-without-uninstall.rb
<ide> sha256 "8c62a2b791cf5f0da6066a0a4b6e85f62949cd60975da062df44adf887f4370b"
<ide>
<ide> url "file://#{TEST_FIXTURE_DIR}/cask/MyFancyPkg.zip"
<add> name "PKG without Uninstall"
<add> desc "Cask with a package installer and no uninstall stanza"
<ide> homepage "https://brew.sh/fancy-pkg"
<ide>
<ide> pkg "Fancy.pkg"
<ide><path>Library/Homebrew/test/support/fixtures/cask/Casks/version-latest.rb
<ide> sha256 :no_check
<ide>
<ide> url "file://#{TEST_FIXTURE_DIR}/cask/caffeines.zip"
<add> name "Version Latest"
<add> desc "Unversioned cask"
<ide> homepage "https://brew.sh/"
<ide>
<ide> app "Caffeine Mini.app"
<ide><path>Library/Homebrew/test/support/fixtures/cask/Casks/with-binary.rb
<ide> sha256 "d5b2dfbef7ea28c25f7a77cd7fa14d013d82b626db1d82e00e25822464ba19e2"
<ide>
<ide> url "file://#{TEST_FIXTURE_DIR}/cask/AppWithBinary.zip"
<add> name "With Binary"
<add> desc "Cask with a binary stanza"
<ide> homepage "https://brew.sh/with-binary"
<ide>
<ide> app "App.app"
<ide><path>Library/Homebrew/test/support/fixtures/cask/Casks/with-installer-manual.rb
<ide> sha256 "67cdb8a02803ef37fdbf7e0be205863172e41a561ca446cd84f0d7ab35a99d94"
<ide>
<ide> url "file://#{TEST_FIXTURE_DIR}/cask/caffeine.zip"
<add> name "With Installer Manual"
<add> desc "Cask with a manual installer"
<ide> homepage "https://brew.sh/"
<ide>
<ide> installer manual: "Caffeine.app"
<ide><path>Library/Homebrew/test/support/fixtures/third-party/Casks/pharo.rb
<ide>
<ide> url "https://brew.sh/ThirdParty.dmg"
<ide> name "Pharo"
<add> desc "Cask from a third-party tap"
<ide> homepage "https://brew.sh/"
<ide>
<ide> app "ThirdParty.app" | 11 |
Text | Text | fix typo in active support changelog. [ci skip] | e94be7b04cda949b2188a910257b984f98127a31 | <ide><path>activesupport/CHANGELOG.md
<del>* Remove deprecated string based terminators for y`ActiveSupport::Callbacks`.
<add>* Remove deprecated string based terminators for `ActiveSupport::Callbacks`.
<ide>
<ide> *Yves Senn*
<ide> | 1 |
Text | Text | suggest jest as default | e0168d6750d6f24f2102b37f6bad4eb5ccc4c033 | <ide><path>docs/recipes/WritingTests.md
<ide> Because most of the Redux code you write are functions, and many of them are pur
<ide>
<ide> ### Setting Up
<ide>
<del>We recommend [Mocha](http://mochajs.org/) as the testing engine.
<add>We recommend [Jest](http://facebook.github.io/jest/) as the testing engine.
<ide> Note that it runs in a Node environment, so you won't have access to the DOM.
<ide>
<ide> ```
<del>npm install --save-dev mocha
<add>npm install --save-dev jest
<ide> ```
<ide>
<del>To use it together with [Babel](http://babeljs.io), you will need to install `babel-register`:
<add>To use it together with [Babel](http://babeljs.io), you will need to install `babel-jest`:
<ide>
<ide> ```js
<del>npm install --save-dev babel-register
<add>npm install --save-dev babel-jest
<ide> ```
<ide>
<ide> and configure it to use ES2015 features in `.babelrc`:
<ide> Then, add this to `scripts` in your `package.json`:
<ide> ...
<ide> "scripts": {
<ide> ...
<del> "test": "mocha --compilers js:babel-register --recursive",
<add> "test": "jest",
<ide> "test:watch": "npm test -- --watch"
<ide> },
<ide> ...
<ide> export function addTodo(text) {
<ide> can be tested like:
<ide>
<ide> ```js
<del>import expect from 'expect'
<ide> import * as actions from '../../actions/TodoActions'
<ide> import * as types from '../../constants/ActionTypes'
<ide>
<ide> export default function todos(state = initialState, action) {
<ide> can be tested like:
<ide>
<ide> ```js
<del>import expect from 'expect'
<ide> import reducer from '../../reducers/todos'
<ide> import * as types from '../../constants/ActionTypes'
<ide>
<ide> export default Header
<ide> can be tested like:
<ide>
<ide> ```js
<del>import expect from 'expect'
<ide> import React from 'react'
<ide> import { shallow } from 'enzyme'
<ide> import Header from '../../components/Header'
<ide> Middleware functions wrap behavior of `dispatch` calls in Redux, so to test this
<ide> #### Example
<ide>
<ide> ```js
<del>import expect from 'expect'
<ide> import * as types from '../../constants/ActionTypes'
<ide> import singleDispatch from '../../middleware/singleDispatch'
<ide> | 1 |
Javascript | Javascript | remove trailing whitespace | 4561c61bcf9c84ae146e509cbd9e7a5b9a5245ba | <ide><path>packages/ember-htmlbars/lib/helpers/with.js
<ide> import shouldDisplay from "ember-views/streams/should_display";
<ide> {{#if isProlificBlogger}}
<ide> {{user.name}} has written more than {{posts.model.length}} blog posts!
<ide> {{else}}
<del> {{user.name}} has only written {{posts.model.length}} blog posts.
<add> {{user.name}} has only written {{posts.model.length}} blog posts.
<ide> {{/if}}
<ide> {{/with}}
<ide> ``` | 1 |
Python | Python | fix reading strings from conf | 4d9f36b45ad48b9c04cd86a8801bc19fb273c256 | <ide><path>airflow/bin/cli.py
<ide> def run(args, dag=None):
<ide> remote_log_location,
<ide> append=True)
<ide> # Other
<del> elif remote_base:
<add> elif remote_base and remote_base != 'None':
<ide> logging.error(
<ide> 'Unsupported remote log location: {}'.format(remote_base))
<ide>
<ide><path>airflow/configuration.py
<ide> def run_command(command):
<ide> # must supply a remote location URL (starting with either 's3://...' or
<ide> # 'gs://...') and an Airflow connection id that provides access to the storage
<ide> # location.
<del>remote_base_log_folder = None
<del>remote_log_conn_id = None
<add>remote_base_log_folder =
<add>remote_log_conn_id =
<ide> # Use server-side encryption for logs stored in S3
<ide> encrypt_s3_logs = False
<ide> # deprecated option for remote log storage, use remote_base_log_folder instead!
<del># s3_log_folder = None
<add># s3_log_folder =
<ide>
<ide> # The executor class that airflow should use. Choices include
<ide> # SequentialExecutor, LocalExecutor, CeleryExecutor
<ide><path>airflow/utils/logging.py
<ide> def write(self, log, remote_log_location, append=False):
<ide> log,
<ide> key=remote_log_location,
<ide> replace=True,
<del> encrypt=configuration.get('core', 'ENCRYPT_S3_LOGS'))
<add> encrypt=configuration.get_bool('core', 'ENCRYPT_S3_LOGS'))
<ide> return
<ide> except:
<ide> pass | 3 |
Ruby | Ruby | add support for redis channels | 4f36bc66e640cdd4e42ab1174cb61cd7e3b17b0d | <ide><path>lib/action_cable/channel.rb
<ide> module ActionCable
<ide> module Channel
<ide> autoload :Callbacks, 'action_cable/channel/callbacks'
<add> autoload :Redis, 'action_cable/channel/redis'
<ide> autoload :Base, 'action_cable/channel/base'
<ide> end
<ide> end
<ide><path>lib/action_cable/channel/base.rb
<ide> module Channel
<ide>
<ide> class Base
<ide> include Callbacks
<add> include Redis
<ide>
<ide> on_subscribe :start_periodic_timers
<ide> on_unsubscribe :stop_periodic_timers
<ide><path>lib/action_cable/channel/redis.rb
<add>module ActionCable
<add> module Channel
<add>
<add> module Redis
<add> extend ActiveSupport::Concern
<add>
<add> included do
<add> on_unsubscribe :unsubscribe_from_redis_channels
<add> end
<add>
<add> def subscribe_to(redis_channel, callback = nil)
<add> @_redis_channels ||= []
<add> @_redis_channels << redis_channel
<add>
<add> callback ||= -> (message) { broadcast ActiveSupport::JSON.decode(message) }
<add> redis.pubsub.subscribe(redis_channel, &callback)
<add> end
<add>
<add> protected
<add> def unsubscribe_from_redis_channels
<add> if @_redis_channels
<add> @_redis_channels.each { |channel| @connection.pubsub.unsubscribe(channel) }
<add> end
<add> end
<add>
<add> def redis
<add> @connection.redis
<add> end
<add> end
<add>
<add> end
<add>end
<ide>\ No newline at end of file | 3 |
Text | Text | remove legacy -j test.py option from building.md | 506816891b3a5908e640ae5786ac5166cfdae76f | <ide><path>BUILDING.md
<ide> by providing the name of a subsystem:
<ide>
<ide> ```text
<ide> $ make coverage-clean
<del>$ NODE_V8_COVERAGE=coverage/tmp tools/test.py -J --mode=release child-process
<add>$ NODE_V8_COVERAGE=coverage/tmp tools/test.py --mode=release child-process
<ide> $ make coverage-report-js
<ide> ```
<ide> | 1 |
Javascript | Javascript | remove textdecoder feature tests | b57d35034c49c5abb10902122cdb0319e012fe98 | <ide><path>test/features/tests.js
<ide> var tests = [
<ide> impact: 'Important',
<ide> area: 'Core'
<ide> },
<del> {
<del> id: 'TextDecoder',
<del> name: 'TextDecoder is present',
<del> run: function () {
<del> if (typeof TextDecoder != 'undefined')
<del> return { output: 'Success', emulated: '' };
<del> else
<del> return { output: 'Failed', emulated: 'No' };
<del> },
<del> impact: 'Critical',
<del> area: 'Core'
<del> },
<ide> {
<ide> id: 'Worker',
<ide> name: 'Worker is present',
<ide> var tests = [
<ide> impact: 'Important',
<ide> area: 'Core'
<ide> },
<del> {
<del> id: 'Worker-TextDecoder',
<del> name: 'TextDecoder is present in web workers',
<del> run: function () {
<del> if (typeof Worker == 'undefined')
<del> return { output: 'Skipped', emulated: '' };
<del>
<del> var emulatable = typeof TextDecoder !== 'undefined';
<del> try {
<del> var worker = new Worker('worker-stub.js');
<del>
<del> var promise = new Promise();
<del> var timeout = setTimeout(function () {
<del> promise.resolve({ output: 'Failed',
<del> emulated: emulatable ? '?' : 'No' });
<del> }, 5000);
<del>
<del> worker.addEventListener('message', function (e) {
<del> var data = e.data;
<del> if (data.action === 'TextDecoder') {
<del> if (data.result) {
<del> promise.resolve({ output: 'Success', emulated: '' });
<del> } else {
<del> promise.resolve({ output: 'Failed',
<del> emulated: data.emulated ? 'Yes' : 'No' });
<del> }
<del> } else {
<del> promise.resolve({ output: 'Failed',
<del> emulated: emulatable ? 'Yes' : 'No' });
<del> }
<del> }, false);
<del> worker.postMessage({action: 'TextDecoder'});
<del> return promise;
<del> } catch (e) {
<del> return { output: 'Failed', emulated: emulatable ? 'Yes' : 'No' };
<del> }
<del> },
<del> impact: 'Important',
<del> area: 'Core'
<del> },
<ide> {
<ide> id: 'Canvas Blend Mode',
<ide> name: 'Canvas supports extended blend modes',
<ide><path>test/features/worker-stub.js
<ide> onmessage = function (e) {
<ide> }
<ide> postMessage({action: 'xhr', result: responseExists});
<ide> break;
<del> case 'TextDecoder':
<del> postMessage({action: 'TextDecoder',
<del> result: typeof TextDecoder !== 'undefined',
<del> emulated: typeof FileReaderSync !== 'undefined'});
<del> break;
<ide> }
<ide> };
<ide> | 2 |
Javascript | Javascript | add docs for ngvalue directive | 805efb4c47f457004a5b35f532712b9b62105a99 | <ide><path>src/ng/directive/input.js
<ide> var ngListDirective = function() {
<ide>
<ide>
<ide> var CONSTANT_VALUE_REGEXP = /^(true|false|\d+)$/;
<del>
<add>/**
<add> * @ngdoc directive
<add> * @name ng.directive:ngValue
<add> *
<add> * @description
<add> * Binds the given expression to the value of `input[select]` or `input[radio]`, so
<add> * that when the element is selected, the `ngModel` of that element is set to the
<add> * bound value.
<add> *
<add> * `ngValue` is useful when dynamically generating lists of radio buttons using `ng-repeat`, as
<add> * shown below.
<add> *
<add> * @element input
<add> * @param {string=} ngValue angular expression, whose value will be bound to the `value` attribute
<add> * of the `input` element
<add> *
<add> * @example
<add> <doc:example>
<add> <doc:source>
<add> <script>
<add> function Ctrl($scope) {
<add> $scope.names = ['pizza', 'unicorns', 'robots'];
<add> $scope.my = { favorite: 'unicorns' };
<add> }
<add> </script>
<add> <form ng-controller="Ctrl">
<add> <h2>Which is your favorite?</h2>
<add> <label ng-repeat="name in names" for="{{name}}">
<add> {{name}}
<add> <input type="radio"
<add> ng-model="my.favorite"
<add> ng-value="name"
<add> id="{{name}}"
<add> name="favorite">
<add> </label>
<add> </span>
<add> <div>You chose {{my.favorite}}</div>
<add> </form>
<add> </doc:source>
<add> <doc:scenario>
<add> it('should initialize to model', function() {
<add> expect(binding('my.favorite')).toEqual('unicorns');
<add> });
<add> it('should bind the values to the inputs', function() {
<add> input('my.favorite').select('pizza');
<add> expect(binding('my.favorite')).toEqual('pizza');
<add> });
<add> </doc:scenario>
<add> </doc:example>
<add> */
<ide> var ngValueDirective = function() {
<ide> return {
<ide> priority: 100,
<ide> compile: function(tpl, tplAttr) {
<ide> if (CONSTANT_VALUE_REGEXP.test(tplAttr.ngValue)) {
<del> return function(scope, elm, attr) {
<add> return function ngValueConstantLink(scope, elm, attr) {
<ide> attr.$set('value', scope.$eval(attr.ngValue));
<ide> };
<ide> } else {
<del> return function(scope, elm, attr) {
<add> return function ngValueLink(scope, elm, attr) {
<ide> scope.$watch(attr.ngValue, function valueWatchAction(value) {
<ide> attr.$set('value', value);
<ide> }); | 1 |
Javascript | Javascript | try another approach at fixing windows flow issues | 025d867dceccfa54cfa12122a982ad3c3eff995a | <ide><path>packages/events/TopLevelEventTypes.js
<ide> * @flow
<ide> */
<ide>
<del>import type {DOMTopLevelEventType} from 'react-dom/src/events/DOMTopLevelEventTypes';
<del>
<ide> type RNTopLevelEventType =
<ide> | 'topMouseDown'
<ide> | 'topMouseMove'
<ide> type RNTopLevelEventType =
<ide> | 'topTouchMove'
<ide> | 'topTouchStart';
<ide>
<add>export opaque type DOMTopLevelEventType = string;
<add>
<add>export function unsafeCastStringToDOMTopLevelType(
<add> topLevelType: string,
<add>): DOMTopLevelEventType {
<add> return topLevelType;
<add>}
<add>
<add>export function unsafeCastDOMTopLevelTypeToString(
<add> topLevelType: DOMTopLevelEventType,
<add>): string {
<add> return topLevelType;
<add>}
<add>
<ide> export type TopLevelType = DOMTopLevelEventType | RNTopLevelEventType;
<ide><path>packages/react-dom/src/events/DOMTopLevelEventTypes.js
<ide> * @flow
<ide> */
<ide>
<add>import type {DOMTopLevelEventType} from 'events/TopLevelEventTypes';
<add>
<add>import {
<add> unsafeCastStringToDOMTopLevelType,
<add> unsafeCastDOMTopLevelTypeToString,
<add>} from 'events/TopLevelEventTypes';
<ide> import getVendorPrefixedEventName from './getVendorPrefixedEventName';
<ide>
<ide> /**
<ide> import getVendorPrefixedEventName from './getVendorPrefixedEventName';
<ide> * of a constant in this module.
<ide> */
<ide>
<del>// eslint-disable-next-line no-undef
<del>export opaque type DOMTopLevelEventType =
<del> | 'abort'
<del> | 'animationend'
<del> | 'animationiteration'
<del> | 'animationstart'
<del> | 'blur'
<del> | 'canplay'
<del> | 'canplaythrough'
<del> | 'cancel'
<del> | 'change'
<del> | 'click'
<del> | 'close'
<del> | 'compositionend'
<del> | 'compositionstart'
<del> | 'compositionupdate'
<del> | 'contextmenu'
<del> | 'copy'
<del> | 'cut'
<del> | 'dblclick'
<del> | 'drag'
<del> | 'dragend'
<del> | 'dragenter'
<del> | 'dragexit'
<del> | 'dragleave'
<del> | 'dragover'
<del> | 'dragstart'
<del> | 'drop'
<del> | 'durationchange'
<del> | 'emptied'
<del> | 'encrypted'
<del> | 'ended'
<del> | 'error'
<del> | 'focus'
<del> | 'input'
<del> | 'invalid'
<del> | 'keydown'
<del> | 'keypress'
<del> | 'keyup'
<del> | 'load'
<del> | 'loadstart'
<del> | 'loadeddata'
<del> | 'loadedmetadata'
<del> | 'mousedown'
<del> | 'mousemove'
<del> | 'mouseout'
<del> | 'mouseover'
<del> | 'mouseup'
<del> | 'paste'
<del> | 'pause'
<del> | 'play'
<del> | 'playing'
<del> | 'progress'
<del> | 'ratechange'
<del> | 'reset'
<del> | 'scroll'
<del> | 'seeked'
<del> | 'seeking'
<del> | 'selectionchange'
<del> | 'stalled'
<del> | 'submit'
<del> | 'suspend'
<del> | 'textInput'
<del> | 'timeupdate'
<del> | 'toggle'
<del> | 'touchcancel'
<del> | 'touchend'
<del> | 'touchmove'
<del> | 'touchstart'
<del> | 'transitionend'
<del> | 'volumechange'
<del> | 'waiting'
<del> | 'wheel';
<del>
<del>export const TOP_ABORT: DOMTopLevelEventType = 'abort';
<del>export const TOP_ANIMATION_END: DOMTopLevelEventType = getVendorPrefixedEventName(
<del> 'animationend',
<add>export const TOP_ABORT = unsafeCastStringToDOMTopLevelType('abort');
<add>export const TOP_ANIMATION_END = unsafeCastStringToDOMTopLevelType(
<add> getVendorPrefixedEventName('animationend'),
<add>);
<add>export const TOP_ANIMATION_ITERATION = unsafeCastStringToDOMTopLevelType(
<add> getVendorPrefixedEventName('animationiteration'),
<add>);
<add>export const TOP_ANIMATION_START = unsafeCastStringToDOMTopLevelType(
<add> getVendorPrefixedEventName('animationstart'),
<add>);
<add>export const TOP_BLUR = unsafeCastStringToDOMTopLevelType('blur');
<add>export const TOP_CAN_PLAY = unsafeCastStringToDOMTopLevelType('canplay');
<add>export const TOP_CAN_PLAY_THROUGH = unsafeCastStringToDOMTopLevelType(
<add> 'canplaythrough',
<add>);
<add>export const TOP_CANCEL = unsafeCastStringToDOMTopLevelType('cancel');
<add>export const TOP_CHANGE = unsafeCastStringToDOMTopLevelType('change');
<add>export const TOP_CLICK = unsafeCastStringToDOMTopLevelType('click');
<add>export const TOP_CLOSE = unsafeCastStringToDOMTopLevelType('close');
<add>export const TOP_COMPOSITION_END = unsafeCastStringToDOMTopLevelType(
<add> 'compositionend',
<add>);
<add>export const TOP_COMPOSITION_START = unsafeCastStringToDOMTopLevelType(
<add> 'compositionstart',
<add>);
<add>export const TOP_COMPOSITION_UPDATE = unsafeCastStringToDOMTopLevelType(
<add> 'compositionupdate',
<add>);
<add>export const TOP_CONTEXT_MENU = unsafeCastStringToDOMTopLevelType(
<add> 'contextmenu',
<add>);
<add>export const TOP_COPY = unsafeCastStringToDOMTopLevelType('copy');
<add>export const TOP_CUT = unsafeCastStringToDOMTopLevelType('cut');
<add>export const TOP_DOUBLE_CLICK = unsafeCastStringToDOMTopLevelType('dblclick');
<add>export const TOP_DRAG = unsafeCastStringToDOMTopLevelType('drag');
<add>export const TOP_DRAG_END = unsafeCastStringToDOMTopLevelType('dragend');
<add>export const TOP_DRAG_ENTER = unsafeCastStringToDOMTopLevelType('dragenter');
<add>export const TOP_DRAG_EXIT = unsafeCastStringToDOMTopLevelType('dragexit');
<add>export const TOP_DRAG_LEAVE = unsafeCastStringToDOMTopLevelType('dragleave');
<add>export const TOP_DRAG_OVER = unsafeCastStringToDOMTopLevelType('dragover');
<add>export const TOP_DRAG_START = unsafeCastStringToDOMTopLevelType('dragstart');
<add>export const TOP_DROP = unsafeCastStringToDOMTopLevelType('drop');
<add>export const TOP_DURATION_CHANGE = unsafeCastStringToDOMTopLevelType(
<add> 'durationchange',
<add>);
<add>export const TOP_EMPTIED = unsafeCastStringToDOMTopLevelType('emptied');
<add>export const TOP_ENCRYPTED = unsafeCastStringToDOMTopLevelType('encrypted');
<add>export const TOP_ENDED = unsafeCastStringToDOMTopLevelType('ended');
<add>export const TOP_ERROR = unsafeCastStringToDOMTopLevelType('error');
<add>export const TOP_FOCUS = unsafeCastStringToDOMTopLevelType('focus');
<add>export const TOP_INPUT = unsafeCastStringToDOMTopLevelType('input');
<add>export const TOP_INVALID = unsafeCastStringToDOMTopLevelType('invalid');
<add>export const TOP_KEY_DOWN = unsafeCastStringToDOMTopLevelType('keydown');
<add>export const TOP_KEY_PRESS = unsafeCastStringToDOMTopLevelType('keypress');
<add>export const TOP_KEY_UP = unsafeCastStringToDOMTopLevelType('keyup');
<add>export const TOP_LOAD = unsafeCastStringToDOMTopLevelType('load');
<add>export const TOP_LOAD_START = unsafeCastStringToDOMTopLevelType('loadstart');
<add>export const TOP_LOADED_DATA = unsafeCastStringToDOMTopLevelType('loadeddata');
<add>export const TOP_LOADED_METADATA = unsafeCastStringToDOMTopLevelType(
<add> 'loadedmetadata',
<add>);
<add>export const TOP_MOUSE_DOWN = unsafeCastStringToDOMTopLevelType('mousedown');
<add>export const TOP_MOUSE_MOVE = unsafeCastStringToDOMTopLevelType('mousemove');
<add>export const TOP_MOUSE_OUT = unsafeCastStringToDOMTopLevelType('mouseout');
<add>export const TOP_MOUSE_OVER = unsafeCastStringToDOMTopLevelType('mouseover');
<add>export const TOP_MOUSE_UP = unsafeCastStringToDOMTopLevelType('mouseup');
<add>export const TOP_PASTE = unsafeCastStringToDOMTopLevelType('paste');
<add>export const TOP_PAUSE = unsafeCastStringToDOMTopLevelType('pause');
<add>export const TOP_PLAY = unsafeCastStringToDOMTopLevelType('play');
<add>export const TOP_PLAYING = unsafeCastStringToDOMTopLevelType('playing');
<add>export const TOP_PROGRESS = unsafeCastStringToDOMTopLevelType('progress');
<add>export const TOP_RATE_CHANGE = unsafeCastStringToDOMTopLevelType('ratechange');
<add>export const TOP_RESET = unsafeCastStringToDOMTopLevelType('reset');
<add>export const TOP_SCROLL = unsafeCastStringToDOMTopLevelType('scroll');
<add>export const TOP_SEEKED = unsafeCastStringToDOMTopLevelType('seeked');
<add>export const TOP_SEEKING = unsafeCastStringToDOMTopLevelType('seeking');
<add>export const TOP_SELECTION_CHANGE = unsafeCastStringToDOMTopLevelType(
<add> 'selectionchange',
<ide> );
<del>export const TOP_ANIMATION_ITERATION: DOMTopLevelEventType = getVendorPrefixedEventName(
<del> 'animationiteration',
<add>export const TOP_STALLED = unsafeCastStringToDOMTopLevelType('stalled');
<add>export const TOP_SUBMIT = unsafeCastStringToDOMTopLevelType('submit');
<add>export const TOP_SUSPEND = unsafeCastStringToDOMTopLevelType('suspend');
<add>export const TOP_TEXT_INPUT = unsafeCastStringToDOMTopLevelType('textInput');
<add>export const TOP_TIME_UPDATE = unsafeCastStringToDOMTopLevelType('timeupdate');
<add>export const TOP_TOGGLE = unsafeCastStringToDOMTopLevelType('toggle');
<add>export const TOP_TOUCH_CANCEL = unsafeCastStringToDOMTopLevelType(
<add> 'touchcancel',
<ide> );
<del>export const TOP_ANIMATION_START: DOMTopLevelEventType = getVendorPrefixedEventName(
<del> 'animationstart',
<add>export const TOP_TOUCH_END = unsafeCastStringToDOMTopLevelType('touchend');
<add>export const TOP_TOUCH_MOVE = unsafeCastStringToDOMTopLevelType('touchmove');
<add>export const TOP_TOUCH_START = unsafeCastStringToDOMTopLevelType('touchstart');
<add>export const TOP_TRANSITION_END = unsafeCastStringToDOMTopLevelType(
<add> getVendorPrefixedEventName('transitionend'),
<ide> );
<del>export const TOP_BLUR: DOMTopLevelEventType = 'blur';
<del>export const TOP_CAN_PLAY: DOMTopLevelEventType = 'canplay';
<del>export const TOP_CAN_PLAY_THROUGH: DOMTopLevelEventType = 'canplaythrough';
<del>export const TOP_CANCEL: DOMTopLevelEventType = 'cancel';
<del>export const TOP_CHANGE: DOMTopLevelEventType = 'change';
<del>export const TOP_CLICK: DOMTopLevelEventType = 'click';
<del>export const TOP_CLOSE: DOMTopLevelEventType = 'close';
<del>export const TOP_COMPOSITION_END: DOMTopLevelEventType = 'compositionend';
<del>export const TOP_COMPOSITION_START: DOMTopLevelEventType = 'compositionstart';
<del>export const TOP_COMPOSITION_UPDATE: DOMTopLevelEventType = 'compositionupdate';
<del>export const TOP_CONTEXT_MENU: DOMTopLevelEventType = 'contextmenu';
<del>export const TOP_COPY: DOMTopLevelEventType = 'copy';
<del>export const TOP_CUT: DOMTopLevelEventType = 'cut';
<del>export const TOP_DOUBLE_CLICK: DOMTopLevelEventType = 'dblclick';
<del>export const TOP_DRAG: DOMTopLevelEventType = 'drag';
<del>export const TOP_DRAG_END: DOMTopLevelEventType = 'dragend';
<del>export const TOP_DRAG_ENTER: DOMTopLevelEventType = 'dragenter';
<del>export const TOP_DRAG_EXIT: DOMTopLevelEventType = 'dragexit';
<del>export const TOP_DRAG_LEAVE: DOMTopLevelEventType = 'dragleave';
<del>export const TOP_DRAG_OVER: DOMTopLevelEventType = 'dragover';
<del>export const TOP_DRAG_START: DOMTopLevelEventType = 'dragstart';
<del>export const TOP_DROP: DOMTopLevelEventType = 'drop';
<del>export const TOP_DURATION_CHANGE: DOMTopLevelEventType = 'durationchange';
<del>export const TOP_EMPTIED: DOMTopLevelEventType = 'emptied';
<del>export const TOP_ENCRYPTED: DOMTopLevelEventType = 'encrypted';
<del>export const TOP_ENDED: DOMTopLevelEventType = 'ended';
<del>export const TOP_ERROR: DOMTopLevelEventType = 'error';
<del>export const TOP_FOCUS: DOMTopLevelEventType = 'focus';
<del>export const TOP_INPUT: DOMTopLevelEventType = 'input';
<del>export const TOP_INVALID: DOMTopLevelEventType = 'invalid';
<del>export const TOP_KEY_DOWN: DOMTopLevelEventType = 'keydown';
<del>export const TOP_KEY_PRESS: DOMTopLevelEventType = 'keypress';
<del>export const TOP_KEY_UP: DOMTopLevelEventType = 'keyup';
<del>export const TOP_LOAD: DOMTopLevelEventType = 'load';
<del>export const TOP_LOAD_START: DOMTopLevelEventType = 'loadstart';
<del>export const TOP_LOADED_DATA: DOMTopLevelEventType = 'loadeddata';
<del>export const TOP_LOADED_METADATA: DOMTopLevelEventType = 'loadedmetadata';
<del>export const TOP_MOUSE_DOWN: DOMTopLevelEventType = 'mousedown';
<del>export const TOP_MOUSE_MOVE: DOMTopLevelEventType = 'mousemove';
<del>export const TOP_MOUSE_OUT: DOMTopLevelEventType = 'mouseout';
<del>export const TOP_MOUSE_OVER: DOMTopLevelEventType = 'mouseover';
<del>export const TOP_MOUSE_UP: DOMTopLevelEventType = 'mouseup';
<del>export const TOP_PASTE: DOMTopLevelEventType = 'paste';
<del>export const TOP_PAUSE: DOMTopLevelEventType = 'pause';
<del>export const TOP_PLAY: DOMTopLevelEventType = 'play';
<del>export const TOP_PLAYING: DOMTopLevelEventType = 'playing';
<del>export const TOP_PROGRESS: DOMTopLevelEventType = 'progress';
<del>export const TOP_RATE_CHANGE: DOMTopLevelEventType = 'ratechange';
<del>export const TOP_RESET: DOMTopLevelEventType = 'reset';
<del>export const TOP_SCROLL: DOMTopLevelEventType = 'scroll';
<del>export const TOP_SEEKED: DOMTopLevelEventType = 'seeked';
<del>export const TOP_SEEKING: DOMTopLevelEventType = 'seeking';
<del>export const TOP_SELECTION_CHANGE: DOMTopLevelEventType = 'selectionchange';
<del>export const TOP_STALLED: DOMTopLevelEventType = 'stalled';
<del>export const TOP_SUBMIT: DOMTopLevelEventType = 'submit';
<del>export const TOP_SUSPEND: DOMTopLevelEventType = 'suspend';
<del>export const TOP_TEXT_INPUT: DOMTopLevelEventType = 'textInput';
<del>export const TOP_TIME_UPDATE: DOMTopLevelEventType = 'timeupdate';
<del>export const TOP_TOGGLE: DOMTopLevelEventType = 'toggle';
<del>export const TOP_TOUCH_CANCEL: DOMTopLevelEventType = 'touchcancel';
<del>export const TOP_TOUCH_END: DOMTopLevelEventType = 'touchend';
<del>export const TOP_TOUCH_MOVE: DOMTopLevelEventType = 'touchmove';
<del>export const TOP_TOUCH_START: DOMTopLevelEventType = 'touchstart';
<del>export const TOP_TRANSITION_END: DOMTopLevelEventType = getVendorPrefixedEventName(
<del> 'transitionend',
<add>export const TOP_VOLUME_CHANGE = unsafeCastStringToDOMTopLevelType(
<add> 'volumechange',
<ide> );
<del>export const TOP_VOLUME_CHANGE: DOMTopLevelEventType = 'volumechange';
<del>export const TOP_WAITING: DOMTopLevelEventType = 'waiting';
<del>export const TOP_WHEEL: DOMTopLevelEventType = 'wheel';
<add>export const TOP_WAITING = unsafeCastStringToDOMTopLevelType('waiting');
<add>export const TOP_WHEEL = unsafeCastStringToDOMTopLevelType('wheel');
<ide>
<del>export const mediaEventTypes: Array<DOMTopLevelEventType> = [
<add>export const mediaEventTypes = [
<ide> TOP_ABORT,
<ide> TOP_CAN_PLAY,
<ide> TOP_CAN_PLAY_THROUGH,
<ide> export const mediaEventTypes: Array<DOMTopLevelEventType> = [
<ide> ];
<ide>
<ide> export function getRawEventName(topLevelType: DOMTopLevelEventType): string {
<del> return topLevelType;
<add> return unsafeCastDOMTopLevelTypeToString(topLevelType);
<ide> }
<ide><path>packages/react-dom/src/events/ReactDOMEventListener.js
<ide>
<ide> import type {AnyNativeEvent} from 'events/PluginModuleType';
<ide> import type {Fiber} from 'react-reconciler/src/ReactFiber';
<del>import type {DOMTopLevelEventType} from './DOMTopLevelEventTypes';
<add>import type {DOMTopLevelEventType} from 'events/TopLevelEventTypes';
<ide>
<ide> import {batchedUpdates, interactiveUpdates} from 'events/ReactGenericBatching';
<ide> import {runExtractedEventsInBatch} from 'events/EventPluginHub';
<ide><path>packages/react-dom/src/events/SimpleEventPlugin.js
<ide> * @flow
<ide> */
<ide>
<del>import type {TopLevelType} from 'events/TopLevelEventTypes';
<del>import type {DOMTopLevelEventType} from './DOMTopLevelEventTypes';
<add>import type {
<add> TopLevelType,
<add> DOMTopLevelEventType,
<add>} from 'events/TopLevelEventTypes';
<ide> import type {
<ide> DispatchConfig,
<ide> ReactSyntheticEvent, | 4 |
Go | Go | remove support for overlay/overlay2 without d_type | 0abb8dec3f730f3ad2cc9a161c97968a6bfd0631 | <ide><path>daemon/graphdriver/driver.go
<ide> package graphdriver
<ide>
<ide> import (
<del> "errors"
<ide> "fmt"
<ide> "io"
<ide> "os"
<ide> const (
<ide> var (
<ide> // All registered drivers
<ide> drivers map[string]InitFunc
<del>
<del> // ErrNotSupported returned when driver is not supported.
<del> ErrNotSupported = errors.New("driver not supported")
<del> // ErrPrerequisites returned when driver does not meet prerequisites.
<del> ErrPrerequisites = errors.New("prerequisites for driver not satisfied (wrong filesystem?)")
<del> // ErrIncompatibleFS returned when file system is not supported.
<del> ErrIncompatibleFS = fmt.Errorf("backing file system is unsupported for this graph driver")
<ide> )
<ide>
<ide> //CreateOpts contains optional arguments for Create() and CreateReadWrite()
<ide> func New(name string, pg plugingetter.PluginGetter, config Options) (Driver, err
<ide> for _, name := range list {
<ide> driver, err := getBuiltinDriver(name, config.Root, config.DriverOptions, config.UIDMaps, config.GIDMaps)
<ide> if err != nil {
<del> if isDriverNotSupported(err) {
<add> if IsDriverNotSupported(err) {
<ide> continue
<ide> }
<ide> return nil, err
<ide> func New(name string, pg plugingetter.PluginGetter, config Options) (Driver, err
<ide> for name, initFunc := range drivers {
<ide> driver, err := initFunc(filepath.Join(config.Root, name), config.DriverOptions, config.UIDMaps, config.GIDMaps)
<ide> if err != nil {
<del> if isDriverNotSupported(err) {
<add> if IsDriverNotSupported(err) {
<ide> continue
<ide> }
<ide> return nil, err
<ide> func New(name string, pg plugingetter.PluginGetter, config Options) (Driver, err
<ide> return nil, fmt.Errorf("No supported storage backend found")
<ide> }
<ide>
<del>// isDriverNotSupported returns true if the error initializing
<del>// the graph driver is a non-supported error.
<del>func isDriverNotSupported(err error) bool {
<del> return err == ErrNotSupported || err == ErrPrerequisites || err == ErrIncompatibleFS
<del>}
<del>
<ide> // scanPriorDrivers returns an un-ordered scan of directories of prior storage drivers
<ide> func scanPriorDrivers(root string) map[string]bool {
<ide> driversMap := make(map[string]bool)
<ide><path>daemon/graphdriver/errors.go
<add>package graphdriver
<add>
<add>const (
<add> // ErrNotSupported returned when driver is not supported.
<add> ErrNotSupported NotSupportedError = "driver not supported"
<add> // ErrPrerequisites returned when driver does not meet prerequisites.
<add> ErrPrerequisites NotSupportedError = "prerequisites for driver not satisfied (wrong filesystem?)"
<add> // ErrIncompatibleFS returned when file system is not supported.
<add> ErrIncompatibleFS NotSupportedError = "backing file system is unsupported for this graph driver"
<add>)
<add>
<add>// ErrUnSupported signals that the graph-driver is not supported on the current configuration
<add>type ErrUnSupported interface {
<add> NotSupported()
<add>}
<add>
<add>// NotSupportedError signals that the graph-driver is not supported on the current configuration
<add>type NotSupportedError string
<add>
<add>func (e NotSupportedError) Error() string {
<add> return string(e)
<add>}
<add>
<add>// NotSupported signals that a graph-driver is not supported.
<add>func (e NotSupportedError) NotSupported() {}
<add>
<add>// IsDriverNotSupported returns true if the error initializing
<add>// the graph driver is a non-supported error.
<add>func IsDriverNotSupported(err error) bool {
<add> switch err.(type) {
<add> case ErrUnSupported:
<add> return true
<add> default:
<add> return false
<add> }
<add>}
<ide><path>daemon/graphdriver/graphtest/graphtest_unix.go
<ide> func newDriver(t testing.TB, name string, options []string) *Driver {
<ide> d, err := graphdriver.GetDriver(name, nil, graphdriver.Options{DriverOptions: options, Root: root})
<ide> if err != nil {
<ide> t.Logf("graphdriver: %v\n", err)
<del> if err == graphdriver.ErrNotSupported || err == graphdriver.ErrPrerequisites || err == graphdriver.ErrIncompatibleFS {
<add> if graphdriver.IsDriverNotSupported(err) {
<ide> t.Skipf("Driver %s not supported", name)
<ide> }
<ide> t.Fatal(err)
<ide><path>daemon/graphdriver/overlay/overlay.go
<ide> func Init(home string, options []string, uidMaps, gidMaps []idtools.IDMap) (grap
<ide> }
<ide>
<ide> switch fsMagic {
<del> case graphdriver.FsMagicAufs, graphdriver.FsMagicBtrfs, graphdriver.FsMagicOverlay, graphdriver.FsMagicZfs, graphdriver.FsMagicEcryptfs, graphdriver.FsMagicNfsFs:
<add> case graphdriver.FsMagicAufs, graphdriver.FsMagicBtrfs, graphdriver.FsMagicEcryptfs, graphdriver.FsMagicNfsFs, graphdriver.FsMagicOverlay, graphdriver.FsMagicZfs:
<ide> logrus.Errorf("'overlay' is not supported over %s", backingFs)
<ide> return nil, graphdriver.ErrIncompatibleFS
<ide> }
<ide>
<add> supportsDType, err := fsutils.SupportsDType(testdir)
<add> if err != nil {
<add> return nil, err
<add> }
<add> if !supportsDType {
<add> return nil, overlayutils.ErrDTypeNotSupported("overlay", backingFs)
<add> }
<add>
<ide> rootUID, rootGID, err := idtools.GetRootUIDGID(uidMaps, gidMaps)
<ide> if err != nil {
<ide> return nil, err
<ide> func Init(home string, options []string, uidMaps, gidMaps []idtools.IDMap) (grap
<ide> return nil, err
<ide> }
<ide>
<del> supportsDType, err := fsutils.SupportsDType(home)
<del> if err != nil {
<del> return nil, err
<del> }
<del> if !supportsDType {
<del> // not a fatal error until v17.12 (#27443)
<del> logrus.Warn(overlayutils.ErrDTypeNotSupported("overlay", backingFs))
<del> }
<del>
<ide> d := &Driver{
<ide> home: home,
<ide> uidMaps: uidMaps,
<ide><path>daemon/graphdriver/overlay2/overlay.go
<ide> func Init(home string, options []string, uidMaps, gidMaps []idtools.IDMap) (grap
<ide> backingFs = fsName
<ide> }
<ide>
<del> // check if they are running over btrfs, aufs, zfs, overlay, or ecryptfs
<ide> switch fsMagic {
<del> case graphdriver.FsMagicAufs, graphdriver.FsMagicZfs, graphdriver.FsMagicOverlay, graphdriver.FsMagicEcryptfs, graphdriver.FsMagicNfsFs:
<add> case graphdriver.FsMagicAufs, graphdriver.FsMagicEcryptfs, graphdriver.FsMagicNfsFs, graphdriver.FsMagicOverlay, graphdriver.FsMagicZfs:
<ide> logrus.Errorf("'overlay2' is not supported over %s", backingFs)
<ide> return nil, graphdriver.ErrIncompatibleFS
<ide> case graphdriver.FsMagicBtrfs:
<ide> func Init(home string, options []string, uidMaps, gidMaps []idtools.IDMap) (grap
<ide> if opts.overrideKernelCheck {
<ide> logrus.Warn("Using pre-4.0.0 kernel for overlay2, mount failures may require kernel update")
<ide> } else {
<del> if err := supportsMultipleLowerDir(filepath.Dir(home)); err != nil {
<add> if err := supportsMultipleLowerDir(testdir); err != nil {
<ide> logrus.Debugf("Multiple lower dirs not supported: %v", err)
<ide> return nil, graphdriver.ErrNotSupported
<ide> }
<ide> }
<ide> }
<add> supportsDType, err := fsutils.SupportsDType(testdir)
<add> if err != nil {
<add> return nil, err
<add> }
<add> if !supportsDType {
<add> return nil, overlayutils.ErrDTypeNotSupported("overlay2", backingFs)
<add> }
<ide>
<ide> rootUID, rootGID, err := idtools.GetRootUIDGID(uidMaps, gidMaps)
<ide> if err != nil {
<ide> func Init(home string, options []string, uidMaps, gidMaps []idtools.IDMap) (grap
<ide> return nil, err
<ide> }
<ide>
<del> supportsDType, err := fsutils.SupportsDType(home)
<del> if err != nil {
<del> return nil, err
<del> }
<del> if !supportsDType {
<del> // not a fatal error until v17.12 (#27443)
<del> logrus.Warn(overlayutils.ErrDTypeNotSupported("overlay2", backingFs))
<del> }
<del>
<ide> d := &Driver{
<ide> home: home,
<ide> uidMaps: uidMaps,
<ide><path>daemon/graphdriver/overlayutils/overlayutils.go
<ide> package overlayutils
<ide>
<ide> import (
<del> "errors"
<ide> "fmt"
<add>
<add> "github.com/docker/docker/daemon/graphdriver"
<ide> )
<ide>
<ide> // ErrDTypeNotSupported denotes that the backing filesystem doesn't support d_type.
<ide> func ErrDTypeNotSupported(driver, backingFs string) error {
<ide> if backingFs == "xfs" {
<ide> msg += " Reformat the filesystem with ftype=1 to enable d_type support."
<ide> }
<del> msg += " Running without d_type support will no longer be supported in Docker 17.12."
<del> return errors.New(msg)
<add> msg += " Backing filesystems without d_type support are not supported."
<add>
<add> return graphdriver.NotSupportedError(msg)
<ide> } | 6 |
Text | Text | fix typo in 'find_in_batches' example | 8f24923d7ad3c838437fba2ee3e5eee655660bbc | <ide><path>guides/source/active_record_querying.md
<ide> end
<ide> `find_in_batches` works on model classes, as seen above, and also on relations:
<ide>
<ide> ```ruby
<del>Invoice.pending.find_in_batches do |invoice|
<add>Invoice.pending.find_in_batches do |invoices|
<ide> pending_invoices_export.add_invoices(invoices)
<ide> end
<ide> ``` | 1 |
Javascript | Javascript | enable last 4 italian certs | a901ad1a604944087e71dac5073fac514aaaae30 | <ide><path>config/i18n/all-langs.js
<ide> const auditedCerts = {
<ide> 'front-end-libraries',
<ide> 'data-visualization',
<ide> 'apis-and-microservices',
<del> 'quality-assurance'
<add> 'quality-assurance',
<add> 'scientific-computing-with-python',
<add> 'data-analysis-with-python',
<add> 'information-security',
<add> 'machine-learning-with-python'
<ide> ],
<ide> portuguese: [
<ide> 'responsive-web-design', | 1 |
Python | Python | add a test for pad-wrapping by length 0 | 87131155b86b221e63cf8d9ead61490a59e3db53 | <ide><path>numpy/lib/tests/test_arraypad.py
<ide> def test_check_02(self):
<ide> b = np.array([3, 1, 2, 3, 1, 2, 3, 1, 2, 3, 1])
<ide> assert_array_equal(a, b)
<ide>
<add> def test_pad_with_zero(self):
<add> a = np.ones((3, 5))
<add> b = np.pad(a, (0, 5), mode="wrap")
<add> assert_array_equal(a, b[:-5, :-5])
<add>
<ide>
<ide> class TestStatLen(object):
<ide> def test_check_simple(self): | 1 |
Python | Python | clarify docs for fliplr() / flipud() | ada38d2229f87823fd3b2a9d8ed104c3c22bbf4e | <ide><path>numpy/lib/twodim_base.py
<ide> def _flip_dispatcher(m):
<ide> @array_function_dispatch(_flip_dispatcher)
<ide> def fliplr(m):
<ide> """
<del> Flip array in the left/right direction.
<add> Reverse the order of elements along axis 1 (left/right).
<ide>
<del> Flip the entries in each row in the left/right direction.
<del> Columns are preserved, but appear in a different order than before.
<add> For a 2-D array, this flips the entries in each row in the left/right
<add> direction. Columns are preserved, but appear in a different order than
<add> before.
<ide>
<ide> Parameters
<ide> ----------
<ide> def fliplr(m):
<ide> See Also
<ide> --------
<ide> flipud : Flip array in the up/down direction.
<add> flip : Flip array in one or more dimesions.
<ide> rot90 : Rotate array counterclockwise.
<ide>
<ide> Notes
<ide> -----
<del> Equivalent to m[:,::-1]. Requires the array to be at least 2-D.
<add> Equivalent to ``m[:,::-1]`` or ``np.flip(m, axis=1)``.
<add> Requires the array to be at least 2-D.
<ide>
<ide> Examples
<ide> --------
<ide> def fliplr(m):
<ide> @array_function_dispatch(_flip_dispatcher)
<ide> def flipud(m):
<ide> """
<del> Flip array in the up/down direction.
<add> Reverse the order of elements along axis 0 (up/down).
<ide>
<del> Flip the entries in each column in the up/down direction.
<del> Rows are preserved, but appear in a different order than before.
<add> For a 2-D array, this flips the entries in each column in the up/down
<add> direction. Rows are preserved, but appear in a different order than before.
<ide>
<ide> Parameters
<ide> ----------
<ide> def flipud(m):
<ide> See Also
<ide> --------
<ide> fliplr : Flip array in the left/right direction.
<add> flip : Flip array in one or more dimesions.
<ide> rot90 : Rotate array counterclockwise.
<ide>
<ide> Notes
<ide> -----
<del> Equivalent to ``m[::-1,...]``.
<del> Does not require the array to be two-dimensional.
<add> Equivalent to ``m[::-1, ...]`` or ``np.flip(m, axis=0)``.
<add> Requires the array to be at least 1-D.
<ide>
<ide> Examples
<ide> -------- | 1 |
Go | Go | avoid some allocations | 132da3f036287896cb3ba9f9f2573ad36bbb69cc | <ide><path>daemon/logger/jsonfilelog/jsonfilelog.go
<ide> import (
<ide>
<ide> "github.com/docker/docker/daemon/logger"
<ide> "github.com/docker/docker/pkg/jsonlog"
<add> "github.com/docker/docker/pkg/timeutils"
<ide> )
<ide>
<ide> // JSONFileLogger is Logger implementation for default docker logging:
<ide> func New(filename string) (logger.Logger, error) {
<ide> func (l *JSONFileLogger) Log(msg *logger.Message) error {
<ide> l.mu.Lock()
<ide> defer l.mu.Unlock()
<del> err := (&jsonlog.JSONLog{Log: string(msg.Line) + "\n", Stream: msg.Source, Created: msg.Timestamp}).MarshalJSONBuf(l.buf)
<add> timestamp, err := timeutils.FastMarshalJSON(msg.Timestamp)
<add> if err != nil {
<add> return err
<add> }
<add> err = (&jsonlog.JSONLogBytes{Log: append(msg.Line, '\n'), Stream: msg.Source, Created: timestamp}).MarshalJSONBuf(l.buf)
<ide> if err != nil {
<ide> return err
<ide> } | 1 |
Javascript | Javascript | add update() to react.addons | 854d1f7c1b107436e4bac87ec0d5f35e28ce4b20 | <ide><path>src/browser/ReactWithAddons.js
<ide> var ReactCSSTransitionGroup = require('ReactCSSTransitionGroup');
<ide>
<ide> var cx = require('cx');
<ide> var cloneWithProps = require('cloneWithProps');
<add>var update = require('update');
<ide>
<ide> React.addons = {
<ide> LinkedStateMixin: LinkedStateMixin,
<ide> CSSTransitionGroup: ReactCSSTransitionGroup,
<ide> TransitionGroup: ReactTransitionGroup,
<ide>
<ide> classSet: cx,
<del> cloneWithProps: cloneWithProps
<add> cloneWithProps: cloneWithProps,
<add> update: update
<ide> };
<ide>
<ide> if (__DEV__) { | 1 |
Mixed | Ruby | use correct timezone when parsing date in json | a3ddd5f1572f6ebf95f4c0a789413ee1b2ecbab5 | <ide><path>activesupport/CHANGELOG.md
<add>* Fix parsing JSON time in `YYYY-MM-DD hh:mm:ss` (without `Z`).
<add> Before such time was considered in UTC timezone, now, to comply with standard, it uses local timezone.
<add>
<add> *Grzegorz Witek*
<add>
<ide> * Match `HashWithIndifferentAccess#default`'s behaviour with `Hash#default`.
<ide>
<ide> *David Cornu*
<ide><path>activesupport/lib/active_support/json/decoding.rb
<ide> module ActiveSupport
<ide>
<ide> module JSON
<ide> # matches YAML-formatted dates
<del> DATE_REGEX = /^(?:\d{4}-\d{2}-\d{2}|\d{4}-\d{1,2}-\d{1,2}[T \t]+\d{1,2}:\d{2}:\d{2}(\.[0-9]*)?(([ \t]*)Z|[-+]\d{2}?(:\d{2})?))$/
<add> DATE_REGEX = /^\d{4}-\d{2}-\d{2}$/
<add> DATETIME_REGEX = /^(?:\d{4}-\d{2}-\d{2}|\d{4}-\d{1,2}-\d{1,2}[T \t]+\d{1,2}:\d{2}:\d{2}(\.[0-9]*)?(([ \t]*)Z|[-+]\d{2}?(:\d{2})?)?)$/
<ide>
<ide> class << self
<ide> # Parses a JSON string (JavaScript Object Notation) into a hash.
<ide> def convert_dates_from(data)
<ide> nil
<ide> when DATE_REGEX
<ide> begin
<del> DateTime.parse(data)
<add> Date.parse(data)
<add> rescue ArgumentError
<add> data
<add> end
<add> when DATETIME_REGEX
<add> begin
<add> Time.zone.parse(data)
<ide> rescue ArgumentError
<ide> data
<ide> end
<ide><path>activesupport/test/json/decoding_test.rb
<ide> require 'abstract_unit'
<ide> require 'active_support/json'
<ide> require 'active_support/time'
<add>require 'time_zone_test_helpers'
<ide>
<ide> class TestJSONDecoding < ActiveSupport::TestCase
<add> include TimeZoneTestHelpers
<add>
<ide> class Foo
<ide> def self.json_create(object)
<ide> "Foo"
<ide> def self.json_create(object)
<ide> %(["2007-01-01 01:12:34 Z"]) => [Time.utc(2007, 1, 1, 1, 12, 34)],
<ide> %(["2007-01-01 01:12:34 Z", "2007-01-01 01:12:35 Z"]) => [Time.utc(2007, 1, 1, 1, 12, 34), Time.utc(2007, 1, 1, 1, 12, 35)],
<ide> # no time zone
<del> %({"a": "2007-01-01 01:12:34"}) => {'a' => "2007-01-01 01:12:34"},
<add> %({"a": "2007-01-01 01:12:34"}) => {'a' => Time.new(2007, 1, 1, 1, 12, 34, "-05:00")},
<ide> # invalid date
<ide> %({"a": "1089-10-40"}) => {'a' => "1089-10-40"},
<ide> # xmlschema date notation
<add> %({"a": "2009-08-10T19:01:02"}) => {'a' => Time.new(2009, 8, 10, 19, 1, 2, "-04:00")},
<ide> %({"a": "2009-08-10T19:01:02Z"}) => {'a' => Time.utc(2009, 8, 10, 19, 1, 2)},
<ide> %({"a": "2009-08-10T19:01:02+02:00"}) => {'a' => Time.utc(2009, 8, 10, 17, 1, 2)},
<ide> %({"a": "2009-08-10T19:01:02-05:00"}) => {'a' => Time.utc(2009, 8, 11, 00, 1, 2)},
<ide> def self.json_create(object)
<ide>
<ide> TESTS.each_with_index do |(json, expected), index|
<ide> test "json decodes #{index}" do
<del> with_parse_json_times(true) do
<del> silence_warnings do
<del> assert_equal expected, ActiveSupport::JSON.decode(json), "JSON decoding \
<del> failed for #{json}"
<add> with_tz_default 'Eastern Time (US & Canada)' do
<add> with_parse_json_times(true) do
<add> silence_warnings do
<add> assert_equal expected, ActiveSupport::JSON.decode(json), "JSON decoding \
<add> failed for #{json}"
<add> end
<ide> end
<ide> end
<ide> end | 3 |
PHP | PHP | fix route list and kernel | abd85c916df0cc0a6dc55de943a39db8b7eb4e0d | <ide><path>src/Illuminate/Foundation/Console/RouteListCommand.php
<ide> protected function getControllerMiddlewareFromInstance($controller, $method)
<ide>
<ide> $results = [];
<ide>
<del> foreach ($controller->getMiddleware() as $name => $options) {
<del> if (! $method || ! $this->methodExcludedByOptions($method, $options)) {
<del> $results[] = Arr::get($middleware, $name, $name);
<add> foreach ($controller->getMiddleware() as $data) {
<add> if (! is_string($data['middleware'])) {
<add> continue;
<add> }
<add>
<add> if (! $method || ! $this->methodExcludedByOptions($method, $data['options'])) {
<add> $results[] = Arr::get($middleware, $data['middleware'], $data['middleware']);
<ide> }
<ide> }
<ide>
<ide><path>src/Illuminate/Foundation/Http/Kernel.php
<ide> public function terminate($request, $response)
<ide> );
<ide>
<ide> foreach ($middlewares as $middleware) {
<add> if (! is_string($middleware)) {
<add> continue;
<add> }
<add>
<ide> list($name, $parameters) = $this->parseMiddleware($middleware);
<ide>
<ide> $instance = $this->app->make($name);
<ide><path>src/Illuminate/Routing/Controller.php
<ide> abstract class Controller
<ide> /**
<ide> * Register middleware on the controller.
<ide> *
<del> * @param array|string|\Closure $middlewares
<add> * @param array|string|\Closure $middleware
<ide> * @param array $options
<ide> * @return \Illuminate\Routing\ControllerMiddlewareOptions
<ide> */
<del> public function middleware($middlewares, array $options = [])
<add> public function middleware($middleware, array $options = [])
<ide> {
<del> foreach ((array) $middlewares as $middleware) {
<add> foreach ((array) $middleware as $m) {
<ide> $this->middleware[] = [
<del> 'middleware' => $middleware,
<add> 'middleware' => $m,
<ide> 'options' => &$options,
<ide> ];
<ide> }
<ide><path>src/Illuminate/Routing/Router.php
<ide> public function resolveMiddlewareClassName($name)
<ide> return $name;
<ide> } elseif (isset($map[$name]) && $map[$name] instanceof Closure) {
<ide> return $map[$name];
<add>
<ide> // If the middleware is the name of a middleware group, we will return the array
<ide> // of middlewares that belong to the group. This allows developers to group a
<ide> // set of middleware under single keys that can be conveniently referenced.
<ide> } elseif (isset($this->middlewareGroups[$name])) {
<ide> return $this->parseMiddlewareGroup($name);
<add>
<ide> // Finally, when the middleware is simply a string mapped to a class name the
<ide> // middleware name will get parsed into the full class name and parameters
<ide> // which may be run using the Pipeline which accepts this string format. | 4 |
Python | Python | fix typing errors reported by dmypy | a091c1fbf9dfec84953bbe2c938081a1b4fc0f63 | <ide><path>airflow/plugins_manager.py
<ide>
<ide> log = logging.getLogger(__name__)
<ide>
<del>import_errors = {}
<add>import_errors: Dict[str, str] = {}
<ide>
<ide> plugins = None # type: Optional[List[AirflowPlugin]]
<ide>
<ide><path>tests/providers/google/marketing_platform/operators/test_display_video.py
<ide> # KIND, either express or implied. See the License for the
<ide> # specific language governing permissions and limitations
<ide> # under the License.
<del>
<add>from typing import Optional
<ide> from unittest import TestCase, mock
<ide>
<ide> from airflow.providers.google.marketing_platform.operators.display_video import (
<ide>
<ide> API_VERSION = "api_version"
<ide> GCP_CONN_ID = "google_cloud_default"
<del>DELEGATE_TO = None
<add>DELEGATE_TO: Optional[str] = None
<ide>
<ide>
<ide> class TestGoogleDisplayVideo360CreateReportOperator(TestCase): | 2 |
Text | Text | add version note to path aliases docs | 4d0d09ba9da07fcfe4825ca4e3118dabfae9c45f | <ide><path>docs/advanced-features/module-path-aliases.md
<ide> description: Configure module path aliases that allow you to remap certain impor
<ide>
<ide> # Absolute Imports and Module path aliases
<ide>
<del>Next.js automatically supports the `tsconfig.json` and `jsconfig.json` `"paths"` and `"baseUrl"` options.
<add>Next.js automatically supports the `tsconfig.json` and `jsconfig.json` `"paths"` and `"baseUrl"` options since [Next.js 9.4](https://nextjs.org/blog/next-9-4).
<ide>
<ide> > Note: `jsconfig.json` can be used when you don't use TypeScript
<ide> | 1 |
Text | Text | add solution block | a3eab17b4c2cea1e69554139dc985898d57c8970 | <ide><path>curriculum/challenges/portuguese/01-responsive-web-design/basic-css/add-a-negative-margin-to-an-element.portuguese.md
<ide> tests:
<ide> ## Solução
<ide> <section id='solution'>
<ide>
<del>```js
<del>// solution required
<add>```html
<add><style>
<add> .injected-text {
<add> margin-bottom: -25px;
<add> text-align: center;
<add> }
<add>
<add> .box {
<add> border-style: solid;
<add> border-color: black;
<add> border-width: 5px;
<add> text-align: center;
<add> }
<add>
<add> .yellow-box {
<add> background-color: yellow;
<add> padding: 10px;
<add> }
<add>
<add> .red-box {
<add> background-color: crimson;
<add> color: #fff;
<add> padding: 20px;
<add> margin: -15px;
<add> }
<add>
<add> .blue-box {
<add> background-color: blue;
<add> color: #fff;
<add> padding: 20px;
<add> margin: 20px;
<add> margin-top: -15px;
<add> }
<add></style>
<add>
<add><div class="box yellow-box">
<add> <h5 class="box red-box">padding</h5>
<add> <h5 class="box blue-box">padding</h5>
<add></div>
<ide> ```
<ide> </section> | 1 |
Text | Text | add changelog for v1.3.0-beta.18 and v1.2.22 | b33716f42a7d0e6d104fd5552dff6e5795174895 | <ide><path>CHANGELOG.md
<add><a name="1.3.0-beta.18"></a>
<add># 1.3.0-beta.18 spontaneous-combustion (2014-08-12)
<add>
<add>
<add>## Bug Fixes
<add>
<add>- **$compile:** make '='-bindings NaN-aware
<add> ([5038bf79](https://github.com/angular/angular.js/commit/5038bf79c6c8251d7449d887b44a4321e619c534),
<add> [#8553](https://github.com/angular/angular.js/issues/8553), [#8554](https://github.com/angular/angular.js/issues/8554))
<add>- **$location:** add semicolon to whitelist of delimiters to unencode
<add> ([36258033](https://github.com/angular/angular.js/commit/3625803349de04f175f87a22cbb608738003811a),
<add> [#5019](https://github.com/angular/angular.js/issues/5019))
<add>- **$parse:**
<add> - one-time binding for literal expressions works as expected
<add> ([c024f282](https://github.com/angular/angular.js/commit/c024f28217cf8eedd695dd4b933ecf2ba4243c15),
<add> [#8209](https://github.com/angular/angular.js/issues/8209))
<add> - correctly assign expressions who's path is undefined and that use brackets notation
<add> ([c03ad249](https://github.com/angular/angular.js/commit/c03ad249033e701f3ad7aa358102e1cb87f5025c),
<add> [#8039](https://github.com/angular/angular.js/issues/8039))
<add>- **Scope:** add deregisterNotifier to oneTimeLiteralWatch signature
<add> ([a001a417](https://github.com/angular/angular.js/commit/a001a417d5c12bad0fa09c88e045622b95239e2f))
<add>- **jqLite:**
<add> - allow `triggerHandler()` to accept custom event
<add> ([01d81cda](https://github.com/angular/angular.js/commit/01d81cdab3dbbcb8b4204769eb5272096eb0837f),
<add> [#8469](https://github.com/angular/angular.js/issues/8469))
<add> - fix regression where mutating the dom tree on a event breaks jqLite.remove
<add> ([a00c9bca](https://github.com/angular/angular.js/commit/a00c9bca401abe5b5b0a217be82333056422c811),
<add> [#8359](https://github.com/angular/angular.js/issues/8359))
<add>- **ngSanitize:** ensure `html` is a string in htmlParser()
<add> ([34781f18](https://github.com/angular/angular.js/commit/34781f18cb75ded9ae29f4b78f5bacd079f76709),
<add> [#8417](https://github.com/angular/angular.js/issues/8417), [#8416](https://github.com/angular/angular.js/issues/8416))
<add>- **select:**
<add> - ensure that at least one option has the `selected` attribute set
<add> ([25a476ea](https://github.com/angular/angular.js/commit/25a476ea096b200fb4f422aaa9cd7215e2596ad3),
<add> [#8366](https://github.com/angular/angular.js/issues/8366), [#8429](https://github.com/angular/angular.js/issues/8429))
<add> - do not update selected property of an option element on digest with no change event
<add> ([cdc7db3f](https://github.com/angular/angular.js/commit/cdc7db3f35368a9175ed96c63f4bf56593fe1876),
<add> [#8221](https://github.com/angular/angular.js/issues/8221), [#7715](https://github.com/angular/angular.js/issues/7715))
<add>
<add>
<add>## Features
<add>
<add>- **$parse:** allow for assignments in ternary operator branches
<add> ([2d678f1d](https://github.com/angular/angular.js/commit/2d678f1d0a3714fdd49e582b92787312af129947),
<add> [#8512](https://github.com/angular/angular.js/issues/8512), [#8484](https://github.com/angular/angular.js/issues/8484))
<add>- **form:** Add new $submitted state to forms
<add> ([108a69be](https://github.com/angular/angular.js/commit/108a69be17df5884d026c57b2be3235c576250fe),
<add> [#8056](https://github.com/angular/angular.js/issues/8056))
<add>- **http:** allow caching for JSONP requests
<add> ([3607c982](https://github.com/angular/angular.js/commit/3607c9822f57b4d01b3f09a6ae4efc7168bec6c5),
<add> [#1947](https://github.com/angular/angular.js/issues/1947), [#8356](https://github.com/angular/angular.js/issues/8356))
<add>- **jQuery:** upgrade to jQuery to 2.1.1
<add> ([9e7cb3c3](https://github.com/angular/angular.js/commit/9e7cb3c37543008e6236bb5a2c4536df2e1e43a9))
<add>- **ngMock:** allow override of when/expect definitions
<add> ([477626d8](https://github.com/angular/angular.js/commit/477626d846b4de65d1d5c7071e6a94361395ff42),
<add> [#5766](https://github.com/angular/angular.js/issues/5766), [#8352](https://github.com/angular/angular.js/issues/8352))
<add>
<add>
<add>## Performance Improvements
<add>
<add>- **$q:** move Deferred and Promise methods to prototypes
<add> ([23bc92b1](https://github.com/angular/angular.js/commit/23bc92b17df882a907fb326320f0622717fefe7b),
<add> [#8300](https://github.com/angular/angular.js/issues/8300))
<add>- **input:** prevent additional $digest when input is already touched
<add> ([dd2a803f](https://github.com/angular/angular.js/commit/dd2a803f4f03ab629a51623c026d3e3f9dc9e91f),
<add> [#8450](https://github.com/angular/angular.js/issues/8450))
<add>
<add>
<add>## Breaking Changes
<add>
<add>- **jQuery:** due to [9e7cb3c3](https://github.com/angular/angular.js/commit/9e7cb3c37543008e6236bb5a2c4536df2e1e43a9),
<add> Angular no longer supports jQuery versions below 2.1.1.
<add>
<add>
<add><a name="1.2.2"></a>
<add># 1.2.2 finicky-pleasure (2014-08-12)
<add>
<add>
<add>## Bug Fixes
<add>
<add>- **$compile:** make '='-bindings NaN-aware
<add> ([0b0acb03](https://github.com/angular/angular.js/commit/0b0acb03424a273965fa6e6175d584f53a90252c),
<add> [#8553](https://github.com/angular/angular.js/issues/8553), [#8554](https://github.com/angular/angular.js/issues/8554))
<add>- **$parse:** correctly assign expressions who's path is undefined and that use brackets notation
<add> ([60366c8d](https://github.com/angular/angular.js/commit/60366c8d0bb5ffdd1bd8a8971820eb4868f3efd5),
<add> [#8039](https://github.com/angular/angular.js/issues/8039))
<add>- **jqLite:** allow `triggerHandler()` to accept custom event
<add> ([d262378b](https://github.com/angular/angular.js/commit/d262378b7c047dcd925cf4b55b80c0697b292232),
<add> [#8469](https://github.com/angular/angular.js/issues/8469), [#8505](https://github.com/angular/angular.js/issues/8505))
<add>- **ngSanitize:** ensure `html` is a string in htmlParser()
<add> ([9ee07551](https://github.com/angular/angular.js/commit/9ee075518f1ccec0f34aa49bd007aa2ed9a3b12e),
<add> [#8417](https://github.com/angular/angular.js/issues/8417), [#8416](https://github.com/angular/angular.js/issues/8416))
<add>- **select:**
<add> - ensure that at least one option has the `selected` attribute set
<add> ([79538afd](https://github.com/angular/angular.js/commit/79538afd7bd730d49be8eb988a3a54848d8ddaec),
<add> [#8366](https://github.com/angular/angular.js/issues/8366), [#8429](https://github.com/angular/angular.js/issues/8429))
<add> - do not update selected property of an option element on digest with no change event
<add> ([c2860944](https://github.com/angular/angular.js/commit/c2860944c61a0b910f703fe8a9717188ed387893),
<add> [#8221](https://github.com/angular/angular.js/issues/8221), [#7715](https://github.com/angular/angular.js/issues/7715))
<add>
<add>
<add>## Features
<add>
<add>- **$parse:** allow for assignments in ternary operator branches
<add> ([93b0c2d8](https://github.com/angular/angular.js/commit/93b0c2d8925e354159cc421e5be1bca9582f7b70),
<add> [#8512](https://github.com/angular/angular.js/issues/8512), [#8484](https://github.com/angular/angular.js/issues/8484))
<add>- **http:** allow caching for JSONP requests
<add> ([eab5731a](https://github.com/angular/angular.js/commit/eab5731afc788c59f3f2988db372299268df8614),
<add> [#1947](https://github.com/angular/angular.js/issues/1947), [#8356](https://github.com/angular/angular.js/issues/8356))
<add>
<add>
<ide> <a name="1.3.0-beta.17"></a>
<ide> # 1.3.0-beta.17 turing-autocompletion (2014-07-25)
<ide> | 1 |
Python | Python | fix typo in postgreshook | 47265e7b58bc28bcbbffc981442b6cc27a3af39c | <ide><path>airflow/providers/google/cloud/hooks/cloud_sql.py
<ide> def cleanup_database_hook(self) -> None:
<ide> if not self.db_hook:
<ide> raise ValueError("The db_hook should be set")
<ide> if not isinstance(self.db_hook, PostgresHook):
<del> raise ValueError(f"The db_hook should be PostrgresHook and is {type(self.db_hook)}")
<add> raise ValueError(f"The db_hook should be PostgresHook and is {type(self.db_hook)}")
<ide> conn = getattr(self.db_hook, 'conn')
<ide> if conn and conn.notices:
<ide> for output in self.db_hook.conn.notices: | 1 |
Text | Text | move princejwesley to emeritus | ff82b76ff044ea66f2f7155a1b154e333b827b6d | <ide><path>README.md
<ide> For information about the governance of the Node.js project, see
<ide> **Ali Ijaz Sheikh** <[email protected]> (he/him)
<ide> * [oyyd](https://github.com/oyyd) -
<ide> **Ouyang Yadong** <[email protected]> (he/him)
<del>* [princejwesley](https://github.com/princejwesley) -
<del>**Prince John Wesley** <[email protected]>
<ide> * [psmarshall](https://github.com/psmarshall) -
<ide> **Peter Marshall** <[email protected]> (he/him)
<ide> * [Qard](https://github.com/Qard) -
<ide> For information about the governance of the Node.js project, see
<ide> **Bert Belder** <[email protected]>
<ide> * [pmq20](https://github.com/pmq20) -
<ide> **Minqi Pan** <[email protected]>
<add>* [princejwesley](https://github.com/princejwesley) -
<add>**Prince John Wesley** <[email protected]>
<ide> * [rlidwka](https://github.com/rlidwka) -
<ide> **Alex Kocharin** <[email protected]>
<ide> * [rmg](https://github.com/rmg) - | 1 |
Mixed | Ruby | add ability to ignore tables in the schema cache | e3b4a462f7c0737258ee1c055126904c941cb424 | <ide><path>activerecord/CHANGELOG.md
<add>* Add config option for ignoring tables when dumping the schema cache.
<add>
<add> Applications can now be configured to ignore certain tables when dumping the schema cache.
<add>
<add> The configuration option can table an array of tables:
<add>
<add> ```ruby
<add> config.active_record.schema_cache_ignored_tables = ["ignored_table", "another_ignored_table"]
<add> ```
<add>
<add> Or a regex:
<add>
<add> ```ruby
<add> config.active_record.schema_cache_ignored_tables = [/^_/]
<add> ```
<add>
<add> *Eileen M. Uchitelle*
<add>
<ide> * Make schema cache methods return consistent results.
<ide>
<ide> Previously the schema cache methods `primary_keys`, `columns, `columns_hash`, and `indexes`
<ide><path>activerecord/lib/active_record.rb
<ide> module Tasks
<ide> autoload :TestDatabases, "active_record/test_databases"
<ide> autoload :TestFixtures, "active_record/fixtures"
<ide>
<add> # A list of tables or regex's to match tables to ignore when
<add> # dumping the schema cache. For example if this is set to +[/^_/]+
<add> # the schema cache will not dump tables named with an underscore.
<add> singleton_class.attr_accessor :schema_cache_ignored_tables
<add> self.schema_cache_ignored_tables = []
<add>
<ide> singleton_class.attr_accessor :legacy_connection_handling
<ide> self.legacy_connection_handling = true
<ide>
<ide><path>activerecord/lib/active_record/connection_adapters/schema_cache.rb
<ide> def primary_keys(table_name)
<ide>
<ide> # A cached lookup for table existence.
<ide> def data_source_exists?(name)
<add> return if ignored_table?(name)
<ide> prepare_data_sources if @data_sources.empty?
<ide> return @data_sources[name] if @data_sources.key? name
<ide>
<ide> def data_sources(name)
<ide>
<ide> # Get the columns for a table
<ide> def columns(table_name)
<add> if ignored_table?(table_name)
<add> raise ActiveRecord::StatementInvalid, "Table '#{table_name}' doesn't exist"
<add> end
<add>
<ide> @columns.fetch(table_name) do
<ide> @columns[deep_deduplicate(table_name)] = deep_deduplicate(connection.columns(table_name))
<ide> end
<ide> def clear_data_source_cache!(name)
<ide>
<ide> def dump_to(filename)
<ide> clear!
<del> connection.data_sources.each { |table| add(table) }
<add> tables_to_cache.each { |table| add(table) }
<ide> open(filename) { |f|
<ide> if filename.include?(".dump")
<ide> f.write(Marshal.dump(self))
<ide> def marshal_load(array)
<ide> end
<ide>
<ide> private
<add> def tables_to_cache
<add> connection.data_sources.reject do |table|
<add> ignored_table?(table)
<add> end
<add> end
<add>
<add> def ignored_table?(table_name)
<add> ActiveRecord.schema_cache_ignored_tables.any? do |ignored|
<add> ignored === table_name
<add> end
<add> end
<add>
<ide> def reset_version!
<ide> @version = connection.migration_context.current_version
<ide> end
<ide> def deep_deduplicate(value)
<ide> end
<ide>
<ide> def prepare_data_sources
<del> connection.data_sources.each { |source| @data_sources[source] = true }
<add> tables_to_cache.each do |source|
<add> @data_sources[source] = true
<add> end
<ide> end
<ide>
<ide> def open(filename)
<ide><path>activerecord/test/cases/connection_adapters/schema_cache_test.rb
<ide> def test_marshal_dump_and_load_via_disk
<ide> tempfile.unlink
<ide> end
<ide>
<add> def test_marshal_dump_and_load_with_ignored_tables
<add> old_ignore = ActiveRecord.schema_cache_ignored_tables
<add> ActiveRecord.schema_cache_ignored_tables = ["p_schema_migrations"]
<add> # Create an empty cache.
<add> cache = SchemaCache.new @connection
<add>
<add> tempfile = Tempfile.new(["schema_cache-", ".dump"])
<add> # Dump it. It should get populated before dumping.
<add> cache.dump_to(tempfile.path)
<add>
<add> # Load a new cache.
<add> cache = SchemaCache.load_from(tempfile.path)
<add> cache.connection = @connection
<add>
<add> # Assert a table in the cache
<add> assert cache.data_sources("posts"), "expected posts to be in the cached data_sources"
<add> assert_equal 12, cache.columns("posts").size
<add> assert_equal 12, cache.columns_hash("posts").size
<add> assert cache.data_sources("posts")
<add> assert_equal "id", cache.primary_keys("posts")
<add> assert_equal 1, cache.indexes("posts").size
<add>
<add> # Assert ignored table. Behavior should match non-existent table.
<add> assert_nil cache.data_sources("p_schema_migrations"), "expected comments to not be in the cached data_sources"
<add> assert_raises ActiveRecord::StatementInvalid do
<add> cache.columns("p_schema_migrations")
<add> end
<add> assert_raises ActiveRecord::StatementInvalid do
<add> cache.columns_hash("p_schema_migrations").size
<add> end
<add> assert_nil cache.primary_keys("p_schema_migrations")
<add> assert_equal [], cache.indexes("p_schema_migrations")
<add> ensure
<add> tempfile.unlink
<add> ActiveRecord.schema_cache_ignored_tables = old_ignore
<add> end
<add>
<ide> def test_marshal_dump_and_load_with_gzip
<ide> # Create an empty cache.
<ide> cache = SchemaCache.new @connection | 4 |
Go | Go | support parallel rm | 2b773257e0ee45db2c413774a3affae01c7996c1 | <ide><path>cli/command/container/rm.go
<ide> func runRm(dockerCli *command.DockerCli, opts *rmOptions) error {
<ide> ctx := context.Background()
<ide>
<ide> var errs []string
<del> for _, name := range opts.containers {
<del> if name == "" {
<add> options := types.ContainerRemoveOptions{
<add> RemoveVolumes: opts.rmVolumes,
<add> RemoveLinks: opts.rmLink,
<add> Force: opts.force,
<add> }
<add>
<add> errChan := parallelOperation(ctx, opts.containers, func(ctx context.Context, container string) error {
<add> if container == "" {
<ide> return fmt.Errorf("Container name cannot be empty")
<ide> }
<del> name = strings.Trim(name, "/")
<add> container = strings.Trim(container, "/")
<add> return dockerCli.Client().ContainerRemove(ctx, container, options)
<add> })
<ide>
<del> if err := removeContainer(dockerCli, ctx, name, opts.rmVolumes, opts.rmLink, opts.force); err != nil {
<add> for _, name := range opts.containers {
<add> if err := <-errChan; err != nil {
<ide> errs = append(errs, err.Error())
<ide> } else {
<ide> fmt.Fprintf(dockerCli.Out(), "%s\n", name)
<ide> func runRm(dockerCli *command.DockerCli, opts *rmOptions) error {
<ide> }
<ide> return nil
<ide> }
<del>
<del>func removeContainer(dockerCli *command.DockerCli, ctx context.Context, container string, removeVolumes, removeLinks, force bool) error {
<del> options := types.ContainerRemoveOptions{
<del> RemoveVolumes: removeVolumes,
<del> RemoveLinks: removeLinks,
<del> Force: force,
<del> }
<del> if err := dockerCli.Client().ContainerRemove(ctx, container, options); err != nil {
<del> return err
<del> }
<del> return nil
<del>}
<ide><path>cli/command/container/utils.go
<ide> func getExitCode(dockerCli *command.DockerCli, ctx context.Context, containerID
<ide> return c.State.Running, c.State.ExitCode, nil
<ide> }
<ide>
<del>func parallelOperation(ctx context.Context, cids []string, op func(ctx context.Context, id string) error) chan error {
<del> if len(cids) == 0 {
<add>func parallelOperation(ctx context.Context, containers []string, op func(ctx context.Context, container string) error) chan error {
<add> if len(containers) == 0 {
<ide> return nil
<ide> }
<ide> const defaultParallel int = 50
<ide> func parallelOperation(ctx context.Context, cids []string, op func(ctx context.C
<ide>
<ide> // make sure result is printed in correct order
<ide> output := map[string]chan error{}
<del> for _, c := range cids {
<add> for _, c := range containers {
<ide> output[c] = make(chan error, 1)
<ide> }
<ide> go func() {
<del> for _, c := range cids {
<add> for _, c := range containers {
<ide> err := <-output[c]
<ide> errChan <- err
<ide> }
<ide> }()
<ide>
<ide> go func() {
<del> for _, c := range cids {
<add> for _, c := range containers {
<ide> sem <- struct{}{} // Wait for active queue sem to drain.
<ide> go func(container string) {
<ide> output[container] <- op(ctx, container) | 2 |
Ruby | Ruby | fix typo `--ssl-cipher` | 02b50df3e37c463c9fb6acea10697f94538230fd | <ide><path>activerecord/lib/active_record/tasks/mysql_database_tasks.rb
<ide> def prepare_command_options
<ide> 'sslca' => '--ssl-ca',
<ide> 'sslcert' => '--ssl-cert',
<ide> 'sslcapath' => '--ssl-capath',
<del> 'sslcipher' => '--ssh-cipher',
<add> 'sslcipher' => '--ssl-cipher',
<ide> 'sslkey' => '--ssl-key'
<ide> }.map { |opt, arg| "#{arg}=#{configuration[opt]}" if configuration[opt] }.compact
<ide> | 1 |
Javascript | Javascript | define eacces as a runtime error | 11b6c0de4114fa499bcffbda91d8cea9d3bdea03 | <ide><path>lib/internal/child_process.js
<ide> const { isUint8Array } = require('internal/util/types');
<ide> const spawn_sync = process.binding('spawn_sync');
<ide>
<ide> const {
<add> UV_EACCES,
<ide> UV_EAGAIN,
<ide> UV_EINVAL,
<ide> UV_EMFILE,
<ide> ChildProcess.prototype.spawn = function(options) {
<ide> var err = this._handle.spawn(options);
<ide>
<ide> // Run-time errors should emit an error, not throw an exception.
<del> if (err === UV_EAGAIN ||
<add> if (err === UV_EACCES ||
<add> err === UV_EAGAIN ||
<ide> err === UV_EMFILE ||
<ide> err === UV_ENFILE ||
<ide> err === UV_ENOENT) { | 1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.