content_type (stringclasses, 8 values) | main_lang (stringclasses, 7 values) | message (stringlengths 1-50) | sha (stringlengths 40-40) | patch (stringlengths 52-962k) | file_count (int64, 1-300)
---|---|---|---|---|---
Java | Java | add javadoc since for jaxb2xmldecoder(mimetype...) | 946fc39d423da66833a2130256c68c834c7685ca | <ide><path>spring-web/src/main/java/org/springframework/http/codec/xml/Jaxb2XmlDecoder.java
<ide> public Jaxb2XmlDecoder() {
<ide> super(MimeTypeUtils.APPLICATION_XML, MimeTypeUtils.TEXT_XML);
<ide> }
<ide>
<add> /**
<add> * Create a {@code Jaxb2XmlDecoder} with the specified MIME types.
<add> * @param supportedMimeTypes supported MIME types
<add> * @since 5.1.9
<add> */
<ide> public Jaxb2XmlDecoder(MimeType... supportedMimeTypes) {
<ide> super(supportedMimeTypes);
<ide> } | 1 |
Javascript | Javascript | export an object instead of array for asset type | 71e84e6ee89782ca56f70e20a19a733081a91beb | <ide><path>Examples/UIExplorer/js/CameraRollExample.js
<ide> *
<ide> * @flow
<ide> * @providesModule CameraRollExample
<add> * @format
<ide> */
<ide> 'use strict';
<ide>
<ide> const {
<ide> Switch,
<ide> Text,
<ide> View,
<del> TouchableOpacity
<add> TouchableOpacity,
<ide> } = ReactNative;
<ide>
<ide> const invariant = require('fbjs/lib/invariant');
<ide> class CameraRollExample extends React.Component {
<ide> />
<ide> <Text>{'Group Type: ' + this.state.groupTypes}</Text>
<ide> <CameraRollView
<del> ref={(ref) => { this._cameraRollView = ref; }}
<add> ref={ref => {
<add> this._cameraRollView = ref;
<add> }}
<ide> batchSize={20}
<ide> groupTypes={this.state.groupTypes}
<ide> renderImage={this._renderImage}
<ide> class CameraRollExample extends React.Component {
<ide> );
<ide> }
<ide>
<del> loadAsset = (asset) => {
<add> loadAsset(asset) {
<ide> if (this.props.navigator) {
<ide> this.props.navigator.push({
<ide> title: 'Camera Roll Image',
<ide> component: AssetScaledImageExampleView,
<ide> backButtonTitle: 'Back',
<del> passProps: { asset: asset },
<add> passProps: {asset: asset},
<ide> });
<ide> }
<del> };
<add> }
<ide>
<del> _renderImage = (asset) => {
<add> _renderImage = asset => {
<ide> const imageSize = this.state.bigImages ? 150 : 75;
<ide> const imageStyle = [styles.image, {width: imageSize, height: imageSize}];
<ide> const {location} = asset.node;
<del> const locationStr = location ? JSON.stringify(location) : 'Unknown location';
<add> const locationStr = location
<add> ? JSON.stringify(location)
<add> : 'Unknown location';
<ide> return (
<del> <TouchableOpacity key={asset} onPress={ this.loadAsset.bind( this, asset ) }>
<add> <TouchableOpacity key={asset} onPress={this.loadAsset.bind(this, asset)}>
<ide> <View style={styles.row}>
<del> <Image
<del> source={asset.node.image}
<del> style={imageStyle}
<del> />
<add> <Image source={asset.node.image} style={imageStyle} />
<ide> <View style={styles.info}>
<ide> <Text style={styles.url}>{asset.node.image.uri}</Text>
<ide> <Text>{locationStr}</Text>
<ide> class CameraRollExample extends React.Component {
<ide> );
<ide> };
<ide>
<del> _onSliderChange = (value) => {
<del> const options = CameraRoll.GroupTypesOptions;
<add> _onSliderChange = value => {
<add> const options = Object.keys(CameraRoll.GroupTypesOptions);
<ide> const index = Math.floor(value * options.length * 0.99);
<ide> const groupTypes = options[index];
<ide> if (groupTypes !== this.state.groupTypes) {
<ide> this.setState({groupTypes: groupTypes});
<ide> }
<ide> };
<ide>
<del> _onSwitchChange = (value) => {
<add> _onSwitchChange = value => {
<ide> invariant(this._cameraRollView, 'ref should be set');
<ide> this._cameraRollView.rendererChanged();
<del> this.setState({ bigImages: value });
<add> this.setState({bigImages: value});
<ide> };
<ide> }
<ide>
<ide> const styles = StyleSheet.create({
<ide> });
<ide>
<ide> exports.title = 'Camera Roll';
<del>exports.description = 'Example component that uses CameraRoll to list user\'s photos';
<add>exports.description =
<add> "Example component that uses CameraRoll to list user's photos";
<ide> exports.examples = [
<ide> {
<ide> title: 'Photos',
<del> render(): React.Element<any> { return <CameraRollExample />; }
<del> }
<add> render(): React.Element<any> {
<add> return <CameraRollExample />;
<add> },
<add> },
<ide> ];
<ide><path>Libraries/CameraRoll/CameraRoll.js
<ide> *
<ide> * @providesModule CameraRoll
<ide> * @flow
<add> * @format
<ide> */
<ide> 'use strict';
<ide>
<del>var {PropTypes, checkPropTypes} = require('React');
<del>var RCTCameraRollManager = require('NativeModules').CameraRollManager;
<del>
<del>var createStrictShapeTypeChecker = require('createStrictShapeTypeChecker');
<del>var deepFreezeAndThrowOnMutationInDev =
<del> require('deepFreezeAndThrowOnMutationInDev');
<del>var invariant = require('fbjs/lib/invariant');
<del>
<del>var GROUP_TYPES_OPTIONS = [
<del> 'Album',
<del> 'All',
<del> 'Event',
<del> 'Faces',
<del> 'Library',
<del> 'PhotoStream',
<del> 'SavedPhotos', // default
<del>];
<del>
<del>var ASSET_TYPE_OPTIONS = [
<del> 'All',
<del> 'Videos',
<del> 'Photos', // default
<del>];
<del>
<del>// Flow treats Object and Array as disjoint types, currently.
<del>deepFreezeAndThrowOnMutationInDev((GROUP_TYPES_OPTIONS: any));
<del>deepFreezeAndThrowOnMutationInDev((ASSET_TYPE_OPTIONS: any));
<add>const {PropTypes, checkPropTypes} = require('React');
<add>const RCTCameraRollManager = require('NativeModules').CameraRollManager;
<add>
<add>const createStrictShapeTypeChecker = require('createStrictShapeTypeChecker');
<add>const invariant = require('fbjs/lib/invariant');
<add>
<add>const GROUP_TYPES_OPTIONS = {
<add> Album: 'Album',
<add> All: 'All',
<add> Event: 'Event',
<add> Faces: 'Faces',
<add> Library: 'Library',
<add> PhotoStream: 'PhotoStream',
<add> SavedPhotos: 'SavedPhotos', // default
<add>};
<add>
<add>const ASSET_TYPE_OPTIONS = {
<add> All: 'All',
<add> Videos: 'Videos',
<add> Photos: 'Photos',
<add>};
<ide>
<ide> /**
<ide> * Shape of the param arg for the `getPhotos` function.
<ide> */
<del>var getPhotosParamChecker = createStrictShapeTypeChecker({
<add>const getPhotosParamChecker = createStrictShapeTypeChecker({
<ide> /**
<ide> * The number of photos wanted in reverse order of the photo application
<ide> * (i.e. most recent first for SavedPhotos).
<ide> var getPhotosParamChecker = createStrictShapeTypeChecker({
<ide> /**
<ide> * Specifies which group types to filter the results to.
<ide> */
<del> groupTypes: PropTypes.oneOf(GROUP_TYPES_OPTIONS),
<add> groupTypes: PropTypes.oneOf(Object.keys(GROUP_TYPES_OPTIONS)),
<ide>
<ide> /**
<ide> * Specifies filter on group names, like 'Recent Photos' or custom album
<ide> var getPhotosParamChecker = createStrictShapeTypeChecker({
<ide> /**
<ide> * Specifies filter on asset type
<ide> */
<del> assetType: PropTypes.oneOf(ASSET_TYPE_OPTIONS),
<add> assetType: PropTypes.oneOf(Object.keys(ASSET_TYPE_OPTIONS)),
<ide>
<ide> /**
<ide> * Filter by mimetype (e.g. image/jpeg).
<ide> var getPhotosParamChecker = createStrictShapeTypeChecker({
<ide> /**
<ide> * Shape of the return value of the `getPhotos` function.
<ide> */
<del>var getPhotosReturnChecker = createStrictShapeTypeChecker({
<add>const getPhotosReturnChecker = createStrictShapeTypeChecker({
<ide> // $FlowFixMe(>=0.41.0)
<del> edges: PropTypes.arrayOf(createStrictShapeTypeChecker({
<del> node: createStrictShapeTypeChecker({
<del> type: PropTypes.string.isRequired,
<del> group_name: PropTypes.string.isRequired,
<del> image: createStrictShapeTypeChecker({
<del> uri: PropTypes.string.isRequired,
<del> height: PropTypes.number.isRequired,
<del> width: PropTypes.number.isRequired,
<del> isStored: PropTypes.bool,
<add> edges: PropTypes.arrayOf(
<add> createStrictShapeTypeChecker({
<add> node: createStrictShapeTypeChecker({
<add> type: PropTypes.string.isRequired,
<add> group_name: PropTypes.string.isRequired,
<add> image: createStrictShapeTypeChecker({
<add> uri: PropTypes.string.isRequired,
<add> height: PropTypes.number.isRequired,
<add> width: PropTypes.number.isRequired,
<add> isStored: PropTypes.bool,
<add> }).isRequired,
<add> timestamp: PropTypes.number.isRequired,
<add> location: createStrictShapeTypeChecker({
<add> latitude: PropTypes.number,
<add> longitude: PropTypes.number,
<add> altitude: PropTypes.number,
<add> heading: PropTypes.number,
<add> speed: PropTypes.number,
<add> }),
<ide> }).isRequired,
<del> timestamp: PropTypes.number.isRequired,
<del> location: createStrictShapeTypeChecker({
<del> latitude: PropTypes.number,
<del> longitude: PropTypes.number,
<del> altitude: PropTypes.number,
<del> heading: PropTypes.number,
<del> speed: PropTypes.number,
<del> }),
<del> }).isRequired,
<del> })).isRequired,
<add> }),
<add> ).isRequired,
<ide> page_info: createStrictShapeTypeChecker({
<ide> has_next_page: PropTypes.bool.isRequired,
<ide> start_cursor: PropTypes.string,
<ide> var getPhotosReturnChecker = createStrictShapeTypeChecker({
<ide> *
<ide> */
<ide> class CameraRoll {
<del>
<del> static GroupTypesOptions: Array<string>;
<del> static AssetTypeOptions: Array<string>;
<add> static GroupTypesOptions: Object = GROUP_TYPES_OPTIONS;
<add> static AssetTypeOptions: Object = ASSET_TYPE_OPTIONS;
<ide>
<ide> static saveImageWithTag(tag: string): Promise<Object> {
<del> console.warn('CameraRoll.saveImageWithTag is deprecated. Use CameraRoll.saveToCameraRoll instead');
<add> console.warn(
<add> 'CameraRoll.saveImageWithTag is deprecated. Use CameraRoll.saveToCameraRoll instead',
<add> );
<ide> return this.saveToCameraRoll(tag, 'photo');
<ide> }
<ide>
<ide> class CameraRoll {
<ide> *
<ide> * Returns a Promise which will resolve with the new URI.
<ide> */
<del> static saveToCameraRoll(tag: string, type?: 'photo' | 'video'): Promise<Object> {
<add> static saveToCameraRoll(
<add> tag: string,
<add> type?: 'photo' | 'video',
<add> ): Promise<Object> {
<ide> invariant(
<ide> typeof tag === 'string',
<del> 'CameraRoll.saveToCameraRoll must be a valid string.'
<add> 'CameraRoll.saveToCameraRoll must be a valid string.',
<ide> );
<ide>
<ide> invariant(
<ide> type === 'photo' || type === 'video' || type === undefined,
<ide> // $FlowFixMe(>=0.28.0)
<del> `The second argument to saveToCameraRoll must be 'photo' or 'video'. You passed ${type}`
<add> `The second argument to saveToCameraRoll must be 'photo' or 'video'. You passed ${type}`,
<ide> );
<ide>
<ide> let mediaType = 'photo';
<ide> class CameraRoll {
<ide> */
<ide> static getPhotos(params) {
<ide> if (__DEV__) {
<del> checkPropTypes({params: getPhotosParamChecker}, {params}, 'params', 'CameraRoll.getPhotos');
<add> checkPropTypes(
<add> {params: getPhotosParamChecker},
<add> {params},
<add> 'params',
<add> 'CameraRoll.getPhotos',
<add> );
<ide> }
<ide> if (arguments.length > 1) {
<del> console.warn('CameraRoll.getPhotos(tag, success, error) is deprecated. Use the returned Promise instead');
<add> console.warn(
<add> 'CameraRoll.getPhotos(tag, success, error) is deprecated. Use the returned Promise instead',
<add> );
<ide> let successCallback = arguments[1];
<ide> if (__DEV__) {
<ide> const callback = arguments[1];
<del> successCallback = (response) => {
<add> successCallback = response => {
<ide> checkPropTypes(
<ide> {response: getPhotosReturnChecker},
<ide> {response},
<ide> 'response',
<del> 'CameraRoll.getPhotos callback'
<add> 'CameraRoll.getPhotos callback',
<ide> );
<ide> callback(response);
<ide> };
<ide> }
<del> const errorCallback = arguments[2] || ( () => {} );
<del> RCTCameraRollManager.getPhotos(params).then(successCallback, errorCallback);
<add> const errorCallback = arguments[2] || (() => {});
<add> RCTCameraRollManager.getPhotos(params).then(
<add> successCallback,
<add> errorCallback,
<add> );
<ide> }
<ide> // TODO: Add the __DEV__ check back in to verify the Promise result
<ide> return RCTCameraRollManager.getPhotos(params);
<ide> }
<ide> }
<ide>
<del>CameraRoll.GroupTypesOptions = GROUP_TYPES_OPTIONS;
<del>CameraRoll.AssetTypeOptions = ASSET_TYPE_OPTIONS;
<del>
<ide> module.exports = CameraRoll; | 2 |
Mixed | Go | implement tail for docker logs | 1dc0caf9c0170ab8e7f5a2f77f2dbdacff322eff | <ide><path>api/client/commands.go
<ide> func (cli *DockerCli) CmdLogs(args ...string) error {
<ide> cmd = cli.Subcmd("logs", "CONTAINER", "Fetch the logs of a container")
<ide> follow = cmd.Bool([]string{"f", "-follow"}, false, "Follow log output")
<ide> times = cmd.Bool([]string{"t", "-timestamps"}, false, "Show timestamps")
<add> tail = cmd.String([]string{"-tail"}, "all", "Output the specified number of lines at the end of logs(all logs by default)")
<ide> )
<ide>
<ide> if err := cmd.Parse(args); err != nil {
<ide> func (cli *DockerCli) CmdLogs(args ...string) error {
<ide> if *follow {
<ide> v.Set("follow", "1")
<ide> }
<add> v.Set("tail", *tail)
<ide>
<ide> return cli.streamHelper("GET", "/containers/"+name+"/logs?"+v.Encode(), env.GetSubEnv("Config").GetBool("Tty"), nil, cli.out, cli.err, nil)
<ide> }
<ide><path>api/server/server.go
<ide> func getContainersLogs(eng *engine.Engine, version version.Version, w http.Respo
<ide> return err
<ide> }
<ide> logsJob.Setenv("follow", r.Form.Get("follow"))
<add> logsJob.Setenv("tail", r.Form.Get("tail"))
<ide> logsJob.Setenv("stdout", r.Form.Get("stdout"))
<ide> logsJob.Setenv("stderr", r.Form.Get("stderr"))
<ide> logsJob.Setenv("timestamps", r.Form.Get("timestamps"))
<ide><path>docs/sources/reference/api/docker_remote_api_v1.13.md
<ide> Get stdout and stderr logs from the container ``id``
<ide>
<ide> **Example request**:
<ide>
<del> GET /containers/4fa6e0f0c678/logs?stderr=1&stdout=1×tamps=1&follow=1 HTTP/1.1
<add> GET /containers/4fa6e0f0c678/logs?stderr=1&stdout=1×tamps=1&follow=1&tail=10 HTTP/1.1
<ide>
<ide> **Example response**:
<ide>
<ide> Get stdout and stderr logs from the container ``id``
<ide>
<ide> Β
<ide>
<del> - **follow** β 1/True/true or 0/False/false, return stream.
<del> Default false
<del> - **stdout** β 1/True/true or 0/False/false, if logs=true, return
<del> stdout log. Default false
<del> - **stderr** β 1/True/true or 0/False/false, if logs=true, return
<del> stderr log. Default false
<del> - **timestamps** β 1/True/true or 0/False/false, if logs=true, print
<del> timestamps for every log line. Default false
<add> - **follow** β 1/True/true or 0/False/false, return stream. Default false
<add> - **stdout** β 1/True/true or 0/False/false, show stdout log. Default false
<add> - **stderr** β 1/True/true or 0/False/false, show stderr log. Default false
<add> - **timestamps** β 1/True/true or 0/False/false, print timestamps for
<add> every log line. Default false
<add> - **tail** β Output specified number of lines at the end of logs: `all` or `<number>`. Default all
<ide>
<ide> Status Codes:
<ide>
<ide><path>docs/sources/reference/commandline/cli.md
<ide> specify this by adding the server name.
<ide>
<ide> -f, --follow=false Follow log output
<ide> -t, --timestamps=false Show timestamps
<add> --tail="all" Output the specified number of lines at the end of logs (all logs by default)
<ide>
<del>The `docker logs` command batch-retrieves all logs
<del>present at the time of execution.
<add>The `docker logs` command batch-retrieves logs present at the time of execution.
<ide>
<del>The ``docker logs --follow`` command will first return all logs from the
<del>beginning and then continue streaming new output from the container's `STDOUT`
<del>and `STDERR`.
<add>The `docker logs --follow` command will continue streaming the new output from
<add>the container's `STDOUT` and `STDERR`.
<add>
<add>Passing a negative number or a non-integer to --tail is invalid and the
<add>value is set to all in that case. This behavior may change in the future.
<ide>
<ide> ## port
<ide>
<ide><path>integration-cli/docker_cli_logs_test.go
<ide> func TestLogsStderrInStdout(t *testing.T) {
<ide>
<ide> logDone("logs - stderr in stdout (with pseudo-tty)")
<ide> }
<add>
<add>func TestLogsTail(t *testing.T) {
<add> testLen := 100
<add> runCmd := exec.Command(dockerBinary, "run", "-d", "busybox", "sh", "-c", fmt.Sprintf("for i in $(seq 1 %d); do echo =; done;", testLen))
<add>
<add> out, _, _, err := runCommandWithStdoutStderr(runCmd)
<add> errorOut(err, t, fmt.Sprintf("run failed with errors: %v", err))
<add>
<add> cleanedContainerID := stripTrailingCharacters(out)
<add> exec.Command(dockerBinary, "wait", cleanedContainerID).Run()
<add>
<add> logsCmd := exec.Command(dockerBinary, "logs", "--tail", "5", cleanedContainerID)
<add> out, _, _, err = runCommandWithStdoutStderr(logsCmd)
<add> errorOut(err, t, fmt.Sprintf("failed to log container: %v %v", out, err))
<add>
<add> lines := strings.Split(out, "\n")
<add>
<add> if len(lines) != 6 {
<add> t.Fatalf("Expected log %d lines, received %d\n", 6, len(lines))
<add> }
<add>
<add> logsCmd = exec.Command(dockerBinary, "logs", "--tail", "all", cleanedContainerID)
<add> out, _, _, err = runCommandWithStdoutStderr(logsCmd)
<add> errorOut(err, t, fmt.Sprintf("failed to log container: %v %v", out, err))
<add>
<add> lines = strings.Split(out, "\n")
<add>
<add> if len(lines) != testLen+1 {
<add> t.Fatalf("Expected log %d lines, received %d\n", testLen+1, len(lines))
<add> }
<add>
<add> logsCmd = exec.Command(dockerBinary, "logs", "--tail", "random", cleanedContainerID)
<add> out, _, _, err = runCommandWithStdoutStderr(logsCmd)
<add> errorOut(err, t, fmt.Sprintf("failed to log container: %v %v", out, err))
<add>
<add> lines = strings.Split(out, "\n")
<add>
<add> if len(lines) != testLen+1 {
<add> t.Fatalf("Expected log %d lines, received %d\n", testLen+1, len(lines))
<add> }
<add>
<add> deleteContainer(cleanedContainerID)
<add> logDone("logs - logs tail")
<add>}
<ide><path>pkg/tailfile/tailfile.go
<add>package tailfile
<add>
<add>import (
<add> "bytes"
<add> "errors"
<add> "os"
<add>)
<add>
<add>const blockSize = 1024
<add>
<add>var eol = []byte("\n")
<add>var ErrNonPositiveLinesNumber = errors.New("Lines number must be positive")
<add>
<add>//TailFile returns last n lines of file f
<add>func TailFile(f *os.File, n int) ([][]byte, error) {
<add> if n <= 0 {
<add> return nil, ErrNonPositiveLinesNumber
<add> }
<add> size, err := f.Seek(0, os.SEEK_END)
<add> if err != nil {
<add> return nil, err
<add> }
<add> block := -1
<add> var data []byte
<add> var cnt int
<add> for {
<add> var b []byte
<add> step := int64(block * blockSize)
<add> left := size + step // how many bytes to beginning
<add> if left < 0 {
<add> if _, err := f.Seek(0, os.SEEK_SET); err != nil {
<add> return nil, err
<add> }
<add> b = make([]byte, blockSize+left)
<add> if _, err := f.Read(b); err != nil {
<add> return nil, err
<add> }
<add> data = append(b, data...)
<add> break
<add> } else {
<add> b = make([]byte, blockSize)
<add> if _, err := f.Seek(step, os.SEEK_END); err != nil {
<add> return nil, err
<add> }
<add> if _, err := f.Read(b); err != nil {
<add> return nil, err
<add> }
<add> data = append(b, data...)
<add> }
<add> cnt += bytes.Count(b, eol)
<add> if cnt > n {
<add> break
<add> }
<add> block--
<add> }
<add> lines := bytes.Split(data, eol)
<add> if n < len(lines) {
<add> return lines[len(lines)-n-1 : len(lines)-1], nil
<add> }
<add> return lines[:len(lines)-1], nil
<add>}
<ide><path>pkg/tailfile/tailfile_test.go
<add>package tailfile
<add>
<add>import (
<add> "io/ioutil"
<add> "os"
<add> "testing"
<add>)
<add>
<add>func TestTailFile(t *testing.T) {
<add> f, err := ioutil.TempFile("", "tail-test")
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add> defer f.Close()
<add> defer os.RemoveAll(f.Name())
<add> testFile := []byte(`first line
<add>second line
<add>third line
<add>fourth line
<add>fifth line
<add>next first line
<add>next second line
<add>next third line
<add>next fourth line
<add>next fifth line
<add>last first line
<add>next first line
<add>next second line
<add>next third line
<add>next fourth line
<add>next fifth line
<add>next first line
<add>next second line
<add>next third line
<add>next fourth line
<add>next fifth line
<add>last second line
<add>last third line
<add>last fourth line
<add>last fifth line
<add>truncated line`)
<add> if _, err := f.Write(testFile); err != nil {
<add> t.Fatal(err)
<add> }
<add> if _, err := f.Seek(0, os.SEEK_SET); err != nil {
<add> t.Fatal(err)
<add> }
<add> expected := []string{"last fourth line", "last fifth line"}
<add> res, err := TailFile(f, 2)
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add> for i, l := range res {
<add> t.Logf("%s", l)
<add> if expected[i] != string(l) {
<add> t.Fatalf("Expected line %s, got %s", expected[i], l)
<add> }
<add> }
<add>}
<add>
<add>func TestTailFileManyLines(t *testing.T) {
<add> f, err := ioutil.TempFile("", "tail-test")
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add> defer f.Close()
<add> defer os.RemoveAll(f.Name())
<add> testFile := []byte(`first line
<add>second line
<add>truncated line`)
<add> if _, err := f.Write(testFile); err != nil {
<add> t.Fatal(err)
<add> }
<add> if _, err := f.Seek(0, os.SEEK_SET); err != nil {
<add> t.Fatal(err)
<add> }
<add> expected := []string{"first line", "second line"}
<add> res, err := TailFile(f, 10000)
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add> for i, l := range res {
<add> t.Logf("%s", l)
<add> if expected[i] != string(l) {
<add> t.Fatalf("Expected line %s, got %s", expected[i], l)
<add> }
<add> }
<add>}
<add>
<add>func TestTailEmptyFile(t *testing.T) {
<add> f, err := ioutil.TempFile("", "tail-test")
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add> defer f.Close()
<add> defer os.RemoveAll(f.Name())
<add> res, err := TailFile(f, 10000)
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add> if len(res) != 0 {
<add> t.Fatal("Must be empty slice from empty file")
<add> }
<add>}
<add>
<add>func TestTailNegativeN(t *testing.T) {
<add> f, err := ioutil.TempFile("", "tail-test")
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add> defer f.Close()
<add> defer os.RemoveAll(f.Name())
<add> testFile := []byte(`first line
<add>second line
<add>truncated line`)
<add> if _, err := f.Write(testFile); err != nil {
<add> t.Fatal(err)
<add> }
<add> if _, err := f.Seek(0, os.SEEK_SET); err != nil {
<add> t.Fatal(err)
<add> }
<add> if _, err := TailFile(f, -1); err != ErrNonPositiveLinesNumber {
<add> t.Fatalf("Expected ErrNonPositiveLinesNumber, got %s", err)
<add> }
<add> if _, err := TailFile(f, 0); err != ErrNonPositiveLinesNumber {
<add> t.Fatalf("Expected ErrNonPositiveLinesNumber, got %s", err)
<add> }
<add>}
<add>
<add>func BenchmarkTail(b *testing.B) {
<add> f, err := ioutil.TempFile("", "tail-test")
<add> if err != nil {
<add> b.Fatal(err)
<add> }
<add> defer f.Close()
<add> defer os.RemoveAll(f.Name())
<add> for i := 0; i < 10000; i++ {
<add> if _, err := f.Write([]byte("tailfile pretty interesting line\n")); err != nil {
<add> b.Fatal(err)
<add> }
<add> }
<add> b.ResetTimer()
<add> for i := 0; i < b.N; i++ {
<add> if _, err := TailFile(f, 1000); err != nil {
<add> b.Fatal(err)
<add> }
<add> }
<add>}
<ide><path>server/server.go
<ide> package server
<ide>
<ide> import (
<add> "bytes"
<ide> "encoding/json"
<ide> "fmt"
<ide> "io"
<ide> import (
<ide> "github.com/dotcloud/docker/image"
<ide> "github.com/dotcloud/docker/pkg/graphdb"
<ide> "github.com/dotcloud/docker/pkg/signal"
<add> "github.com/dotcloud/docker/pkg/tailfile"
<ide> "github.com/dotcloud/docker/registry"
<ide> "github.com/dotcloud/docker/runconfig"
<ide> "github.com/dotcloud/docker/utils"
<ide> func (srv *Server) ContainerLogs(job *engine.Job) engine.Status {
<ide> name = job.Args[0]
<ide> stdout = job.GetenvBool("stdout")
<ide> stderr = job.GetenvBool("stderr")
<add> tail = job.Getenv("tail")
<ide> follow = job.GetenvBool("follow")
<ide> times = job.GetenvBool("timestamps")
<add> lines = -1
<ide> format string
<ide> )
<ide> if !(stdout || stderr) {
<ide> func (srv *Server) ContainerLogs(job *engine.Job) engine.Status {
<ide> if times {
<ide> format = time.StampMilli
<ide> }
<add> if tail == "" {
<add> tail = "all"
<add> }
<ide> container := srv.daemon.Get(name)
<ide> if container == nil {
<ide> return job.Errorf("No such container: %s", name)
<ide> func (srv *Server) ContainerLogs(job *engine.Job) engine.Status {
<ide> } else if err != nil {
<ide> utils.Errorf("Error reading logs (json): %s", err)
<ide> } else {
<del> dec := json.NewDecoder(cLog)
<del> for {
<del> l := &utils.JSONLog{}
<del>
<del> if err := dec.Decode(l); err == io.EOF {
<del> break
<del> } else if err != nil {
<del> utils.Errorf("Error streaming logs: %s", err)
<del> break
<del> }
<del> logLine := l.Log
<del> if times {
<del> logLine = fmt.Sprintf("[%s] %s", l.Created.Format(format), logLine)
<add> if tail != "all" {
<add> var err error
<add> lines, err = strconv.Atoi(tail)
<add> if err != nil {
<add> utils.Errorf("Failed to parse tail %s, error: %v, show all logs", err)
<add> lines = -1
<ide> }
<del> if l.Stream == "stdout" && stdout {
<del> fmt.Fprintf(job.Stdout, "%s", logLine)
<add> }
<add> if lines != 0 {
<add> if lines > 0 {
<add> f := cLog.(*os.File)
<add> ls, err := tailfile.TailFile(f, lines)
<add> if err != nil {
<add> return job.Error(err)
<add> }
<add> tmp := bytes.NewBuffer([]byte{})
<add> for _, l := range ls {
<add> fmt.Fprintf(tmp, "%s\n", l)
<add> }
<add> cLog = tmp
<ide> }
<del> if l.Stream == "stderr" && stderr {
<del> fmt.Fprintf(job.Stderr, "%s", logLine)
<add> dec := json.NewDecoder(cLog)
<add> for {
<add> l := &utils.JSONLog{}
<add>
<add> if err := dec.Decode(l); err == io.EOF {
<add> break
<add> } else if err != nil {
<add> utils.Errorf("Error streaming logs: %s", err)
<add> break
<add> }
<add> logLine := l.Log
<add> if times {
<add> logLine = fmt.Sprintf("[%s] %s", l.Created.Format(format), logLine)
<add> }
<add> if l.Stream == "stdout" && stdout {
<add> fmt.Fprintf(job.Stdout, "%s", logLine)
<add> }
<add> if l.Stream == "stderr" && stderr {
<add> fmt.Fprintf(job.Stderr, "%s", logLine)
<add> }
<ide> }
<ide> }
<ide> } | 8 |
Python | Python | refactor the code in a separate function | 69fc915b52fc42b288fa503b2c44c4cc4691ec18 | <ide><path>libcloud/httplib_ssl.py
<ide> def connect(self):
<ide> if self.http_proxy_used:
<ide> self._activate_http_proxy(sock=sock)
<ide>
<add> ssl_version = libcloud.security.SSL_VERSION
<add>
<ide> try:
<ide> self.sock = ssl.wrap_socket(
<ide> sock,
<ide> self.key_file,
<ide> self.cert_file,
<ide> cert_reqs=ssl.CERT_REQUIRED,
<ide> ca_certs=self.ca_cert,
<del> ssl_version=libcloud.security.SSL_VERSION)
<add> ssl_version=ssl_version)
<ide> except socket.error:
<ide> exc = sys.exc_info()[1]
<del> exc_msg = str(exc)
<del>
<ide> # Re-throw an exception with a more friendly error message
<del> if 'connection reset by peer' in exc_msg.lower():
<del> ssl_version = libcloud.security.SSL_VERSION
<del> ssl_version = SSL_CONSTANT_TO_TLS_VERSION_MAP[ssl_version]
<del> msg = (UNSUPPORTED_TLS_VERSION_ERROR_MSG %
<del> (exc_msg, ssl_version))
<del>
<del> # Note: In some cases arguments are (errno, message) and in
<del> # other it's just (message,)
<del> exc_args = getattr(exc, 'args', [])
<del>
<del> if len(exc_args) == 2:
<del> new_exc_args = [exc.args[0], msg]
<del> else:
<del> new_exc_args = [msg]
<del>
<del> new_exc = socket.error(*new_exc_args)
<del> new_exc.original_exc = exc
<del> raise new_exc
<del>
<add> exc = get_socket_error_exception(ssl_version=ssl_version, exc=exc)
<ide> raise exc
<ide>
<ide> cert = self.sock.getpeercert()
<ide> def connect(self):
<ide> except CertificateError:
<ide> e = sys.exc_info()[1]
<ide> raise ssl.SSLError('Failed to verify hostname: %s' % (str(e)))
<add>
<add>
<add>def get_socket_error_exception(ssl_version, exc):
<add> """
<add> Function which intercepts socket.error exceptions and re-throws an
<add> exception with a more user-friendly message in case server doesn't support
<add> requested SSL version.
<add> """
<add> exc_msg = str(exc)
<add>
<add> # Re-throw an exception with a more friendly error message
<add> if 'connection reset by peer' in exc_msg.lower():
<add> ssl_version_name = SSL_CONSTANT_TO_TLS_VERSION_MAP[ssl_version]
<add> msg = (UNSUPPORTED_TLS_VERSION_ERROR_MSG %
<add> (exc_msg, ssl_version_name))
<add>
<add> # Note: In some cases arguments are (errno, message) and in
<add> # other it's just (message,)
<add> exc_args = getattr(exc, 'args', [])
<add>
<add> if len(exc_args) == 2:
<add> new_exc_args = [exc.args[0], msg]
<add> else:
<add> new_exc_args = [msg]
<add>
<add> new_exc = socket.error(*new_exc_args)
<add> new_exc.original_exc = exc
<add> return new_exc | 1 |
Javascript | Javascript | fix unused bonedics, rootbone and keys | 883603a32018b7ebaa7413ba0b3626d02322349a | <ide><path>examples/js/loaders/XLoader.js
<ide> THREE.XLoader.prototype = {
<ide> scope.loadingXdata.FrameInfo_Raw[ nowFrameName ].Geometry.groupsNeedUpdate = true;
<ide> var putBones = [];
<ide> var BoneInverse = [];
<del> var BoneDics = [];
<del> var rootBone = new THREE.Bone();
<ide> if ( scope.loadingXdata.FrameInfo_Raw[ nowFrameName ].BoneInfs != null && scope.loadingXdata.FrameInfo_Raw[ nowFrameName ].BoneInfs.length ) {
<ide>
<ide> var keys = Object.keys( scope.loadingXdata.FrameInfo_Raw );
<ide> THREE.XLoader.prototype = {
<ide>
<ide> var scope = this;
<ide> var i = scope.nowReaded;
<del> var keys = Object.keys( scope.loadingXdata.FrameInfo_Raw );
<ide> var tgtModel = null;
<ide> for ( var m = 0; m < scope.loadingXdata.FrameInfo.length; m ++ ) {
<ide> | 1 |
Python | Python | remove "redefinition" of dict element | e7e6cbfb8ff4a85be123c124d8ea0a449afe9f9c | <ide><path>project_euler/problem_074/sol1.py
<ide> 871: 2,
<ide> 45361: 2,
<ide> 872: 2,
<del> 45361: 2,
<ide> }
<ide>
<ide> | 1 |
Go | Go | resolve conflicts with restart policies | e49c70109228d657790190007c8f9d7c55a25be2 | <ide><path>api/client/commands.go
<ide> func (cli *DockerCli) CmdRun(args ...string) error {
<ide> flDetach = cmd.Bool([]string{"d", "-detach"}, false, "Detached mode: run the container in the background and print the new container ID")
<ide> flSigProxy = cmd.Bool([]string{"#sig-proxy", "-sig-proxy"}, true, "Proxy received signals to the process (even in non-TTY mode). SIGCHLD, SIGSTOP, and SIGKILL are not proxied.")
<ide> flName = cmd.String([]string{"#name", "-name"}, "", "Assign a name to the container")
<add> flAttach *opts.ListOpts
<ide>
<del> flAttach *opts.ListOpts
<del>
<del> ErrConflictAttachDetach = fmt.Errorf("Conflicting options: -a and -d")
<add> ErrConflictAttachDetach = fmt.Errorf("Conflicting options: -a and -d")
<add> ErrConflictRestartPolicyAndAutoRemove = fmt.Errorf("Conflicting options: --restart and --rm")
<add> ErrConflictDetachAutoRemove = fmt.Errorf("Conflicting options: --rm and -d")
<ide> )
<ide>
<ide> config, hostConfig, cmd, err := runconfig.ParseSubcommand(cmd, args, nil)
<ide> func (cli *DockerCli) CmdRun(args ...string) error {
<ide> if fl := cmd.Lookup("attach"); fl != nil {
<ide> flAttach = fl.Value.(*opts.ListOpts)
<ide> if flAttach.Len() != 0 {
<del> return fmt.Errorf("Conflicting options: -a and -d")
<add> return ErrConflictAttachDetach
<ide> }
<ide> }
<ide> if *flAutoRemove {
<del> return fmt.Errorf("Conflicting options: --rm and -d")
<add> return ErrConflictDetachAutoRemove
<ide> }
<ide>
<ide> config.AttachStdin = false
<ide> func (cli *DockerCli) CmdRun(args ...string) error {
<ide> }()
<ide> }
<ide>
<add> if *flAutoRemove && (hostConfig.RestartPolicy.Name == "always" || hostConfig.RestartPolicy.Name == "on-failure") {
<add> return ErrConflictRestartPolicyAndAutoRemove
<add> }
<add>
<ide> // We need to instanciate the chan because the select needs it. It can
<ide> // be closed but can't be uninitialized.
<ide> hijacked := make(chan io.Closer)
<ide><path>runconfig/parse.go
<ide> import (
<ide> )
<ide>
<ide> var (
<del> ErrInvalidWorkingDirectory = fmt.Errorf("The working directory is invalid. It needs to be an absolute path.")
<del> ErrConflictContainerNetworkAndLinks = fmt.Errorf("Conflicting options: --net=container can't be used with links. This would result in undefined behavior.")
<del> ErrConflictContainerNetworkAndDns = fmt.Errorf("Conflicting options: --net=container can't be used with --dns. This configuration is invalid.")
<del> ErrConflictDetachAutoRemove = fmt.Errorf("Conflicting options: --rm and -d")
<del> ErrConflictNetworkHostname = fmt.Errorf("Conflicting options: -h and the network mode (--net)")
<del> ErrConflictHostNetworkAndDns = fmt.Errorf("Conflicting options: --net=host can't be used with --dns. This configuration is invalid.")
<del> ErrConflictHostNetworkAndLinks = fmt.Errorf("Conflicting options: --net=host can't be used with links. This would result in undefined behavior.")
<del> ErrConflictRestartPolicyAndAutoRemove = fmt.Errorf("Conflicting options: --restart and --rm")
<add> ErrInvalidWorkingDirectory = fmt.Errorf("The working directory is invalid. It needs to be an absolute path.")
<add> ErrConflictContainerNetworkAndLinks = fmt.Errorf("Conflicting options: --net=container can't be used with links. This would result in undefined behavior.")
<add> ErrConflictContainerNetworkAndDns = fmt.Errorf("Conflicting options: --net=container can't be used with --dns. This configuration is invalid.")
<add> ErrConflictNetworkHostname = fmt.Errorf("Conflicting options: -h and the network mode (--net)")
<add> ErrConflictHostNetworkAndDns = fmt.Errorf("Conflicting options: --net=host can't be used with --dns. This configuration is invalid.")
<add> ErrConflictHostNetworkAndLinks = fmt.Errorf("Conflicting options: --net=host can't be used with links. This would result in undefined behavior.")
<ide> )
<ide>
<ide> // FIXME Only used in tests
<ide> func parseRun(cmd *flag.FlagSet, args []string, sysInfo *sysinfo.SysInfo) (*Conf
<ide> return nil, nil, cmd, err
<ide> }
<ide>
<del> if *flAutoRemove && (restartPolicy.Name == "always" || restartPolicy.Name == "on-failure") {
<del> return nil, nil, cmd, ErrConflictRestartPolicyAndAutoRemove
<del> }
<del>
<ide> config := &Config{
<ide> Hostname: hostname,
<ide> Domainname: domainname, | 2 |
Ruby | Ruby | use assert_nil when appropriate | 0fc4a52c6c34deaee59c55b1d7ff3ea62eda2b36 | <ide><path>Library/Homebrew/test/shell_test.rb
<ide> def test_path_to_shell
<ide> end
<ide>
<ide> def test_path_to_shell_failure
<del> assert_equal nil, Utils::Shell.path_to_shell("")
<del> assert_equal nil, Utils::Shell.path_to_shell("@@")
<del> assert_equal nil, Utils::Shell.path_to_shell("invalid_shell-4.2")
<add> assert_nil Utils::Shell.path_to_shell("")
<add> assert_nil Utils::Shell.path_to_shell("@@")
<add> assert_nil Utils::Shell.path_to_shell("invalid_shell-4.2")
<ide> end
<ide>
<ide> def test_sh_quote
<ide><path>Library/Homebrew/test/tab_test.rb
<ide> def test_to_json
<ide> assert_equal @tab.changed_files, tab.changed_files
<ide> assert_equal @tab.tap, tab.tap
<ide> assert_equal @tab.spec, tab.spec
<del> assert_equal @tab.time, tab.time
<add> assert_nil @tab.time
<add> assert_nil tab.time
<ide> assert_equal @tab.HEAD, tab.HEAD
<ide> assert_equal @tab.compiler, tab.compiler
<ide> assert_equal @tab.stdlib, tab.stdlib
<del> assert_equal @tab.runtime_dependencies, tab.runtime_dependencies
<add> assert_nil @tab.runtime_dependencies
<add> assert_nil tab.runtime_dependencies
<ide> assert_equal @tab.stable_version, tab.stable_version
<ide> assert_equal @tab.devel_version, tab.devel_version
<ide> assert_equal @tab.head_version, tab.head_version
<del> assert_equal @tab.source["path"], tab.source["path"]
<add> assert_nil @tab.source["path"]
<add> assert_nil tab.source["path"]
<ide> end
<ide>
<ide> def test_remap_deprecated_options | 2 |
Text | Text | fix typos | 2385358edd9652ce115632002027ac71bf2633db | <ide><path>CHANGELOG-6.x.md
<ide> ### Fixed
<ide> - Fixed default value for $count in `PhpRedisConnection::spop()` method ([#30546](https://github.com/laravel/framework/pull/30546))
<ide> - Fixed breaking compatibility with multi-schema postgres ([#30562](https://github.com/laravel/framework/pull/30562), [6460d2b](https://github.com/laravel/framework/commit/6460d2b1bd89f470a76f5c2c3bddd390fe430e0f))
<del>- Fixed `Model::isDirty()` with `colelction` \ `object` casts ([#30565](https://github.com/laravel/framework/pull/30565))
<add>- Fixed `Model::isDirty()` with `collection` / `object` casts ([#30565](https://github.com/laravel/framework/pull/30565))
<ide> - Fixed `bcc` in `MailgunTransport::send()` ([#30569](https://github.com/laravel/framework/pull/30569))
<ide>
<ide> ### Changed
<ide>
<ide> ### Added
<ide> - Added ability to override `setUserPassword` on password reset ([#30218](https://github.com/laravel/framework/pull/30218))
<del>- Added firing `deleting` \ `deleted` events in `MorphPivot` ([#30229](https://github.com/laravel/framework/pull/30229))
<add>- Added firing `deleting` / `deleted` events in `MorphPivot` ([#30229](https://github.com/laravel/framework/pull/30229))
<ide> - Added locking mechanism for the array cache driver ([#30253](https://github.com/laravel/framework/pull/30253))
<ide> - Added `dropAllViews` functionality to the SQL Server builder ([#30222](https://github.com/laravel/framework/pull/30222))
<ide>
<ide>
<ide> ### Added
<ide> - Added `TestResponse::assertJsonPath()` method ([#29957](https://github.com/laravel/framework/pull/29957))
<del>- Added `hasMacro` \ `getGlobalMacro` \ `hasGlobalMacro` methods to `Eloquent Builder` ([#30008](https://github.com/laravel/framework/pull/30008))
<add>- Added `hasMacro` / `getGlobalMacro` / `hasGlobalMacro` methods to `Eloquent Builder` ([#30008](https://github.com/laravel/framework/pull/30008))
<ide> - Added `Illuminate\Database\Eloquent\Relations\BelongsToMany::getPivotColumns()` method ([#30049](https://github.com/laravel/framework/pull/30049))
<del>- Added `ScheduledTaskFinished` \ `ScheduledTaskStarting` events to signal when scheduled task runs ([#29888](https://github.com/laravel/framework/pull/29888))
<del>- Allowing adding command arguments and options with `InputArgument` \ `InputOption` objects ([#29987](https://github.com/laravel/framework/pull/29987))
<add>- Added `ScheduledTaskFinished` / `ScheduledTaskStarting` events to signal when scheduled task runs ([#29888](https://github.com/laravel/framework/pull/29888))
<add>- Allowing adding command arguments and options with `InputArgument` / `InputOption` objects ([#29987](https://github.com/laravel/framework/pull/29987))
<ide>
<ide> ### Fixed
<ide> - Fixed `__()` with `null` parameter ([#29967](https://github.com/laravel/framework/pull/29967))
<ide> - Fixed adding `NotFoundHttpException` to "allowed" exceptions in tests ([#29975](https://github.com/laravel/framework/pull/29975))
<ide>
<ide> ### Changed
<del>- Make it possible to disable encryption via `0`/`false` ([#29985](https://github.com/laravel/framework/pull/29985))
<add>- Make it possible to disable encryption via `0` / `false` ([#29985](https://github.com/laravel/framework/pull/29985))
<ide> - Allowed a symfony file instance in validate dimensions ([#30009](https://github.com/laravel/framework/pull/30009))
<ide> - Create storage fakes with custom configuration ([#29999](https://github.com/laravel/framework/pull/29999))
<ide> - Set locale in `PendingMail` only if locale present conditionally ([dd1e0a6](https://github.com/laravel/framework/commit/dd1e0a604713ddae21e6a893e4f605a6777300e8)) | 1 |
Ruby | Ruby | handle nil @version | c32865011d1536eb6f7e626a3cb389d35beda48a | <ide><path>Library/Homebrew/requirements/xcode_requirement.rb
<ide> def message
<ide> A full installation of Xcode.app#{version} is required to compile this software.
<ide> Installing just the Command Line Tools is not sufficient.
<ide> EOS
<del> if Version.new(MacOS::Xcode.latest_version) < Version.new(@version)
<add> if @version && Version.new(MacOS::Xcode.latest_version) < Version.new(@version)
<ide> message + <<~EOS
<ide> Xcode#{version} cannot be installed on macOS #{MacOS.version}.
<ide> You must upgrade your version of macOS. | 1 |
Javascript | Javascript | add process.hrtime.bigint benchmark | 69906fbc52b6a89d0a571c70b98701e51900a884 | <ide><path>benchmark/process/bench-hrtime.js
<ide> const assert = require('assert');
<ide>
<ide> const bench = common.createBenchmark(main, {
<ide> n: [1e6],
<del> type: ['raw', 'diff']
<add> type: ['raw', 'diff', 'bigint']
<ide> });
<ide>
<ide> function main({ n, type }) {
<ide> const hrtime = process.hrtime;
<del> var noDead = hrtime();
<add> var noDead = type === 'bigint' ? hrtime.bigint() : hrtime();
<ide> var i;
<ide>
<del> if (type === 'raw') {
<del> bench.start();
<del> for (i = 0; i < n; i++) {
<del> noDead = hrtime();
<del> }
<del> bench.end(n);
<del> } else {
<del> bench.start();
<del> for (i = 0; i < n; i++) {
<del> noDead = hrtime(noDead);
<del> }
<del> bench.end(n);
<add> switch (type) {
<add> case 'raw':
<add> bench.start();
<add> for (i = 0; i < n; i++) {
<add> noDead = hrtime();
<add> }
<add> bench.end(n);
<add> break;
<add> case 'diff':
<add> bench.start();
<add> for (i = 0; i < n; i++) {
<add> noDead = hrtime(noDead);
<add> }
<add> bench.end(n);
<add> break;
<add> case 'bigint':
<add> bench.start();
<add> for (i = 0; i < n; i++) {
<add> noDead = hrtime.bigint();
<add> }
<add> bench.end(n);
<add> break;
<ide> }
<ide>
<del> assert.ok(Array.isArray(noDead));
<add> // eslint-disable-next-line valid-typeof
<add> assert.ok(Array.isArray(noDead) || typeof noDead === 'bigint');
<ide> }
<ide><path>lib/internal/process/per_thread.js
<ide> function wrapProcessMethods(binding) {
<ide> ];
<ide> }
<ide>
<del> // Use a BigUint64Array in the closure because V8 does not have an API for
<del> // creating a BigInt out of a uint64_t yet.
<add> // Use a BigUint64Array in the closure because this is actually a bit
<add> // faster than simply returning a BigInt from C++ in V8 7.1.
<ide> const hrBigintValues = new BigUint64Array(1);
<ide> function hrtimeBigInt() {
<ide> _hrtimeBigInt(hrBigintValues); | 2 |
Python | Python | use execute instead of check_output | fff3792e7187eb2bebf8ee1bada93d2fb29802c3 | <ide><path>tools/test.py
<ide> def ArgsToTestPaths(test_root, args, suites):
<ide> return paths
<ide>
<ide>
<del>def get_env_type(vm, options_type):
<add>def get_env_type(vm, options_type, context):
<ide> if options_type is not None:
<ide> env_type = options_type
<ide> else:
<del> if "fips" in subprocess.check_output([vm, "-p",
<del> "process.versions.openssl"]):
<del> env_type = "fips"
<del> # NOTE(nikhil): "simple" is the default value for var 'env_type' and should
<del> # be set last if no if/elif matches. If you plan to add more values, use
<del> # 'elif' above.
<del> else:
<del> env_type = "simple"
<add> # 'simple' is the default value for 'env_type'.
<add> env_type = 'simple'
<add> ssl_ver = Execute([vm, '-p', 'process.versions.openssl'], context).stdout
<add> if 'fips' in ssl_ver:
<add> env_type = 'fips'
<ide> return env_type
<ide>
<ide>
<ide> def Main():
<ide> 'mode': mode,
<ide> 'system': utils.GuessOS(),
<ide> 'arch': vmArch,
<del> 'type': get_env_type(vm, options.type),
<add> 'type': get_env_type(vm, options.type, context),
<ide> }
<ide> test_list = root.ListTests([], path, context, arch, mode)
<ide> unclassified_tests += test_list | 1 |
Java | Java | remove dead code | 7f8ede14077189114c5040da9192b31ff01475db | <ide><path>org.springframework.beans/src/test/java/org/springframework/beans/factory/config/PropertyResourceConfigurerTests.java
<ide> public void testPreferencesPlaceholderConfigurerWithPathInPlaceholder() {
<ide> Preferences.systemRoot().node("mySystemPath/mypath").remove("myName");
<ide> }
<ide>
<del> /* TODO SPR-7508: uncomment after PropertySourcesPlaceholderConfigurer implementation
<del> @Test
<del> public void testPreferencesPlaceholderConfigurerWithCustomPropertiesInEnvironment() {
<del> factory.registerBeanDefinition("tb",
<del> genericBeanDefinition(TestBean.class)
<del> .addPropertyValue("name", "${mypath/myName}")
<del> .addPropertyValue("age", "${myAge}")
<del> .addPropertyValue("touchy", "${myotherpath/myTouchy}")
<del> .getBeanDefinition());
<del>
<del> Properties props = new Properties();
<del> props.put("myAge", "99");
<del> factory.getEnvironment().getPropertySources().add(new PropertiesPropertySource("localProps", props));
<del>
<del> PreferencesPlaceholderConfigurer ppc = new PreferencesPlaceholderConfigurer();
<del> ppc.setSystemTreePath("mySystemPath");
<del> ppc.setUserTreePath("myUserPath");
<del> Preferences.systemRoot().node("mySystemPath").node("mypath").put("myName", "myNameValue");
<del> Preferences.systemRoot().node("mySystemPath/myotherpath").put("myTouchy", "myTouchyValue");
<del> Preferences.userRoot().node("myUserPath/myotherpath").put("myTouchy", "myOtherTouchyValue");
<del> ppc.afterPropertiesSet();
<del> ppc.postProcessBeanFactory(factory);
<del>
<del> TestBean tb = (TestBean) factory.getBean("tb");
<del> assertEquals("myNameValue", tb.getName());
<del> assertEquals(99, tb.getAge());
<del> assertEquals("myOtherTouchyValue", tb.getTouchy());
<del> Preferences.userRoot().node("myUserPath/myotherpath").remove("myTouchy");
<del> Preferences.systemRoot().node("mySystemPath/myotherpath").remove("myTouchy");
<del> Preferences.systemRoot().node("mySystemPath/mypath").remove("myName");
<del> }
<del> */
<del>
<ide>
<ide> private static class ConvertingOverrideConfigurer extends PropertyOverrideConfigurer {
<ide>
<ide><path>org.springframework.context/src/main/java/org/springframework/context/support/AbstractApplicationContext.java
<ide> private void invokeBeanFactoryPostProcessors(
<ide> }
<ide> }
<ide>
<del> /**
<del> * Common location for subclasses to call and receive registration of standard
<del> * {@link BeanFactoryPostProcessor} bean definitions.
<del> *
<del> * @param registry subclass BeanDefinitionRegistry
<del> */
<del> protected void registerStandardBeanFactoryPostProcessors(BeanDefinitionRegistry registry) {
<del> }
<del>
<ide> /**
<ide> * Instantiate and invoke all registered BeanPostProcessor beans,
<ide> * respecting explicit order if given.
<ide><path>org.springframework.context/src/main/java/org/springframework/context/support/AbstractRefreshableApplicationContext.java
<ide> protected void customizeBeanFactory(DefaultListableBeanFactory beanFactory) {
<ide> }
<ide> beanFactory.setParameterNameDiscoverer(new LocalVariableTableParameterNameDiscoverer());
<ide> beanFactory.setAutowireCandidateResolver(new QualifierAnnotationAutowireCandidateResolver());
<del> registerStandardBeanFactoryPostProcessors(beanFactory);
<ide> }
<ide>
<ide> /**
<ide><path>org.springframework.context/src/main/java/org/springframework/context/support/GenericApplicationContext.java
<ide> public GenericApplicationContext() {
<ide> this.beanFactory = new DefaultListableBeanFactory();
<ide> this.beanFactory.setParameterNameDiscoverer(new LocalVariableTableParameterNameDiscoverer());
<ide> this.beanFactory.setAutowireCandidateResolver(new QualifierAnnotationAutowireCandidateResolver());
<del> registerStandardBeanFactoryPostProcessors(this.beanFactory);
<ide> }
<ide>
<ide> /** | 4 |
Python | Python | add test of simple textcat workflow | 1831dbd065a8776a77d18e10b44f84c99bca4c75 | <ide><path>spacy/tests/textcat/test_textcat.py
<add>from __future__ import unicode_literals
<add>from ...language import Language
<add>
<add>def test_simple_train():
<add> nlp = Language()
<add>
<add> nlp.add_pipe(nlp.create_pipe('textcat'))
<add> nlp.get_pipe('textcat').add_label('is_good')
<add>
<add> nlp.begin_training()
<add>
<add> for i in range(5):
<add> for text, answer in [('aaaa', 1.), ('bbbb', 0), ('aa', 1.),
<add> ('bbbbbbbbb', 0.), ('aaaaaa', 1)]:
<add> nlp.update([text], [{'cats': {'answer': answer}}])
<add> doc = nlp(u'aaa')
<add> assert 'is_good' in doc.cats
<add> assert doc.cats['is_good'] >= 0.5
<add> | 1 |
Javascript | Javascript | add blink app in showcase | 977acb9b681b28457e90686deef79b950936dc70 | <ide><path>website/src/react-native/showcase.js
<ide> var featured = [
<ide> linkPlayStore: 'https://play.google.com/store/apps/details?id=com.bloomberg.android.plus&hl=en',
<ide> infoLink: 'https://www.techatbloomberg.com/blog/bloomberg-used-react-native-develop-new-consumer-app/',
<ide> infoTitle: 'How Bloomberg Used React Native to Develop its new Consumer App',
<del> }
<add> },
<add> {
<add> name: 'Blink',
<add> icon: 'https://lh3.googleusercontent.com/QaId7rFtOjAT-2tHVkKB4lebX_w4ujWiO7ZIDe3Hd99TfBmPmiZySbLbVJV65qs0ViM=w300-rw',
<add> linkPlayStore: 'https://play.google.com/store/apps/details?id=com.witapp',
<add> infoLink: 'https://hashnode.com/post/what-we-learned-after-using-react-native-for-a-year-civdr8zv6058l3853wqud7hqp',
<add> infoTitle: 'What we learned after using React Native for a year',
<add> }
<ide> ];
<ide>
<ide> /* | 1 |
Ruby | Ruby | use similar logic to `brew fetch` | 79cb9e051bf4788a4ffb0e59523d342212715cbb | <ide><path>Library/Homebrew/dev-cmd/bump-cask-pr.rb
<ide> # frozen_string_literal: true
<ide>
<ide> require "cask"
<add>require "cask/download"
<ide> require "cli/parser"
<ide> require "utils/tar"
<ide>
<ide> def bump_cask_pr
<ide> silent: true)
<ide>
<ide> tmp_cask = Cask::CaskLoader.load(tmp_contents)
<del> tmp_config = cask.config
<del> tmp_url = tmp_cask.url.to_s
<add> tmp_config = tmp_cask.config
<ide>
<ide> if old_hash != :no_check
<del> new_hash = fetch_resource(cask, new_version, tmp_url) if new_hash.nil?
<add> new_hash = fetch_cask(tmp_contents)[1] if new_hash.nil?
<ide>
<ide> if tmp_contents.include?("Hardware::CPU.intel?")
<ide> other_intel = !Hardware::CPU.intel?
<ide> other_contents = tmp_contents.gsub("Hardware::CPU.intel?", other_intel.to_s)
<del> replacement_pairs << fetch_cask(other_contents, new_version)
<add> replacement_pairs << fetch_cask(other_contents)
<ide> end
<ide> end
<ide>
<ide> cask.languages.each do |language|
<ide> next if language == cask.language
<ide>
<ide> lang_config = tmp_config.merge(Cask::Config.new(explicit: { languages: [language] }))
<del> replacement_pairs << fetch_cask(tmp_contents, new_version, config: lang_config)
<add> replacement_pairs << fetch_cask(tmp_contents, config: lang_config)
<ide> end
<ide> end
<ide> end
<ide> def bump_cask_pr
<ide> GitHub.create_bump_pr(pr_info, args: args)
<ide> end
<ide>
<del> def fetch_resource(cask, version, url, **specs)
<del> resource = Resource.new
<del> resource.url(url, specs)
<del> resource.owner = Resource.new(cask.token)
<del> resource.version = version
<del>
<del> resource_path = resource.fetch
<del> Utils::Tar.validate_file(resource_path)
<del> resource_path.sha256
<del> end
<del>
<del> def fetch_cask(contents, version, config: nil)
<add> def fetch_cask(contents, config: nil)
<ide> cask = Cask::CaskLoader.load(contents)
<ide> cask.config = config if config.present?
<del> url = cask.url.to_s
<ide> old_hash = cask.sha256.to_s
<del> new_hash = fetch_resource(cask, version, url)
<add>
<add> cask_download = Cask::Download.new(cask, quarantine: true)
<add> download = cask_download.fetch(verify_download_integrity: false)
<add> Utils::Tar.validate_file(download)
<add> new_hash = download.sha256
<add>
<ide> [old_hash, new_hash]
<ide> end
<ide> | 1 |
PHP | PHP | add includewhen directive | 5fe39aa3bac01f49ffaf5d44bf03688690106dbe | <ide><path>src/Illuminate/View/Compilers/Concerns/CompilesIncludes.php
<ide> protected function compileIncludeIf($expression)
<ide>
<ide> return "<?php if (\$__env->exists({$expression})) echo \$__env->make({$expression}, array_except(get_defined_vars(), array('__data', '__path')))->render(); ?>";
<ide> }
<add>
<add> /**
<add> * Compile the include-when statements into valid PHP.
<add> *
<add> * @param string $expression
<add> * @return string
<add> */
<add> protected function compileIncludeWhen($expression)
<add> {
<add> $expression = $this->stripParentheses($expression);
<add>
<add> preg_match('/ *(.*), *(.*)$/is', $expression, $matches);
<add>
<add> $when = trim($matches[1]);
<add>
<add> $arguments = trim($matches[2]);
<add>
<add> return "<?php if ({$when}) echo \$__env->make({$arguments}, array_except(get_defined_vars(), array('__data', '__path')))->render(); ?>";
<add> }
<ide> }
<ide><path>tests/View/Blade/BladeIncludeWhenTest.php
<add><?php
<add>
<add>namespace Illuminate\Tests\Blade;
<add>
<add>use Mockery as m;
<add>use PHPUnit\Framework\TestCase;
<add>use Illuminate\View\Compilers\BladeCompiler;
<add>
<add>class BladeIncludeWhenTest extends TestCase
<add>{
<add> public function tearDown()
<add> {
<add> m::close();
<add> }
<add>
<add> public function testIncludeWhensAreCompiled()
<add> {
<add> $compiler = new BladeCompiler($this->getFiles(), __DIR__);
<add> $this->assertEquals('<?php if (true) echo $__env->make(\'foo\', array_except(get_defined_vars(), array(\'__data\', \'__path\')))->render(); ?>', $compiler->compileString('@includeWhen(true, \'foo\')'));
<add> $this->assertEquals('<?php if (true) echo $__env->make(name(foo), array_except(get_defined_vars(), array(\'__data\', \'__path\')))->render(); ?>', $compiler->compileString('@includeWhen(true, name(foo))'));
<add> }
<add>
<add> protected function getFiles()
<add> {
<add> return m::mock('Illuminate\Filesystem\Filesystem');
<add> }
<add>} | 2 |
Javascript | Javascript | fix touchables inspector | 5f8d7ac61817f7cb9fa08a39c513a1114f69746e | <ide><path>Libraries/Components/Touchable/TouchableWithoutFeedback.js
<ide> const TouchableWithoutFeedback = React.createClass({
<ide> ((child._owner && child._owner.getName && child._owner.getName()) || '<unknown>')
<ide> );
<ide> if (Touchable.TOUCH_TARGET_DEBUG && child.type && child.type.displayName === 'View') {
<del> if (!Array.isArray(children)) {
<del> children = [children];
<del> }
<add> children = React.Children.toArray(children);
<ide> children.push(Touchable.renderDebugView({color: 'red', hitSlop: this.props.hitSlop}));
<ide> }
<ide> const style = (Touchable.TOUCH_TARGET_DEBUG && child.type && child.type.displayName === 'Text') ? | 1 |
PHP | PHP | add test skeleton for orm cache shell | 041885dcaa3358c4357603ff85ce0f839e880cb1 | <ide><path>tests/TestCase/Console/Command/OrmCacheShellTest.php
<add><?php
<add>/**
<add> * CakePHP(tm) : Rapid Development Framework (http://cakephp.org)
<add> * Copyright (c) Cake Software Foundation, Inc. (http://cakefoundation.org)
<add> *
<add> * Licensed under The MIT License
<add> * For full copyright and license information, please see the LICENSE.txt
<add> * Redistributions of files must retain the above copyright notice.
<add> *
<add> * @copyright Copyright (c) Cake Software Foundation, Inc. (http://cakefoundation.org)
<add> * @link http://cakephp.org CakePHP(tm) Project
<add> * @since 3.0.0
<add> * @license http://www.opensource.org/licenses/mit-license.php MIT License
<add> */
<add>namespace Cake\Test\TestCase\Console\Command;
<add>
<add>use Cake\Console\Command\OrmCacheShell;
<add>use Cake\Cache\Cache;
<add>use Cake\TestSuite\TestCase;
<add>
<add>/**
<add> * OrmCacheShell test.
<add> */
<add>class OrmCacheShellTest extends TestCase {
<add>
<add>/**
<add> * setup method
<add> *
<add> * @return void
<add> */
<add> public function setUp() {
<add> parent::setUp();
<add> $this->io = $this->getMock('Cake\Console\ConsoleIo');
<add> $this->Shell = new OrmCacheShell($this->io);
<add> }
<add>
<add>/**
<add> * Test build() with no args.
<add> *
<add> * @return void
<add> */
<add> public function testBuildNoArgs() {
<add> $this->markTestIncomplete();
<add> }
<add>
<add>/**
<add> * Test build() with one arg.
<add> *
<add> * @return void
<add> */
<add> public function testBuildNamedModel() {
<add> $this->markTestIncomplete();
<add> }
<add>
<add>/**
<add> * Test build() with a non-existing connection name.
<add> *
<add> * @return void
<add> */
<add> public function testBuildInvalidConnection() {
<add> $this->markTestIncomplete();
<add> }
<add>
<add>/**
<add> * Test clear() with no args.
<add> *
<add> * @return void
<add> */
<add> public function testClearInvalidConnection() {
<add> $this->markTestIncomplete();
<add> }
<add>
<add>/**
<add> * Test clear() with once arg.
<add> *
<add> * @return void
<add> */
<add> public function testClearNoArgs() {
<add> $this->markTestIncomplete();
<add> }
<add>
<add>/**
<add> * Test clear() with a model name.
<add> *
<add> * @return void
<add> */
<add> public function testClearNamedModel() {
<add> $this->markTestIncomplete();
<add> }
<add>
<add>} | 1 |
PHP | PHP | fix assertpushedon docblock | 2574aca756138811b331060bb0d572592971d764 | <ide><path>src/Illuminate/Support/Facades/Queue.php
<ide> * @method static void assertNotPushed(string|\Closure $job, callable $callback = null)
<ide> * @method static void assertNothingPushed()
<ide> * @method static void assertPushed(string|\Closure $job, callable|int $callback = null)
<del> * @method static void assertPushedOn(string $queue, string|\Closure $job, callable|int $callback = null)
<add> * @method static void assertPushedOn(string $queue, string|\Closure $job, callable $callback = null)
<ide> * @method static void assertPushedWithChain(string $job, array $expectedChain = [], callable $callback = null)
<ide> *
<ide> * @see \Illuminate\Queue\QueueManager | 1 |
Text | Text | update example explanation in detail | 158b27c7150af4bfdb4d683e0d3d576f74d6539a | <ide><path>guide/english/javascript/loops/for-loop/index.md
<ide> any of these three expressions or the statement can be omitted. For loops are co
<ide> When indexing over an array many times it is easy to exceed the bounds of the array (ex. try to reference the 4th element of a 3 element array).
<ide>
<ide> ```javascript
<del> // This will cause an error.
<ide> // The bounds of the array will be exceeded.
<add> // Here the value of arr.length is 3
<add> // Since variable 'i' is initialised to 0, it will iterate the array 'arr' 4 (i.e. from i = 0 to i = 3) times.
<add> // Hence 4th iteration will output undefined since there are only three elements in the array.
<ide> var arr = [ 1, 2, 3 ];
<ide> for (var i = 0; i <= arr.length; i++) {
<ide> console.log(arr[i]); | 1 |
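For contrast, a minimal sketch (not part of the patch above) of the bounded loop the guide is warning readers toward, using `<` so the index never leaves the array:

```javascript
// Corrected version: i < arr.length keeps every read inside the array.
var arr = [1, 2, 3];
for (var i = 0; i < arr.length; i++) {
  console.log(arr[i]); // logs 1, 2, 3 and never prints undefined
}
```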
Ruby | Ruby | use new skool ruby instead of ruby classicβ’ | b7934afe326f8bba9cdcacdfac93062dff155efe | <ide><path>activerecord/lib/active_record/fixtures.rb
<ide> def read_fixture_files
<ide> end
<ide>
<ide> def read_yaml_fixture_files
<del> yaml_string = ""
<del> Dir["#{@fixture_path}/**/*.yml"].select { |f| test(?f, f) }.each do |subfixture_path|
<del> yaml_string << IO.read(subfixture_path)
<del> end
<del> yaml_string << IO.read(yaml_file_path)
<add> yaml_string = (Dir["#{@fixture_path}/**/*.yml"].select { |f|
<add> File.file?(f)
<add> } + [yaml_file_path]).map { |file_path| IO.read(file_path) }.join
<ide>
<ide> if yaml = parse_yaml_string(yaml_string)
<ide> # If the file is an ordered map, extract its children. | 1 |
PHP | PHP | fix double encoding in postlink() urls | d730ea59db8d6683af10cbd0c2c7fa5c1f5045fe | <ide><path>src/View/Helper/FormHelper.php
<ide> public function postLink($title, $url = null, array $options = [])
<ide>
<ide> $formName = str_replace('.', '', uniqid('post_', true));
<ide> $formOptions = [
<del> 'action' => $this->Url->build($url),
<ide> 'name' => $formName,
<ide> 'style' => 'display:none;',
<ide> 'method' => 'post',
<ide> public function postLink($title, $url = null, array $options = [])
<ide> $formOptions['target'] = $options['target'];
<ide> unset($options['target']);
<ide> }
<add> $templater = $this->templater();
<ide>
<ide> $this->_lastAction($url);
<add> $action = $templater->formatAttributes([
<add> 'action' => $this->Url->build($url),
<add> 'escape' => false
<add> ]);
<ide>
<del> $out = $this->formatTemplate('formStart', [
<del> 'attrs' => $this->templater()->formatAttributes($formOptions)
<add> $out = $templater->format('formStart', [
<add> 'attrs' => $templater->formatAttributes($formOptions) . $action
<ide> ]);
<ide> $out .= $this->hidden('_method', ['value' => $requestMethod]);
<ide> $out .= $this->_csrfField();
<ide> public function postLink($title, $url = null, array $options = [])
<ide> unset($options['data']);
<ide> }
<ide> $out .= $this->secure($fields);
<del> $out .= $this->formatTemplate('formEnd', []);
<add> $out .= $templater->format('formEnd', []);
<ide>
<ide> if ($options['block']) {
<ide> if ($options['block'] === true) {
<ide><path>tests/TestCase/View/Helper/FormHelperTest.php
<ide> public function testPostLink()
<ide> $this->assertHtml($expected, $result);
<ide> }
<ide>
<add> /**
<add> * test postLink() with query string args.
<add> *
<add> * @return void
<add> */
<add> public function testPostLinkWithQuery()
<add> {
<add> $result = $this->Form->postLink(
<add> 'Delete',
<add> ['controller' => 'posts', 'action' => 'delete', 1, '?' => ['a' => 'b', 'c' => 'd']
<add> ]);
<add> $expected = [
<add> 'form' => [
<add> 'method' => 'post', 'action' => '/posts/delete/1?a=b&c=d',
<add> 'name' => 'preg:/post_\w+/', 'style' => 'display:none;'
<add> ],
<add> 'input' => ['type' => 'hidden', 'name' => '_method', 'value' => 'POST'],
<add> '/form',
<add> 'a' => ['href' => '#', 'onclick' => 'preg:/document\.post_\w+\.submit\(\); event\.returnValue = false; return false;/'],
<add> 'Delete',
<add> '/a'
<add> ];
<add> $this->assertHtml($expected, $result);
<add> }
<add>
<ide> /**
<ide> * Test postLink with additional data.
<ide> * | 2 |
Text | Text | clarify symlink resolution for __filename | bb04a8bbf2dc97ee3e75f985de1b8b6ff4f0619c | <ide><path>doc/api/modules.md
<ide> added: v0.0.1
<ide>
<ide> * {string}
<ide>
<del>The file name of the current module. This is the resolved absolute path of the
<del>current module file.
<add>The file name of the current module. This is the current module file's absolute
<add>path with symlinks resolved.
<ide>
<ide> For a main program this is not necessarily the same as the file name used in the
<ide> command line. | 1 |
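A small illustration of the symlink point the doc change above makes; the paths here are hypothetical, not from the Node docs:

```javascript
// Suppose /data/real/module.js is symlinked from /data/link/module.js.
// Requiring the module through the symlinked path still reports the
// resolved (real) location, because symlinks are resolved by default.
console.log(__filename); // e.g. /data/real/module.js
console.log(__dirname);  // e.g. /data/real
```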
Ruby | Ruby | cure some ills discovered with the refactoring | 2f7c5f84e4834e49001ed565cfe45f14e120613f | <ide><path>actionpack/lib/action_controller/base.rb
<ide> def action_name
<ide>
<ide> # A unified replacement for the individual renders (work-in-progress).
<ide> def render(options = {}, deprecated_status = nil)
<add> # puts "Rendering: #{options.inspect}"
<ide> raise DoubleRenderError, "Can only render or redirect once per action" if performed?
<ide>
<ide> # Backwards compatibility
<ide> def render(options = {}, deprecated_status = nil)
<ide> end
<ide>
<ide> # Returns the result of the render as a string.
<del> def render_to_string(options) #:doc:
<add> def render_to_string(options = {}) #:doc:
<ide> result = render(options)
<ide> erase_render_results
<ide> return result
<ide> def redirect_to(options = {}, *parameters_for_method_reference) #:doc:
<ide> case options
<ide> when %r{^\w+://.*}
<ide> raise DoubleRenderError, "Can only render or redirect once per action" if performed?
<del> logger.info("Redirected to #{url}") unless logger.nil?
<add> logger.info("Redirected to #{options}") unless logger.nil?
<ide> @response.redirect(options)
<ide> @performed_redirect = true
<ide>
<ide><path>actionpack/lib/action_controller/deprecated_renders_and_redirects.rb
<ide> def render_template(template, status = nil, type = "rhtml") #:doc:
<ide> # considerably faster than rendering through the template engine.
<ide> # Use block for response body if provided (useful for deferred rendering or streaming output).
<ide> def render_text(text = nil, status = nil) #:doc:
<del> render(:text => text, :status => status) { yield }
<add> render :text => text, :status => status
<ide> end
<ide>
<ide> # Renders an empty response that can be used when the request is only interested in triggering an effect. Do note that good
<ide> def render_partial_collection(partial_name, collection, partial_spacer_template
<ide> render :partial => partial_name, :collection => collection, :spacer_template => partial_spacer_template, :locals => local_assigns
<ide> end
<ide>
<add> def render_with_layout(template_name = default_template_name, status = nil, layout = nil) #:nodoc:
<add> render :template => template_name, :status => status, :layout => layout
<add> end
<add>
<add> def render_without_layout(template_name = default_template_name, status = nil) #:nodoc:
<add> render :template => template_name, :status => status, :layout => false
<add> end
<add>
<ide>
<ide> # Deprecated in favor of calling redirect_to directly with the path.
<ide> def redirect_to_path(path) #:doc:
<ide><path>actionpack/lib/action_controller/layout.rb
<ide> module Layout #:nodoc:
<ide> def self.append_features(base)
<ide> super
<ide> base.class_eval do
<del> alias_method :render_without_layout, :render
<del> alias_method :render, :render_with_layout
<add> alias_method :render_with_no_layout, :render
<add> alias_method :render, :render_with_a_layout
<ide>
<ide> class << self
<ide> alias_method :inherited_without_layout, :inherited
<ide> def active_layout(passed_layout = nil)
<ide> active_layout.include?("/") ? active_layout : "layouts/#{active_layout}" if active_layout
<ide> end
<ide>
<del> def xrender_with_layout(template_name = default_template_name, status = nil, layout = nil) #:nodoc:
<del> if layout ||= active_layout and action_has_layout?
<del> add_variables_to_assigns
<del> logger.info("Rendering #{template_name} within #{layout}") unless logger.nil?
<del> @content_for_layout = @template.render_file(template_name, true)
<del> render_without_layout(layout, status)
<del> else
<del> render_without_layout(template_name, status)
<del> end
<del> end
<add> def render_with_a_layout(options = {}, deprecated_status = nil, deprecated_layout = nil) #:nodoc:
<add> if (layout = pick_layout(options, deprecated_layout)) && options.is_a?(Hash) && options[:text]
<add> logger.info("Rendering #{options[:template]} within #{layout}") unless logger.nil?
<ide>
<del> def render_with_layout(options = {}, deprecated_status = nil, deprecated_layout = nil)
<del> if (layout = active_layout_for_r(options, deprecated_layout)) && options[:text]
<del> add_variables_to_assigns
<del> logger.info("Rendering #{template_name} within #{layout}") unless logger.nil?
<add> @content_for_layout = render_with_no_layout(options.merge(:layout => false))
<add> erase_render_results
<ide>
<del> @content_for_layout = render_without_layout(options)
<ide> add_variables_to_assigns
<del>
<del> erase_render_results
<del> render_without_layout(options.merge({ :text => @template.render_file(layout, true), :status => options[:status] || deprecated_status }))
<add> render_with_no_layout(options.merge({ :text => @template.render_file(layout, true), :status => options[:status] || deprecated_status }))
<ide> else
<del> render_without_layout(options, deprecated_status)
<add> render_with_no_layout(options, deprecated_status)
<ide> end
<ide> end
<ide>
<ide> private
<del> def active_layout_for_r(options = {}, deprecated_layout = nil)
<add> def pick_layout(options = {}, deprecated_layout = nil)
<ide> return deprecated_layout unless deprecated_layout.nil?
<ide>
<del> case options[:layout]
<del> when FalseClass
<del> nil
<del> when NilClass
<del> active_layout if action_has_layout?
<del> else
<del> active_layout(options[:layout])
<add> if options.is_a?(Hash)
<add> case options[:layout]
<add> when FalseClass
<add> nil
<add> when NilClass
<add> active_layout if action_has_layout?
<add> else
<add> active_layout(options[:layout])
<add> end
<add> else
<add> (deprecated_layout || active_layout) if action_has_layout?
<ide> end
<ide> end
<ide>
<ide><path>actionpack/lib/action_controller/response.rb
<ide> def initialize
<ide> end
<ide>
<ide> def redirect(to_url, permanently = false)
<del> @headers["Status"] ||= "302 Found"
<add> @headers["Status"] = "302 Found" unless @headers["Status"] == "301 Moved Permanently"
<ide> @headers["location"] = to_url
<ide>
<ide> @body = "<html><body>You are being <a href=\"#{to_url}\">redirected</a>.</body></html>"
<ide><path>actionpack/lib/action_controller/verification.rb
<ide> def verify_action(options) #:nodoc:
<ide>
<ide> if prereqs_invalid
<ide> flash.update(options[:add_flash]) if options[:add_flash]
<del> redirect_to(options[:redirect_to])
<add> redirect_to(options[:redirect_to]) if options[:redirect_to]
<ide> return false
<ide> end
<ide> | 5 |
PHP | PHP | update return type for tap | f67efea418b541dd3e6e083335f40a848f8660f4 | <ide><path>src/Illuminate/Database/Concerns/BuildsQueries.php
<ide> protected function cursorPaginator($items, $perPage, $cursor, $options)
<ide> * Pass the query to a given callback.
<ide> *
<ide> * @param callable $callback
<del> * @return $this
<add> * @return $this|mixed
<ide> */
<ide> public function tap($callback)
<ide> { | 1 |
Javascript | Javascript | remove obsolete mocks | 99b81da6292fcec0d7a375e4f606f85214e20987 | <ide><path>Libraries/BatchedBridge/__mocks__/NativeModules.js
<del>/**
<del> * Copyright (c) 2013-present, Facebook, Inc.
<del> * All rights reserved.
<del> *
<del> * This source code is licensed under the BSD-style license found in the
<del> * LICENSE file in the root directory of this source tree. An additional grant
<del> * of patent rights can be found in the PATENTS file in the same directory.
<del> *
<del> */
<del>'use strict';
<del>
<del>var NativeModules = {
<del> I18n: {
<del> translationsDictionary: JSON.stringify({
<del> 'Good bye, {name}!|Bye message': 'Β‘AdiΓ³s {name}!',
<del> }),
<del> },
<del> Timing: {
<del> createTimer: jest.fn(),
<del> deleteTimer: jest.fn(),
<del> },
<del> GraphPhotoUpload: {
<del> upload: jest.fn(),
<del> },
<del> FacebookSDK: {
<del> login: jest.fn(),
<del> logout: jest.fn(),
<del> queryGraphPath: jest.fn((path, method, params, callback) => callback()),
<del> },
<del> DataManager: {
<del> queryData: jest.fn(),
<del> },
<del> UIManager: {
<del> customBubblingEventTypes: {},
<del> customDirectEventTypes: {},
<del> Dimensions: {
<del> window: {
<del> width: 750,
<del> height: 1334,
<del> scale: 2,
<del> fontScale: 2,
<del> }
<del> },
<del> RCTModalFullscreenView: {
<del> Constants: {},
<del> },
<del> RCTScrollView: {
<del> Constants: {},
<del> },
<del> },
<del> AsyncLocalStorage: {
<del> getItem: jest.fn(),
<del> setItem: jest.fn(),
<del> removeItem: jest.fn(),
<del> clear: jest.fn(),
<del> },
<del> SourceCode: {
<del> scriptURL: null,
<del> },
<del> BuildInfo: {
<del> appVersion: '0',
<del> buildVersion: '0',
<del> },
<del> ModalFullscreenViewManager: {},
<del> AlertManager: {
<del> alertWithArgs: jest.fn(),
<del> },
<del> Clipboard: {
<del> setString: jest.fn(),
<del> },
<del> FbRelayNativeAdapter: {
<del> updateCLC: jest.fn(),
<del> },
<del>};
<del>
<del>module.exports = NativeModules;
<ide><path>Libraries/Renderer/src/renderers/native/__mocks__/InitializeJavaScriptAppEngine.js
<del>/**
<del> * Copyright 2013-2015, Facebook, Inc.
<del> * All rights reserved.
<del> *
<del> * This source code is licensed under the BSD-style license found in the
<del> * LICENSE file in the root directory of this source tree. An additional grant
<del> * of patent rights can be found in the PATENTS file in the same directory.
<del> */
<del>
<del>'use strict';
<del>
<del>// Noop
<del>
<del>// TODO #10932517: Move all initialization callers back into react-native
<ide><path>Libraries/Renderer/src/renderers/native/__mocks__/RCTEventEmitter.js
<del>/**
<del> * Copyright 2013-2015, Facebook, Inc.
<del> * All rights reserved.
<del> *
<del> * This source code is licensed under the BSD-style license found in the
<del> * LICENSE file in the root directory of this source tree. An additional grant
<del> * of patent rights can be found in the PATENTS file in the same directory.
<del> */
<del>
<del>'use strict';
<del>
<del>var RCTEventEmitter = {
<del> register: jest.fn(),
<del>};
<del>
<del>module.exports = RCTEventEmitter;
<ide><path>Libraries/Renderer/src/renderers/native/__mocks__/TextInputState.js
<del>/**
<del> * Copyright 2013-2015, Facebook, Inc.
<del> * All rights reserved.
<del> *
<del> * This source code is licensed under the BSD-style license found in the
<del> * LICENSE file in the root directory of this source tree. An additional grant
<del> * of patent rights can be found in the PATENTS file in the same directory.
<del> */
<del>
<del>'use strict';
<del>
<del>// Mock of the Native Hooks
<del>// TODO: Should this move into the components themselves? E.g. focusable
<del>
<del>var TextInputState = {};
<del>
<del>module.exports = TextInputState;
<ide><path>Libraries/Renderer/src/renderers/native/__mocks__/UIManager.js
<del>/**
<del> * Copyright 2013-2015, Facebook, Inc.
<del> * All rights reserved.
<del> *
<del> * This source code is licensed under the BSD-style license found in the
<del> * LICENSE file in the root directory of this source tree. An additional grant
<del> * of patent rights can be found in the PATENTS file in the same directory.
<del> */
<del>
<del>'use strict';
<del>
<del>// Mock of the Native Hooks
<del>
<del>var RCTUIManager = {
<del> createView: jest.fn(),
<del> setChildren: jest.fn(),
<del> manageChildren: jest.fn(),
<del> updateView: jest.fn(),
<del> removeSubviewsFromContainerWithID: jest.fn(),
<del> replaceExistingNonRootView: jest.fn(),
<del>};
<del>
<del>module.exports = RCTUIManager;
<ide><path>Libraries/Renderer/src/renderers/native/__mocks__/View.js
<del>/**
<del> * Copyright 2013-2015, Facebook, Inc.
<del> * All rights reserved.
<del> *
<del> * This source code is licensed under the BSD-style license found in the
<del> * LICENSE file in the root directory of this source tree. An additional grant
<del> * of patent rights can be found in the PATENTS file in the same directory.
<del> */
<del>
<del>'use strict';
<del>
<del>var createReactNativeComponentClass = require('createReactNativeComponentClass');
<del>
<del>var View = createReactNativeComponentClass({
<del> validAttributes: {},
<del> uiViewClassName: 'View',
<del>});
<del>
<del>module.exports = View;
<ide><path>Libraries/Renderer/src/renderers/native/__mocks__/deepDiffer.js
<del>/**
<del> * Copyright 2013-2015, Facebook, Inc.
<del> * All rights reserved.
<del> *
<del> * This source code is licensed under the BSD-style license found in the
<del> * LICENSE file in the root directory of this source tree. An additional grant
<del> * of patent rights can be found in the PATENTS file in the same directory.
<del> */
<del>
<del>'use strict';
<del>
<del>// TODO: Move deepDiffer into react
<del>
<del>var deepDiffer = function(one: any, two: any): boolean {
<del> if (one === two) {
<del> // Short circuit on identical object references instead of traversing them.
<del> return false;
<del> }
<del> if ((typeof one === 'function') && (typeof two === 'function')) {
<del> // We consider all functions equal
<del> return false;
<del> }
<del> if ((typeof one !== 'object') || (one === null)) {
<del> // Primitives can be directly compared
<del> return one !== two;
<del> }
<del> if ((typeof two !== 'object') || (two === null)) {
<del> // We know they are different because the previous case would have triggered
<del> // otherwise.
<del> return true;
<del> }
<del> if (one.constructor !== two.constructor) {
<del> return true;
<del> }
<del> if (Array.isArray(one)) {
<del> // We know two is also an array because the constructors are equal
<del> var len = one.length;
<del> if (two.length !== len) {
<del> return true;
<del> }
<del> for (var ii = 0; ii < len; ii++) {
<del> if (deepDiffer(one[ii], two[ii])) {
<del> return true;
<del> }
<del> }
<del> } else {
<del> for (var key in one) {
<del> if (deepDiffer(one[key], two[key])) {
<del> return true;
<del> }
<del> }
<del> for (var twoKey in two) {
<del> // The only case we haven't checked yet is keys that are in two but aren't
<del> // in one, which means they are different.
<del> if (one[twoKey] === undefined && two[twoKey] !== undefined) {
<del> return true;
<del> }
<del> }
<del> }
<del> return false;
<del>};
<del>
<del>module.exports = deepDiffer;
<ide><path>Libraries/Renderer/src/renderers/native/__mocks__/deepFreezeAndThrowOnMutationInDev.js
<del>/**
<del> * Copyright 2013-2015, Facebook, Inc.
<del> * All rights reserved.
<del> *
<del> * This source code is licensed under the BSD-style license found in the
<del> * LICENSE file in the root directory of this source tree. An additional grant
<del> * of patent rights can be found in the PATENTS file in the same directory.
<del> */
<del>
<del>'use strict';
<del>
<del>// TODO: move into react or fbjs
<del>
<del>var deepFreezeAndThrowOnMutationInDev = function() { };
<del>
<del>module.exports = deepFreezeAndThrowOnMutationInDev;
<ide><path>Libraries/Renderer/src/renderers/native/__mocks__/flattenStyle.js
<del>/**
<del> * Copyright 2013-2015, Facebook, Inc.
<del> * All rights reserved.
<del> *
<del> * This source code is licensed under the BSD-style license found in the
<del> * LICENSE file in the root directory of this source tree. An additional grant
<del> * of patent rights can be found in the PATENTS file in the same directory.
<del> */
<del>
<del>'use strict';
<del>
<del>// TODO: Move flattenStyle into react
<del>
<del>var flattenStyle = function() { };
<del>
<del>module.exports = flattenStyle; | 9 |
Text | Text | add note in updating.md about fab datamodel change | 714a07542c2560b50d013d66f71ad9a209dd70b6 | <ide><path>UPDATING.md
<ide> executor = my_acme_company.executors.MyCustomExecutor
<ide>
<ide> The old configuration is still works but can be abandoned at any time.
<ide>
<add>#### Use `CustomSQLAInterface` instead of `SQLAInterface` for custom data models.
<add>
<add>From Airflow 2.0, if you want to define your own Flask App Builder data models you need to use CustomSQLAInterface
<add>instead of SQLAInterface.
<add>
<add>For Non-RBAC replace:
<add>
<add>```python
<add>from flask_appbuilder.models.sqla.interface import SQLAInterface
<add>
<add>datamodel = SQLAInterface(your_data_model)
<add>```
<add>
<add>with RBAC (in 1.10):
<add>
<add>```python
<add>from airflow.www_rbac.utils import CustomSQLAInterface
<add>
<add>datamodel = CustomSQLAInterface(your_data_model)
<add>```
<add>
<add>and in 2.0:
<add>
<add>```python
<add>from airflow.www.utils import CustomSQLAInterface
<add>
<add>datamodel = CustomSQLAInterface(your_data_model)
<add>```
<add>
<ide> #### Drop plugin support for stat_name_handler
<ide>
<ide> In previous version, you could use plugins mechanism to configure ``stat_name_handler``. You should now use the `stat_name_handler` | 1 |
PHP | PHP | add test for html rendering | 829a6e33ea2d7c5c607ee1bb6487c2f2212d1ddd | <ide><path>tests/TestCase/Error/ExceptionTrapTest.php
<ide> public function testRenderExceptionText()
<ide> $this->assertStringContainsString('ExceptionTrapTest', $out);
<ide> }
<ide>
<add> /**
<add> * Test integration with HTML exception rendering
<add> *
<add> * Run in a separate process because HTML output writes headers.
<add> *
<add> * @runInSeparateProcess
<add> */
<add> public function testRenderExceptionHtml()
<add> {
<add> $trap = new ExceptionTrap([
<add> 'exceptionRenderer' => ExceptionRenderer::class,
<add> ]);
<add> $error = new InvalidArgumentException('nope');
<add>
<add> ob_start();
<add> $trap->handleException($error);
<add> $out = ob_get_clean();
<add>
<add> $this->assertStringContainsString('<!DOCTYPE', $out);
<add> $this->assertStringContainsString('<html', $out);
<add> $this->assertStringContainsString('nope', $out);
<add> $this->assertStringContainsString('ExceptionTrapTest', $out);
<add> }
<add>
<ide> public function testLogException()
<ide> {
<ide> Log::setConfig('test_error', [ | 1 |
Text | Text | fix broken code blocks in readme.md | 8feede229cc801485fff1e4db28a432a2e9aebb4 | <ide><path>examples/pytorch/contrastive-image-text/README.md
<ide> wget http://images.cocodataset.org/annotations/annotations_trainval2017.zip
<ide> wget http://images.cocodataset.org/annotations/image_info_test2017.zip
<ide> cd ..
<ide> ```
<del>```suggestion
<ide>
<ide> Having downloaded COCO dataset manually you should be able to load with the `ydshieh/coc_dataset_script` dataset loading script:
<ide>
<ide> ```py
<ide> COCO_DIR = "data"
<ide> ds = datasets.load_dataset("ydshieh/coco_dataset_script", "2017", data_dir=COCO_DIR)
<add>```
<add>
<ide> ### Create a model from a vision encoder model and a text decoder model
<ide> Next, we create a [VisionTextDualEncoderModel](https://huggingface.co/docs/transformers/model_doc/vision-text-dual-encoder#visiontextdualencoder).
<ide> The `VisionTextDualEncoderModel` class let's you load any vision and text encoder model to create a dual encoder.
<ide> python examples/pytorch/contrastive-image-text/run_clip.py \
<ide> --learning_rate="5e-5" --warmup_steps="0" --weight_decay 0.1 \
<ide> --overwrite_output_dir \
<ide> --push_to_hub
<del>```
<ide>\ No newline at end of file
<add>``` | 1 |
Javascript | Javascript | start caching source maps | d6212cfc795d3848ff080cd89998ef678e83278b | <ide><path>static/index.js
<ide> window.onload = function() {
<ide> // This sets require.extensions['.coffee'].
<ide> require('coffee-cash').register(path.join(cacheDir, 'coffee'));
<ide>
<add> require('coffeestack').setCacheDirectory(path.join(cacheDir, 'coffee', 'source-maps'));
<add>
<ide> require('season').setCacheDir(path.join(cacheDir, 'cson'));
<ide>
<ide> // This redefines require.extensions['.js']. | 1 |
Javascript | Javascript | avoid expensive work for filtered cached assets | 3385d0d1981ffaf19fd486c7fff5805caf9448b4 | <ide><path>lib/stats/DefaultStatsFactoryPlugin.js
<ide> const { makePathsRelative, parseResource } = require("../util/identifier");
<ide> * @typedef {Object} SimpleExtractors
<ide> * @property {ExtractorsByOption<Compilation>} compilation
<ide> * @property {ExtractorsByOption<ExtendedAsset>} asset
<add> * @property {ExtractorsByOption<ExtendedAsset>} asset$visible
<ide> * @property {ExtractorsByOption<{ name: string, chunkGroup: ChunkGroup }>} chunkGroup
<ide> * @property {ExtractorsByOption<Module>} module
<ide> * @property {ExtractorsByOption<Module>} module$visible
<ide> const SIMPLE_EXTRACTORS = {
<ide> }
<ide> },
<ide> asset: {
<add> _: (object, asset, context, options, factory) => {
<add> const { compilation } = context;
<add> object.type = asset.type;
<add> object.name = asset.name;
<add> object.size = asset.source.size();
<add> object.emitted = compilation.emittedAssets.has(asset.name);
<add> object.comparedForEmit = compilation.comparedForEmitAssets.has(
<add> asset.name
<add> );
<add> const cached = !object.emitted && !object.comparedForEmit;
<add> object.cached = cached;
<add> object.info = asset.info;
<add> if (!cached || options.cachedAssets) {
<add> Object.assign(
<add> object,
<add> factory.create(`${context.type}$visible`, asset, context)
<add> );
<add> }
<add> }
<add> },
<add> asset$visible: {
<ide> _: (
<ide> object,
<ide> asset,
<ide> { compilation, compilationFileToChunks, compilationAuxiliaryFileToChunks }
<ide> ) => {
<del> object.type = asset.type;
<del> object.name = asset.name;
<del> object.size = asset.source.size();
<ide> const chunks = compilationFileToChunks.get(asset.name) || [];
<ide> const auxiliaryChunks =
<ide> compilationAuxiliaryFileToChunks.get(asset.name) || [];
<ide> const SIMPLE_EXTRACTORS = {
<ide> c => Array.from(c.idNameHints),
<ide> compareIds
<ide> );
<del> object.emitted = compilation.emittedAssets.has(asset.name);
<del> object.comparedForEmit = compilation.comparedForEmitAssets.has(
<del> asset.name
<del> );
<del> object.cached = !object.emitted && !object.comparedForEmit;
<del> object.info = asset.info;
<ide> object.filteredRelated = asset.related ? asset.related.length : undefined;
<ide> },
<ide> relatedAssets: (object, asset, context, options, factory) => {
<ide> const { type } = context;
<ide> object.related = factory.create(
<del> `${type}.related`,
<add> `${type.slice(0, -8)}.related`,
<ide> asset.related,
<ide> context
<ide> ); | 1 |
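The split into `asset` and `asset$visible` above defers the chunk lookups and sorting unless the asset was actually emitted/compared or `cachedAssets` output was requested. A generic sketch of that gating pattern (illustrative names only, not webpack's real extractor API):

```javascript
// Illustrative only: always compute cheap fields, pay for expensive ones
// only when the result will actually be shown.
function extractAssetStats(asset, options, expensiveExtractor) {
  const object = { name: asset.name, size: asset.size, cached: asset.cached };
  if (!asset.cached || options.cachedAssets) {
    Object.assign(object, expensiveExtractor(asset));
  }
  return object;
}

const stats = extractAssetStats(
  { name: 'main.js', size: 1024, cached: true },
  { cachedAssets: false },
  asset => ({ chunkNames: ['main'] }) // stand-in for the costly lookups
);
console.log(stats); // cheap fields only, expensive extractor never ran
```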
Text | Text | add 1.6.1 release info | ee1458fdba025e00840827a6a8e9c2ef1a7edd6b | <ide><path>CHANGELOG.md
<add><a name="1.6.1"></a>
<add># 1.6.1 promise-rectification (2016-12-23)
<add>
<add>
<add>## Bug Fixes
<add>- **$q:** Add traceback to unhandled promise rejections
<add> ([174cb4](https://github.com/angular/angular.js/commit/174cb4a8c81e25581da5b452c2bb43b0fa377a9b)
<add> [#14631](https://github.com/angular/angular.js/issues/14631))
<add>- **$$cookieReader:** correctly handle forbidden access to `document.cookie`
<add> ([33f769](https://github.com/angular/angular.js/commit/33f769b0a1214055c16fb59adad4897bf53d62bf)
<add> [#15523](https://github.com/angular/angular.js/issues/15523))
<add>- **ngOptions:** do not unset the `selected` property unless necessary
<add> ([bc4844](https://github.com/angular/angular.js/commit/bc4844d3b297d80aecef89aa1b32615024decedc)
<add> [#15477](https://github.com/angular/angular.js/issues/15477))
<add>- **ngModelOptions:** work correctly when on the template of `replace` directives
<add> ([5f8ed6](https://github.com/angular/angular.js/commit/5f8ed63f2ab02ffb9c21bf9c29d27c851d162e26)
<add> [#15492](https://github.com/angular/angular.js/issues/15492))
<add>- **ngClassOdd/Even:** add/remove the correct classes when expression/`$index` change simultaneously
<add> ([d52864](https://github.com/angular/angular.js/commit/d528644fe3e9ffd43999e7fc67806059f9e1083e))
<add>- **jqLite:** silently ignore `after()` if element has no parent
<add> ([3d68b9](https://github.com/angular/angular.js/commit/3d68b9502848ff6714ef89bfb95b8e70ae34eff6)
<add> [#15331](https://github.com/angular/angular.js/issues/15331),
<add> [#15475](https://github.com/angular/angular.js/issues/15475))
<add>- **$rootScope:** when adding/removing watchers during $digest
<add> ([163aca](https://github.com/angular/angular.js/commit/163aca336d7586a45255787af41b14b2a12361dd)
<add> [#15422](https://github.com/angular/angular.js/issues/15422))
<add>
<add>
<add>## Performance Improvements
<add>- **ngClass:** avoid unnecessary `.data()` accesses, deep-watching and copies
<add> ([1d3b65](https://github.com/angular/angular.js/commit/1d3b65adc2c22ff662159ef910089cf10d1edb7b)
<add> [#14404](https://github.com/angular/angular.js/issues/14404))
<add>
<add>
<add>
<ide> <a name="1.5.10"></a>
<ide> # 1.5.10 asynchronous-synchronization (2016-12-15)
<ide> | 1 |
Go | Go | add interface for untar | 1d4a82365bfa215767fc4a4cc7e4d5d10dbcd0e4 | <ide><path>pkg/archive/archive.go
<ide> type (
<ide> Compression Compression
<ide> NoLchown bool
<ide> }
<add>
<add> // Archiver allows the reuse of most utility functions of this package
<add> // with a pluggable Untar function.
<add> Archiver struct {
<add> Untar func(io.Reader, string, *TarOptions) error
<add> }
<ide> )
<ide>
<ide> var (
<ide> ErrNotImplemented = errors.New("Function not implemented")
<add> defaultArchiver = &Archiver{Untar}
<ide> )
<ide>
<ide> const (
<ide> loop:
<ide> return nil
<ide> }
<ide>
<del>// TarUntar is a convenience function which calls Tar and Untar, with
<del>// the output of one piped into the other. If either Tar or Untar fails,
<del>// TarUntar aborts and returns the error.
<del>func TarUntar(src string, dst string) error {
<add>func (archiver *Archiver) TarUntar(src, dst string) error {
<ide> log.Debugf("TarUntar(%s %s)", src, dst)
<ide> archive, err := TarWithOptions(src, &TarOptions{Compression: Uncompressed})
<ide> if err != nil {
<ide> return err
<ide> }
<ide> defer archive.Close()
<del> return Untar(archive, dst, nil)
<add> return archiver.Untar(archive, dst, nil)
<ide> }
<ide>
<del>// UntarPath is a convenience function which looks for an archive
<del>// at filesystem path `src`, and unpacks it at `dst`.
<del>func UntarPath(src, dst string) error {
<add>// TarUntar is a convenience function which calls Tar and Untar, with the output of one piped into the other.
<add>// If either Tar or Untar fails, TarUntar aborts and returns the error.
<add>func TarUntar(src, dst string) error {
<add> return defaultArchiver.TarUntar(src, dst)
<add>}
<add>
<add>func (archiver *Archiver) UntarPath(src, dst string) error {
<ide> archive, err := os.Open(src)
<ide> if err != nil {
<ide> return err
<ide> }
<ide> defer archive.Close()
<del> if err := Untar(archive, dst, nil); err != nil {
<add> if err := archiver.Untar(archive, dst, nil); err != nil {
<ide> return err
<ide> }
<ide> return nil
<ide> }
<ide>
<del>// CopyWithTar creates a tar archive of filesystem path `src`, and
<del>// unpacks it at filesystem path `dst`.
<del>// The archive is streamed directly with fixed buffering and no
<del>// intermediary disk IO.
<del>//
<del>func CopyWithTar(src, dst string) error {
<add>// UntarPath is a convenience function which looks for an archive
<add>// at filesystem path `src`, and unpacks it at `dst`.
<add>func UntarPath(src, dst string) error {
<add> return defaultArchiver.UntarPath(src, dst)
<add>}
<add>
<add>func (archiver *Archiver) CopyWithTar(src, dst string) error {
<ide> srcSt, err := os.Stat(src)
<ide> if err != nil {
<ide> return err
<ide> }
<ide> if !srcSt.IsDir() {
<del> return CopyFileWithTar(src, dst)
<add> return archiver.CopyFileWithTar(src, dst)
<ide> }
<ide> // Create dst, copy src's content into it
<ide> log.Debugf("Creating dest directory: %s", dst)
<ide> if err := os.MkdirAll(dst, 0755); err != nil && !os.IsExist(err) {
<ide> return err
<ide> }
<ide> log.Debugf("Calling TarUntar(%s, %s)", src, dst)
<del> return TarUntar(src, dst)
<add> return archiver.TarUntar(src, dst)
<ide> }
<ide>
<del>// CopyFileWithTar emulates the behavior of the 'cp' command-line
<del>// for a single file. It copies a regular file from path `src` to
<del>// path `dst`, and preserves all its metadata.
<del>//
<del>// If `dst` ends with a trailing slash '/', the final destination path
<del>// will be `dst/base(src)`.
<del>func CopyFileWithTar(src, dst string) (err error) {
<add>// CopyWithTar creates a tar archive of filesystem path `src`, and
<add>// unpacks it at filesystem path `dst`.
<add>// The archive is streamed directly with fixed buffering and no
<add>// intermediary disk IO.
<add>func CopyWithTar(src, dst string) error {
<add> return defaultArchiver.CopyWithTar(src, dst)
<add>}
<add>
<add>func (archiver *Archiver) CopyFileWithTar(src, dst string) (err error) {
<ide> log.Debugf("CopyFileWithTar(%s, %s)", src, dst)
<ide> srcSt, err := os.Stat(src)
<ide> if err != nil {
<ide> func CopyFileWithTar(src, dst string) (err error) {
<ide> err = er
<ide> }
<ide> }()
<del> return Untar(r, filepath.Dir(dst), nil)
<add> return archiver.Untar(r, filepath.Dir(dst), nil)
<add>}
<add>
<add>// CopyFileWithTar emulates the behavior of the 'cp' command-line
<add>// for a single file. It copies a regular file from path `src` to
<add>// path `dst`, and preserves all its metadata.
<add>//
<add>// If `dst` ends with a trailing slash '/', the final destination path
<add>// will be `dst/base(src)`.
<add>func CopyFileWithTar(src, dst string) (err error) {
<add> return defaultArchiver.CopyFileWithTar(src, dst)
<ide> }
<ide>
<ide> // CmdStream executes a command, and returns its stdout as a stream. | 1 |
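The patch above turns free functions into methods on an `Archiver` whose `Untar` is a pluggable function field, with a package-level default preserving the old API. A self-contained Go sketch of that pattern with illustrative names (not the real `archive` package types):

```go
package main

import (
	"fmt"
	"io"
	"strings"
)

// Unpacker mirrors the Archiver idea: a struct carrying a pluggable function
// so the same helper methods can run with different implementations.
type Unpacker struct {
	Untar func(r io.Reader, dst string) error
}

// CopyTo reuses whatever Untar implementation the Unpacker carries.
func (u *Unpacker) CopyTo(r io.Reader, dst string) error {
	return u.Untar(r, dst)
}

// A package-level default keeps the old function-style API working.
var defaultUnpacker = &Unpacker{Untar: func(r io.Reader, dst string) error {
	data, err := io.ReadAll(r)
	if err != nil {
		return err
	}
	fmt.Printf("would unpack %d bytes into %s\n", len(data), dst)
	return nil
}}

func CopyTo(r io.Reader, dst string) error { return defaultUnpacker.CopyTo(r, dst) }

func main() {
	_ = CopyTo(strings.NewReader("payload"), "/tmp/dest")
}
```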
Text | Text | correct external command docs | 007926d7a6aaec1a5ab8614a1f8d4ad70497908c | <ide><path>share/doc/homebrew/External-Commands.md
<ide> without modifying Homebrew's internals.
<ide> ## COMMAND TYPES
<ide> External commands come in two flavors: Ruby commands and shell scripts.
<ide>
<del>In both cases, the command file should be `chmod +x` (executable) and live somewhere in `$PATH`.
<del>
<del>Internally, Homebrew finds commands with `which`(1).
<add>In both cases, the command file should be executable (`chmod +x`) and live somewhere in `$PATH`.
<ide>
<ide> ### RUBY COMMANDS
<ide> An external command `extcmd` implemented as a Ruby command should be named `brew-extcmd.rb`. The command is executed by doing a `require` on the full pathname. As the command is `require`d, it has full access to the Homebrew "environment", i.e. all global variables and modules that any internal command has access to.
<ide> These commands have been contributed by Homebrew users but are not included in t
<ide>
<ide> ## SEE ALSO
<ide> Homebrew Docs: <https://github.com/Homebrew/homebrew/tree/master/share/doc/homebrew>
<del>
<del>`brew`(1), `which`(1), `grep`(1), [`ronn`(1)](http://rtomayko.github.com/ronn/) | 1 |
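Following the doc above, a Ruby external command is an executable file named `brew-<name>.rb` somewhere on `$PATH` that brew `require`s. A hypothetical minimal sketch (the file name and the `HOMEBREW_PREFIX` global are assumptions based on the description, not verified API):

```ruby
#!/usr/bin/env ruby
# brew-hello.rb -- hypothetical external command; because brew `require`s
# this file, Homebrew's environment is already loaded when it runs.
puts "Hello from an external command!"
puts "Homebrew prefix: #{HOMEBREW_PREFIX}" if defined?(HOMEBREW_PREFIX)
```

Marking it executable (`chmod +x brew-hello.rb`) and placing it on `$PATH` would then let `brew hello` invoke it, per the doc's description.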
Text | Text | add tools doc | e63a693c6d4ae76cded0029bb7f60216c4b11da6 | <ide><path>project/TOOLS.md
<add># Tools
<add>
<add>This page describes the tools we use and infrastructure that is in place for
<add>the Docker project.
<add>
<add>### CI
<add>
<add>The Docker project uses [Jenkins](https://jenkins.dockerproject.com/) as our
<add>continuous integration server. Each Pull Request to Docker is tested by running the
<add>equivalent of `make all`. We chose Jenkins because we can host it ourselves and
<add>we run Docker in Docker to test.
<add>
<add>#### Leeroy
<add>
<add>Leeroy is a Go application which integrates Jenkins with
<add>GitHub pull requests. Leeroy uses
<add>[GitHub hooks](http://developer.github.com/v3/repos/hooks/)
<add>to listen for pull request notifications and starts jobs on your Jenkins
<add>server. Using the Jenkins [notification plugin][jnp], Leeroy updates the
<add>pull request using GitHub's
<add>[status API](http://developer.github.com/v3/repos/statuses/)
<add>with pending, success, failure, or error statuses.
<add>
<add>The leeroy repository is maintained at
<add>[github.com/jfrazelle/leeroy](https://github.com/jfrazelle/leeroy).
<add>
<add>#### GordonTheTurtle IRC Bot
<add>
<add>The GordonTheTurtle IRC Bot lives in the
<add>[#docker-maintainers](https://botbot.me/freenode/docker-maintainers/) channel
<add>on Freenode. He is built in Go and is based off the project at
<add>[github.com/fabioxgn/go-bot](https://github.com/fabioxgn/go-bot).
<add>
<add>His main command is `!rebuild`, which rebuilds a given Pull Request for a repository.
<add>This command works by integrating with Leroy. He has a few other commands too, such
<add>as `!gif` or `!godoc`, but we are always looking for more fun commands to add.
<add>
<add>The gordon-bot repository is maintained at
<add>[github.com/jfrazelle/gordon-bot](https://github.com/jfrazelle/gordon-bot)
<add>
<add>### NSQ
<add>
<add>We use [NSQ](https://github.com/bitly/nsq) for various aspects of the project
<add>infrastucture.
<add>
<add>#### Hooks
<add>
<add>The hooks project,
<add>[github.com/crosbymichael/hooks](https://github.com/crosbymichael/hooks),
<add>is a small Go application that manages web hooks from github, hub.docker.com, or
<add>other third party services.
<add>
<add>It can be used for listening to github webhooks & pushing them to a queue,
<add>archiving hooks to rethinkdb for processing, and broadcasting hooks to various
<add>jobs.
<add>
<add>#### Docker Master Binaries
<add>
<add>One of the things queued from the Hooks are the building of the Master
<add>Binaries. This happens on every push to the master branch of Docker. The
<add>repository for this is maintained at
<add>[github.com/jfrazelle/docker-bb](https://github.com/jfrazelle/docker-bb).
<add>
<add>#### Docker Master Docs
<add>
<add>The master build of the docs gets queued from the Hooks as well. They are built
<add>using [github.com/jfrazelle/nsqexec](https://github.com/jfrazelle/nsqexec).
<add>
<add>#### Patch Parser Bot
<add>
<add>The bot, also named GordonTheTurtle, that labels and comments on Pull Requests
<add>listens on Hooks as well. He is capable of knowing if a Pull Request needs to
<add>be signed, or gofmt'd, as well as rebased. The repository for this is maintained at
<add>[github.com/jfrazelle/gh-patch-parser](https://github.com/jfrazelle/gh-patch-parser). | 1 |
Ruby | Ruby | remove github warning | 34ae063c6734b8cab9562d44e771dfcf5ac4582a | <ide><path>Library/Homebrew/cmd/audit.rb
<ide> def audit_urls
<ide> problem "Google Code homepage should end with a slash (url is #{f.homepage})."
<ide> end
<ide>
<del> if f.homepage =~ %r[^http://(.*)\.github\.com/]
<del> if $1 != 'github'
<del> problem "GitHub pages should use the github.io domain (url is #{f.homepage})"
<del> end
<del> end
<del>
<ide> urls = @specs.map(&:url)
<ide>
<ide> # Check GNU urls; doesn't apply to mirrors | 1 |
Ruby | Ruby | remove outdated comment | ed6894bf2386cf469018a5b32e60568c8df20e2d | <ide><path>activerecord/test/models/pirate.rb
<ide> class Pirate < ActiveRecord::Base
<ide> has_many :treasures, :as => :looter
<ide> has_many :treasure_estimates, :through => :treasures, :source => :price_estimates
<ide>
<del> # These both have :autosave enabled because accepts_nested_attributes_for is used on them.
<ide> has_one :ship
<ide> has_one :update_only_ship, :class_name => 'Ship'
<ide> has_one :non_validated_ship, :class_name => 'Ship' | 1 |
Javascript | Javascript | remove dead code | 8f1ee0bc6f90ff3417889bedfcb246e4faebe551 | <ide><path>lib/internal/errors.js
<ide> E('ERR_INVALID_ARG_VALUE', (name, value, reason = 'is invalid') => {
<ide> }, TypeError, RangeError);
<ide> E('ERR_INVALID_ARRAY_LENGTH',
<ide> (name, len, actual) => {
<del> internalAssert(typeof actual === 'number', 'actual must be of type number');
<ide> return `The array "${name}" (length ${actual}) must be of length ${len}.`;
<ide> }, TypeError);
<ide> E('ERR_INVALID_ASYNC_ID', 'Invalid %s value: %s', RangeError); | 1 |
Javascript | Javascript | pull request comments + eslint | 12129ac36685b74702b2820d0927c60e49e10b89 | <ide><path>examples/js/loaders/GLTFLoader.js
<ide> THREE.GLTFLoader = ( function () {
<ide>
<ide> case 'directional':
<ide> lightNode = new THREE.DirectionalLight( color );
<del> lightNode.target.position.set( 0, 0, -1 );
<add> lightNode.target.position.set( 0, 0, - 1 );
<ide> lightNode.add( lightNode.target );
<ide> break;
<ide>
<ide> THREE.GLTFLoader = ( function () {
<ide> lightDef.spot.outerConeAngle = lightDef.spot.outerConeAngle !== undefined ? lightDef.spot.outerConeAngle : Math.PI / 4.0;
<ide> lightNode.angle = lightDef.spot.outerConeAngle;
<ide> lightNode.penumbra = 1.0 - lightDef.spot.innerConeAngle / lightDef.spot.outerConeAngle;
<del> lightNode.target.position.set( 0, 0, -1 );
<add> lightNode.target.position.set( 0, 0, - 1 );
<ide> lightNode.add( lightNode.target );
<ide> break;
<ide>
<ide> THREE.GLTFLoader = ( function () {
<ide> uniforms.refractionRatio.value = material.refractionRatio;
<ide>
<ide> uniforms.maxMipLevel.value = renderer.properties.get( material.envMap ).__maxMipLevel;
<add>
<ide> }
<ide>
<ide> uniforms.specular.value.copy( material.specular );
<ide> THREE.GLTFLoader = ( function () {
<ide> var accessor = target.POSITION !== undefined
<ide> ? parser.getDependency( 'accessor', target.POSITION )
<ide> .then( function ( accessor ) {
<add>
<ide> // Cloning not to pollute original accessor below
<ide> return cloneBufferAttribute( accessor );
<add>
<ide> } )
<ide> : geometry.attributes.position;
<ide>
<ide> THREE.GLTFLoader = ( function () {
<ide> var accessor = target.NORMAL !== undefined
<ide> ? parser.getDependency( 'accessor', target.NORMAL )
<ide> .then( function ( accessor ) {
<add>
<ide> return cloneBufferAttribute( accessor );
<add>
<ide> } )
<ide> : geometry.attributes.normal;
<ide>
<ide> THREE.GLTFLoader = ( function () {
<ide>
<ide> }
<ide>
<del> function createGeometryKey( geometry ) {
<add> function createGeometryKey( primitiveDef ) {
<ide>
<del> var dracoExtension = geometry.extensions && geometry.extensions[ EXTENSIONS.KHR_DRACO_MESH_COMPRESSION ];
<add> var dracoExtension = primitiveDef.extensions && primitiveDef.extensions[ EXTENSIONS.KHR_DRACO_MESH_COMPRESSION ];
<ide> var geometryKey;
<ide>
<ide> if ( dracoExtension ) {
<ide> THREE.GLTFLoader = ( function () {
<ide>
<ide> } else {
<ide>
<del> geometryKey = geometry.indices + ':' + createAttributesKey( geometry.attributes );
<add> geometryKey = primitiveDef.indices + ':' + createAttributesKey( primitiveDef.attributes );
<ide>
<ide> }
<ide>
<ide> THREE.GLTFLoader = ( function () {
<ide> return attributesKey;
<ide>
<ide> }
<add>
<ide> function createArrayKey( a ) {
<ide>
<ide> var arrayKey = '';
<ide>
<ide> for ( var i = 0, il = a.length; i < il; i ++ ) {
<ide>
<del> arrayKey += i + a[ i ];
<add> arrayKey += i;
<add>
<add> for ( var j = 0, jl = a[ i ].lenght; j < jl; j ++ ) {
<add>
<add> arrayKey += j + a[ i ][ j ];
<add>
<add> }
<ide>
<ide> }
<ide>
<ide> THREE.GLTFLoader = ( function () {
<ide>
<ide> case 'light':
<ide> dependency = this.extensions[ EXTENSIONS.KHR_LIGHTS_PUNCTUAL ].loadLight( index );
<del> break
<add> break;
<ide>
<ide> default:
<ide> throw new Error( 'Unknown type: ' + type );
<ide> THREE.GLTFLoader = ( function () {
<ide>
<ide> var nodeDef = json.nodes[ nodeIndex ];
<ide>
<del> return ( function() {
<add> return ( function () {
<ide>
<ide> // .isBone isn't in glTF spec. See .markDefs
<ide> if ( nodeDef.isBone === true ) {
<ide> THREE.GLTFLoader = ( function () {
<ide>
<ide> mesh.bind( new THREE.Skeleton( bones, boneInverses ), mesh.matrixWorld );
<ide>
<del> };
<add> }
<ide>
<ide> return node;
<ide> | 1 |
Ruby | Ruby | handle corrupt checkouts | a380265e90e36a6a9e60123f300bdc4094318b98 | <ide><path>Library/Homebrew/download_strategy.rb
<ide> def bzrpath
<ide> ].find { |p| File.executable? p }
<ide> end
<ide>
<add> def repo_valid?
<add> @clone.join(".bzr").directory?
<add> end
<add>
<ide> def fetch
<ide> ohai "Cloning #{@url}"
<del> unless @clone.exist?
<del> [email protected](%r[^bzr://], '')
<del> # 'lightweight' means history-less
<del> safe_system bzrpath, 'checkout', '--lightweight', url, @clone
<del> else
<add>
<add> if @clone.exist? && repo_valid?
<ide> puts "Updating #{@clone}"
<del> Dir.chdir(@clone) { safe_system bzrpath, 'update' }
<add> @clone.cd { safe_system bzrpath, 'update' }
<add> elsif @clone.exist?
<add> puts "Removing invalid bzr repo from cache"
<add> @clone.rmtree
<add> clone_repo
<add> else
<add> clone_repo
<ide> end
<ide> end
<ide>
<add> def clone_repo
<add> url = @url.sub(%r[^bzr://], '')
<add> # 'lightweight' means history-less
<add> safe_system bzrpath, 'checkout', '--lightweight', url, @clone
<add> end
<add>
<ide> def stage
<ide> # FIXME: The export command doesn't work on checkouts
<ide> # See https://bugs.launchpad.net/bzr/+bug/897511 | 1 |
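The fix above adds a `repo_valid?` probe and re-clones when the cached checkout is corrupt instead of blindly updating it. A generic Ruby sketch of that validate-or-reclone flow (paths and names illustrative):

```ruby
# Illustrative only: validate a cached checkout before reusing it.
require "fileutils"
require "pathname"

def fetch_checkout(clone, &clone_repo)
  clone = Pathname.new(clone)
  if clone.exist? && clone.join(".bzr").directory?
    puts "Updating #{clone}"
    # system "bzr", "update" would run inside the checkout here
  elsif clone.exist?
    puts "Removing invalid bzr repo from cache"
    FileUtils.rm_rf(clone)
    clone_repo.call
  else
    clone_repo.call
  end
end

fetch_checkout("/tmp/cache/foo--bzr") { puts "cloning fresh copy" }
```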
Java | Java | fix race condition in sendblockingmessage | 20e6ca3601d2748b049d918a5f8c50e5f4191207 | <ide><path>spring-websocket/src/test/java/org/springframework/web/socket/handler/ConcurrentWebSocketSessionDecoratorTests.java
<ide> public void closeStatusChangesToSessionNotReliable() throws Exception {
<ide> }
<ide>
<ide> private void sendBlockingMessage(ConcurrentWebSocketSessionDecorator session) throws InterruptedException {
<add> BlockingSession delegate = (BlockingSession) session.getDelegate();
<add> CountDownLatch sentMessageLatch = delegate.getSentMessageLatch();
<ide> Executors.newSingleThreadExecutor().submit(() -> {
<ide> TextMessage message = new TextMessage("slow message");
<ide> try {
<ide> private void sendBlockingMessage(ConcurrentWebSocketSessionDecorator session) th
<ide> e.printStackTrace();
<ide> }
<ide> });
<del> BlockingSession delegate = (BlockingSession) session.getDelegate();
<del> assertThat(delegate.getSentMessageLatch().await(5, TimeUnit.SECONDS)).isTrue();
<add> assertThat(sentMessageLatch.await(5, TimeUnit.SECONDS)).isTrue();
<ide> }
<ide>
<ide>
<ide>
<ide> private static class BlockingSession extends TestWebSocketSession {
<ide>
<del> private AtomicReference<CountDownLatch> nextMessageLatch = new AtomicReference<>();
<add> private final AtomicReference<CountDownLatch> nextMessageLatch = new AtomicReference<>();
<ide>
<del> private AtomicReference<CountDownLatch> releaseLatch = new AtomicReference<>();
<add> private final AtomicReference<CountDownLatch> releaseLatch = new AtomicReference<>();
<ide>
<ide>
<ide> public CountDownLatch getSentMessageLatch() { | 1 |
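The fix above captures the latch from the session before submitting the blocking send, so the waiting thread and the executor thread are guaranteed to use the same latch instance. A small self-contained Java sketch of that capture-before-submit pattern (not the Spring test classes themselves):

```java
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class CaptureBeforeSubmit {
    public static void main(String[] args) throws InterruptedException {
        // Grab the synchronization object first...
        CountDownLatch sent = new CountDownLatch(1);
        ExecutorService executor = Executors.newSingleThreadExecutor();
        // ...then start the work that will trip it; there is no window in
        // which the waiter could end up holding a different latch.
        executor.submit(() -> {
            // stands in for the blocking sendMessage(...) call
            sent.countDown();
        });
        System.out.println(sent.await(5, TimeUnit.SECONDS) ? "message sent" : "timed out");
        executor.shutdown();
    }
}
```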
Text | Text | add mongoose section | 56cac693799240045fec8b11d60769ab32b5eb55 | <ide><path>README.md
<ide> And that's it, we are done!
<ide>
<ide> If you want to see a really cool real-time dashboard check out this [live example](http://hackathonstarter.herokuapp.com/dashboard). Refer to the [pull request #23](https://github.com/sahat/hackathon-starter/pull/23/files) to see how it is implemented.
<ide>
<add>Mongoose Cheatsheet
<add>-------------------
<add>TODO
<add>
<ide> Deployment
<ide> ----------
<ide> | 1 |
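The new section above is still a TODO in the patch itself. Purely as an illustration of the kind of snippet such a cheatsheet tends to contain (connection string, schema, and field names are hypothetical, not from the project):

```javascript
// Hypothetical Mongoose usage, callback style as was common at the time.
var mongoose = require('mongoose');
mongoose.connect('mongodb://localhost/demo');

var userSchema = new mongoose.Schema({
  email: { type: String, unique: true },
  name: String
});
var User = mongoose.model('User', userSchema);

User.findOne({ email: 'alice@example.com' }, function (err, user) {
  if (err) return console.error(err);
  console.log(user ? user.name : 'not found');
});
```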
PHP | PHP | add method in router to fetch middleware by path | 45b97d10d46f4c9086b3510b6aa20dc916649926 | <ide><path>src/Routing/Router.php
<ide>
<ide> use Cake\Core\Configure;
<ide> use Cake\Http\ServerRequest;
<add>use Cake\Http\MiddlewareQueue;
<ide> use Cake\Utility\Inflector;
<ide> use InvalidArgumentException;
<ide> use Psr\Http\Message\ServerRequestInterface;
<ide> public static function routes()
<ide> return static::$_collection->routes();
<ide> }
<ide>
<add> public static function getMatchingMiddleware($path)
<add> {
<add> if (!static::$initialized) {
<add> static::_loadRoutes();
<add> }
<add>
<add> $middleware = static::$_collection->getMatchingMiddleware($path);
<add> if ($middleware) {
<add> return new MiddlewareQueue($middleware);
<add> }
<add> return null;
<add> }
<add>
<ide> /**
<ide> * Loads route configuration
<ide> *
<ide><path>tests/TestCase/Routing/RouterTest.php
<ide>
<ide> use Cake\Core\Configure;
<ide> use Cake\Core\Plugin;
<add>use Cake\Http\MiddlewareQueue;
<ide> use Cake\Http\ServerRequest;
<ide> use Cake\Http\ServerRequestFactory;
<ide> use Cake\Routing\Router;
<ide> public function testSetRequestContextInvalid()
<ide> Router::setRequestContext(new \stdClass);
<ide> }
<ide>
<add> /**
<add> * Test getting path specific middleware.
<add> *
<add> * @return void
<add> */
<add> public function testGetMatchingMiddleware()
<add> {
<add> Router::scope('/', function ($routes) {
<add> $routes->connect('/articles', ['controller' => 'Articles']);
<add> $routes->registerMiddleware('noop', function () {
<add> });
<add> });
<add> Router::scope('/api/v1', function ($routes) {
<add> $routes->applyMiddleware('noop');
<add> $routes->connect('/articles', ['controller' => 'Articles', 'prefix' => 'Api']);
<add> });
<add> $result = Router::getMatchingMiddleware('/articles');
<add> $this->assertNull($result);
<add>
<add> $result = Router::getMatchingMiddleware('/api/v1/articles');
<add> $this->assertInstanceOf(MiddlewareQueue::class, $result);
<add> $this->assertCount(1, $result);
<add> }
<add>
<ide> /**
<ide> * Connect some fallback routes for testing router behavior.
<ide> * | 2 |
Javascript | Javascript | remove platformos from rn | 5c0b9071e77d1d2da920768bc8472496a750a0c6 | <ide><path>Libraries/Utilities/PlatformOS.android.js
<del>/**
<del> * Copyright (c) Facebook, Inc. and its affiliates.
<del> *
<del> * This source code is licensed under the MIT license found in the
<del> * LICENSE file in the root directory of this source tree.
<del> *
<del> * @format
<del> * @flow strict
<del> */
<del>
<del>'use strict';
<del>
<del>export type PlatformSelectSpec<A, I> = {|
<del> android: A,
<del> ios: I,
<del>|};
<del>
<del>const PlatformOS = {
<del> OS: 'android',
<del> select: <A, I>(spec: PlatformSelectSpec<A, I>): A | I => spec.android,
<del>};
<del>
<del>module.exports = PlatformOS;
<ide><path>Libraries/Utilities/PlatformOS.ios.js
<del>/**
<del> * Copyright (c) Facebook, Inc. and its affiliates.
<del> *
<del> * This source code is licensed under the MIT license found in the
<del> * LICENSE file in the root directory of this source tree.
<del> *
<del> * @format
<del> * @flow strict
<del> */
<del>
<del>'use strict';
<del>
<del>export type PlatformSelectSpec<A, I> = {|
<del> android: A,
<del> ios: I,
<del>|};
<del>
<del>const PlatformOS = {
<del> OS: 'ios',
<del> select: <A, I>(spec: PlatformSelectSpec<A, I>): A | I => spec.ios,
<del>};
<del>
<del>module.exports = PlatformOS; | 2 |
Text | Text | fix links [ci skip] | 83cb835ff70044d4a0cf0b56acf9503161f8ba8c | <ide><path>guides/source/security.md
<ide> rotations going at any one time.
<ide> For more details on key rotation with encrypted and signed messages as
<ide> well as the various options the `rotate` method accepts, please refer to
<ide> the
<del>[MessageEncryptor API](api.rubyonrails.org/classes/ActiveSupport/MessageEncryptor.html)
<add>[MessageEncryptor API](http://api.rubyonrails.org/classes/ActiveSupport/MessageEncryptor.html)
<ide> and
<del>[MessageVerifier API](api.rubyonrails.org/classes/ActiveSupport/MessageVerifier.html)
<add>[MessageVerifier API](http://api.rubyonrails.org/classes/ActiveSupport/MessageVerifier.html)
<ide> documentation.
<ide>
<ide> ### Replay Attacks for CookieStore Sessions | 1 |
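The links corrected above point at the rotation API the guide is describing. A hedged Ruby sketch of that idea, assuming the Rails 5.2+ `rotate` signature on `MessageVerifier`:

```ruby
require "active_support"
require "active_support/message_verifier"

verifier = ActiveSupport::MessageVerifier.new("new secret")
verifier.rotate("old secret") # keep accepting messages signed with the previous secret

old_message = ActiveSupport::MessageVerifier.new("old secret").generate("hello")
puts verifier.verified(old_message) # => "hello" while the rotation is in place
```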
Python | Python | add debug information to gfortran builds | 4c4212b792ffce841ae31d842c0e0a52e285e071 | <ide><path>numpy/distutils/fcompiler/gnu.py
<ide> def version_match(self, version_string):
<ide> possible_executables = ['gfortran', 'f95']
<ide> executables = {
<ide> 'version_cmd' : ["<F90>", "--version"],
<del> 'compiler_f77' : [None, "-Wall", "-ffixed-form",
<add> 'compiler_f77' : [None, "-Wall", "-g", "-ffixed-form",
<ide> "-fno-second-underscore"] + _EXTRAFLAGS,
<del> 'compiler_f90' : [None, "-Wall", "-fno-second-underscore"] + _EXTRAFLAGS,
<del> 'compiler_fix' : [None, "-Wall", "-ffixed-form",
<add> 'compiler_f90' : [None, "-Wall", "-g",
<ide> "-fno-second-underscore"] + _EXTRAFLAGS,
<del> 'linker_so' : ["<F90>", "-Wall"],
<add> 'compiler_fix' : [None, "-Wall", "-g","-ffixed-form",
<add> "-fno-second-underscore"] + _EXTRAFLAGS,
<add> 'linker_so' : ["<F90>", "-Wall", "-g"],
<ide> 'archiver' : ["ar", "-cr"],
<ide> 'ranlib' : ["ranlib"],
<ide> 'linker_exe' : [None, "-Wall"] | 1 |
Text | Text | update validation guide for acceptance method | 29acc869db72570822c9774aec3ab6ac4a229890 | <ide><path>guides/source/active_record_validations.md
<ide> available helpers.
<ide> This method validates that a checkbox on the user interface was checked when a
<ide> form was submitted. This is typically used when the user needs to agree to your
<ide> application's terms of service, confirm that some text is read, or any similar
<del>concept. This validation is very specific to web applications and this
<del>'acceptance' does not need to be recorded anywhere in your database (if you
<del>don't have a field for it, the helper will just create a virtual attribute).
<add>concept.
<add>
<add>This validation is very specific to web applications and this
<add>'acceptance' does not need to be recorded anywhere in your database. If you
<add>don't have a field for it, the helper will just create a virtual attribute. If
<add>the field does exist in your database, the `accept` option must be set to
<add>`true` or else the validation will not run.
<ide>
<ide> ```ruby
<ide> class Person < ActiveRecord::Base | 1 |
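A short model sketch of the distinction the revised guide text draws (class and attribute names are illustrative):

```ruby
class Person < ActiveRecord::Base
  # Virtual attribute: no terms_of_service column needs to exist.
  validates :terms_of_service, acceptance: true

  # When a real database column backs the attribute, set the accepted
  # value explicitly so the validation actually runs.
  validates :eula, acceptance: { accept: true }
end
```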
Text | Text | optimize some descriptions for swarm nodes.md | fa52bd5cf8f8e40189a0a9ba1bac61bd5ace4a91 | <ide><path>docs/swarm/how-swarm-mode-works/nodes.md
<ide> manager. If the manager in a single-manager swarm fails, your services will
<ide> continue to run, but you will need to create a new cluster to recover.
<ide>
<ide> To take advantage of swarm mode's fault-tolerance features, Docker recommends
<del>you implement an odd number of nodes nodes according to your organization's
<add>you implement an odd number of nodes according to your organization's
<ide> high-availability requirements. When you have multiple managers you can recover
<ide> from the failure of a manager node without downtime.
<ide>
<ide> * A three-manager swarm tolerates a maximum loss of one manager.
<del>* A five-manager swarm tolerates a maximum simultaneous loss two
<add>* A five-manager swarm tolerates a maximum simultaneous loss of two
<ide> manager nodes.
<ide> * An `N` manager cluster will tolerate the loss of at most
<ide> `(N-1)/2` managers. | 1 |
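Worked out for a few common sizes, straight from the `(N-1)/2` rule above:

```
N = 1 manager  -> tolerates floor((1-1)/2) = 0 failures
N = 3 managers -> tolerates floor((3-1)/2) = 1 failure
N = 5 managers -> tolerates floor((5-1)/2) = 2 failures
N = 7 managers -> tolerates floor((7-1)/2) = 3 failures
```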
Python | Python | add coarse layers for faster rcnn fpn keras model | 70c974d36e63955b82f4df23989d5aa902e1d51b | <ide><path>research/object_detection/models/faster_rcnn_resnet_v1_fpn_keras_feature_extractor.py
<ide> def __init__(self,
<ide> self._resnet_block_names = ['block1', 'block2', 'block3', 'block4']
<ide> self.classification_backbone = None
<ide> self._fpn_features_generator = None
<add> self._coarse_feature_layers = []
<ide>
<ide> def preprocess(self, resized_inputs):
<ide> """Faster R-CNN Resnet V1 preprocessing.
<ide> def get_proposal_feature_extractor_model(self, name=None):
<ide> (feature_block, feature_block_map[feature_block])
<ide> for feature_block in feature_block_list]
<ide> fpn_features = self._fpn_features_generator(fpn_input_image_features)
<del> features_maps = [fpn_feature for _, fpn_feature in fpn_features.items()]
<add>
<add> # Construct coarse feature layers
<add> for i in range(self._base_fpn_max_level, self._fpn_max_level):
<add> layers = []
<add> layer_name = 'bottom_up_block{}'.format(i)
<add> layers.append(
<add> tf.keras.layers.Conv2D(
<add> self._additional_layer_depth,
<add> [3, 3],
<add> padding='SAME',
<add> strides=2,
<add> name=layer_name + '_conv',
<add> **self._conv_hyperparams.params()))
<add> layers.append(
<add> self._conv_hyperparams.build_batch_norm(
<add> training=(self._is_training and not self._freeze_batchnorm),
<add> name=layer_name + '_batchnorm'))
<add> layers.append(
<add> self._conv_hyperparams.build_activation_layer(
<add> name=layer_name))
<add> self._coarse_feature_layers.append(layers)
<add>
<add> feature_maps = []
<add> for level in range(self._fpn_min_level, self._base_fpn_max_level + 1):
<add> feature_maps.append(fpn_features['top_down_block{}'.format(level-1)])
<add> last_feature_map = fpn_features['top_down_block{}'.format(
<add> self._base_fpn_max_level - 1)]
<add>
<add> for coarse_feature_layers in self._coarse_feature_layers:
<add> for layer in coarse_feature_layers:
<add> last_feature_map = layer(last_feature_map)
<add> feature_maps.append(last_feature_map)
<ide>
<ide> feature_extractor_model = tf.keras.models.Model(
<del> inputs=full_resnet_v1_model.inputs, outputs=features_maps)
<add> inputs=full_resnet_v1_model.inputs, outputs=feature_maps)
<ide> return feature_extractor_model
<ide>
<ide> def get_box_classifier_feature_extractor_model(self, name=None):
<ide><path>research/object_detection/models/faster_rcnn_resnet_v1_fpn_keras_feature_extractor_tf2_test.py
<ide> def test_extract_proposal_features_returns_expected_size(self):
<ide> self.assertAllEqual(features_shapes[1].numpy(), [2, 56, 56, 256])
<ide> self.assertAllEqual(features_shapes[2].numpy(), [2, 28, 28, 256])
<ide> self.assertAllEqual(features_shapes[3].numpy(), [2, 14, 14, 256])
<add> self.assertAllEqual(features_shapes[4].numpy(), [2, 7, 7, 256])
<add> self.assertAllEqual(features_shapes[5].numpy(), [2, 4, 4, 256])
<ide>
<ide> def test_extract_proposal_features_half_size_input(self):
<ide> feature_extractor = self._build_feature_extractor()
<ide> def test_extract_proposal_features_half_size_input(self):
<ide> self.assertAllEqual(features_shapes[1].numpy(), [2, 28, 28, 256])
<ide> self.assertAllEqual(features_shapes[2].numpy(), [2, 14, 14, 256])
<ide> self.assertAllEqual(features_shapes[3].numpy(), [2, 7, 7, 256])
<add> self.assertAllEqual(features_shapes[4].numpy(), [2, 4, 4, 256])
<add> self.assertAllEqual(features_shapes[5].numpy(), [2, 2, 2, 256])
<ide>
<ide> def test_extract_box_classifier_features_returns_expected_size(self):
<ide> feature_extractor = self._build_feature_extractor() | 2 |
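Editor's note: a minimal sketch of the technique this patch adds, namely extra coarse pyramid levels produced by stacking stride-2 3x3 convolutions on the coarsest FPN output. The hyperparameter wiring, batch-norm settings and the object_detection builders are simplified away; the layer names, depth and input size below are illustrative assumptions, not the library's API.

```python
import tensorflow as tf

def add_coarse_levels(top_fpn_feature, num_extra_levels=2, depth=256):
    """Downsample the coarsest FPN map with stride-2 convs, as in the patch."""
    outputs = []
    x = top_fpn_feature
    for i in range(num_extra_levels):
        x = tf.keras.layers.Conv2D(depth, 3, strides=2, padding="same",
                                   name=f"bottom_up_block{i}_conv")(x)
        x = tf.keras.layers.BatchNormalization(name=f"bottom_up_block{i}_bn")(x)
        x = tf.keras.layers.ReLU(name=f"bottom_up_block{i}_relu")(x)
        outputs.append(x)
    return outputs

# Shapes mirror the updated test: a 14x14x256 top-level map yields 7x7 and 4x4 maps.
top_level = tf.random.normal([2, 14, 14, 256])
for fmap in add_coarse_levels(top_level):
    print(fmap.shape)
```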
Javascript | Javascript | remove ios gating to fix sticky header | 6eed1e4f80572de038f67496e51ae0fea7e66617 | <ide><path>Libraries/Components/ScrollView/ScrollViewStickyHeader.js
<ide> class ScrollViewStickyHeader extends React.Component<Props, State> {
<ide> this.setState({
<ide> translateY: value,
<ide> });
<del> // This fixes jank on iOS, especially around paging,
<del> // but causes jank on Android.
<del> // It seems that Native Animated Driver on iOS has
<del> // more conflicts with values passed through the ShadowTree
<del> // especially when connecting new Animated nodes + disconnecting
<del> // old ones, compared to Android where that process seems fine.
<del> if (Platform.OS === 'ios') {
<del> setTimeout(() => {
<del> this.setState({
<del> translateY: null,
<del> });
<del> }, 0);
<del> }
<ide> }
<ide> }, this._debounceTimeout);
<ide> }; | 1 |
Go | Go | set bigger grpc limit for array requests | 489b8eda6674523df8b82a210399b7d2954427d0 | <ide><path>daemon/cluster/cluster.go
<ide> package cluster // import "github.com/docker/docker/daemon/cluster"
<ide> import (
<ide> "context"
<ide> "fmt"
<add> "math"
<ide> "net"
<ide> "os"
<ide> "path/filepath"
<ide> const stateFile = "docker-state.json"
<ide> const defaultAddr = "0.0.0.0:2377"
<ide>
<ide> const (
<del> initialReconnectDelay = 100 * time.Millisecond
<del> maxReconnectDelay = 30 * time.Second
<del> contextPrefix = "com.docker.swarm"
<add> initialReconnectDelay = 100 * time.Millisecond
<add> maxReconnectDelay = 30 * time.Second
<add> contextPrefix = "com.docker.swarm"
<add> defaultRecvSizeForListResponse = math.MaxInt32 // the max recv limit grpc <1.4.0
<ide> )
<ide>
<ide> // NetworkSubnetsProvider exposes functions for retrieving the subnets
<ide><path>daemon/cluster/services.go
<ide> import (
<ide> gogotypes "github.com/gogo/protobuf/types"
<ide> "github.com/pkg/errors"
<ide> "github.com/sirupsen/logrus"
<add> "google.golang.org/grpc"
<ide> )
<ide>
<ide> // GetServices returns all services of a managed swarm cluster.
<ide> func (c *Cluster) GetServices(options apitypes.ServiceListOptions) ([]types.Serv
<ide>
<ide> r, err := state.controlClient.ListServices(
<ide> ctx,
<del> &swarmapi.ListServicesRequest{Filters: filters})
<add> &swarmapi.ListServicesRequest{Filters: filters},
<add> grpc.MaxCallRecvMsgSize(defaultRecvSizeForListResponse),
<add> )
<ide> if err != nil {
<ide> return nil, err
<ide> }
<ide><path>daemon/cluster/tasks.go
<ide> import (
<ide> types "github.com/docker/docker/api/types/swarm"
<ide> "github.com/docker/docker/daemon/cluster/convert"
<ide> swarmapi "github.com/docker/swarmkit/api"
<add> "google.golang.org/grpc"
<ide> )
<ide>
<ide> // GetTasks returns a list of tasks matching the filter options.
<ide> func (c *Cluster) GetTasks(options apitypes.TaskListOptions) ([]types.Task, erro
<ide>
<ide> r, err = state.controlClient.ListTasks(
<ide> ctx,
<del> &swarmapi.ListTasksRequest{Filters: filters})
<add> &swarmapi.ListTasksRequest{Filters: filters},
<add> grpc.MaxCallRecvMsgSize(defaultRecvSizeForListResponse),
<add> )
<ide> return err
<ide> }); err != nil {
<ide> return nil, err | 3 |
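Editor's note: the patch raises the client-side gRPC receive limit so large ListServices/ListTasks replies are not rejected. For illustration, and keeping this note in Python rather than Go, the equivalent knob in the Python `grpc` package is the `grpc.max_receive_message_length` channel option; the target address below is a placeholder, not anything taken from the moby patch.

```python
import grpc

MAX_RECV = 2 ** 31 - 1  # mirrors the math.MaxInt32 limit used in the patch

# Hypothetical target; any gRPC service that returns large list replies would do.
channel = grpc.insecure_channel(
    "localhost:2377",
    options=[("grpc.max_receive_message_length", MAX_RECV)],
)
print(channel)  # stubs built from this channel accept replies up to MAX_RECV bytes
```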
Text | Text | remove travis ci badge from readme | 7f026d12b8115bff4b6ad59cabb3df4f3f0a3872 | <ide><path>README.md
<ide>
<ide> [](https://badge.fury.io/py/apache-airflow)
<ide> 
<del>[](https://travis-ci.org/apache/airflow)
<ide> [](https://codecov.io/github/apache/airflow?branch=master)
<ide> [](https://airflow.readthedocs.io/en/latest/?badge=latest)
<ide> [](http://www.apache.org/licenses/LICENSE-2.0.txt)
<ide> [](https://pypi.org/project/apache-airflow/)
<add>
<ide> [](https://twitter.com/ApacheAirflow)
<ide> [](https://apache-airflow-slack.herokuapp.com/)
<ide> | 1 |
Ruby | Ruby | add missing colon to internal | 9b9dec5b5f713edbe0d7556558fd0fe4216053d5 | <ide><path>activerecord/lib/active_record/properties.rb
<ide> module Properties # :nodoc:
<ide> Type = ActiveRecord::Type
<ide>
<ide> included do
<del> class_attribute :user_provided_columns, instance_accessor: false # :internal
<add> class_attribute :user_provided_columns, instance_accessor: false # :internal:
<ide> self.user_provided_columns = {}
<ide> end
<ide> | 1 |
Text | Text | remove coverage badge | ede46717523d8c0a9839758e4857c7e4b92d0eb5 | <ide><path>README.md
<ide> Second, read the [Troubleshooting Checklist](https://docs.brew.sh/Troubleshootin
<ide>
<ide> ## Contributing
<ide> [](https://dev.azure.com/Homebrew/Homebrew/_build/latest?definitionId=1)
<del>[](https://codecov.io/gh/Homebrew/brew)
<ide>
<ide> We'd love you to contribute to Homebrew. First, please read our [Contribution Guide](CONTRIBUTING.md) and [Code of Conduct](CODE_OF_CONDUCT.md#code-of-conduct).
<ide> | 1 |
PHP | PHP | fix coding standards in case/core | b4a444fb14f514b68f8a9e84d9de761e5ee9656f | <ide><path>lib/Cake/Test/Case/Core/AppTest.php
<ide> public function testCompatibleBuild() {
<ide> App::build(array('helpers' => array('/path/to/helpers/')));
<ide> $expected = array(
<ide> '/path/to/helpers/',
<del> APP . 'View' . DS . 'Helper' .DS
<add> APP . 'View' . DS . 'Helper' . DS
<ide> );
<ide> $result = App::path('helpers');
<ide> $this->assertEquals($expected, $result);
<ide> public function testPluginImporting() {
<ide> * @link http://cakephp.lighthouseapp.com/projects/42648/tickets/410
<ide> */
<ide> public function testImportingHelpersFromAlternatePaths() {
<del>
<ide> $this->assertFalse(class_exists('BananaHelper', false), 'BananaHelper exists, cannot test importing it.');
<ide> App::build(array(
<ide> 'View/Helper' => array(
<ide> public function testImportingHelpersFromAlternatePaths() {
<ide> * @return void
<ide> */
<ide> public function testFileLoading() {
<del> $file = App::import('File', 'RealFile', false, array(), CAKE . 'Config' . DS . 'config.php');
<add> $file = App::import('File', 'RealFile', false, array(), CAKE . 'Config' . DS . 'config.php');
<ide> $this->assertTrue($file);
<ide>
<del> $file = App::import('File', 'NoFile', false, array(), CAKE . 'Config' . DS . 'cake' . DS . 'config.php');
<add> $file = App::import('File', 'NoFile', false, array(), CAKE . 'Config' . DS . 'cake' . DS . 'config.php');
<ide> $this->assertFalse($file);
<ide> }
<ide>
<ide> public function testFileLoading() {
<ide> * @return void
<ide> */
<ide> public function testFileLoadingWithArray() {
<del> $type = array('type' => 'File', 'name' => 'SomeName', 'parent' => false,
<del> 'file' => CAKE . DS . 'Config' . DS . 'config.php');
<add> $type = array(
<add> 'type' => 'File',
<add> 'name' => 'SomeName',
<add> 'parent' => false,
<add> 'file' => CAKE . DS . 'Config' . DS . 'config.php'
<add> );
<ide> $file = App::import($type);
<ide> $this->assertTrue($file);
<ide>
<del> $type = array('type' => 'File', 'name' => 'NoFile', 'parent' => false,
<del> 'file' => CAKE . 'Config' . DS . 'cake' . DS . 'config.php');
<add> $type = array(
<add> 'type' => 'File',
<add> 'name' => 'NoFile',
<add> 'parent' => false,
<add> 'file' => CAKE . 'Config' . DS . 'cake' . DS . 'config.php'
<add> );
<ide> $file = App::import($type);
<ide> $this->assertFalse($file);
<ide> }
<ide> public function testFileLoadingWithArray() {
<ide> * @return void
<ide> */
<ide> public function testFileLoadingReturnValue() {
<del> $file = App::import('File', 'Name', false, array(), CAKE . 'Config' . DS . 'config.php', true);
<add> $file = App::import('File', 'Name', false, array(), CAKE . 'Config' . DS . 'config.php', true);
<ide> $this->assertTrue(!empty($file));
<ide>
<ide> $this->assertTrue(isset($file['Cake.version']));
<ide>
<del> $type = array('type' => 'File', 'name' => 'OtherName', 'parent' => false,
<del> 'file' => CAKE . 'Config' . DS . 'config.php', 'return' => true);
<add> $type = array(
<add> 'type' => 'File',
<add> 'name' => 'OtherName',
<add> 'parent' => false,
<add> 'file' => CAKE . 'Config' . DS . 'config.php', 'return' => true
<add> );
<ide> $file = App::import($type);
<ide> $this->assertTrue(!empty($file));
<ide>
<ide> public function testMultipleLoading() {
<ide>
<ide> $classes = array_flip(get_declared_classes());
<ide>
<del>
<ide> $this->assertTrue(isset($classes['PersisterOne']));
<ide> $this->assertTrue(isset($classes['PersisterTwo']));
<ide>
<ide> $load = App::import('Model', array('PersisterOne', 'SomeNotFoundClass', 'PersisterTwo'));
<ide> $this->assertFalse($load);
<ide> }
<ide>
<del>
<ide> public function testLoadingVendor() {
<ide> App::build(array(
<ide> 'plugins' => array(CAKE . 'Test' . DS . 'test_app' . DS . 'Plugin' . DS),
<del> 'vendors' => array(CAKE . 'Test' . DS . 'test_app' . DS . 'Vendor'. DS),
<add> 'vendors' => array(CAKE . 'Test' . DS . 'test_app' . DS . 'Vendor' . DS),
<ide> ), App::RESET);
<ide> CakePlugin::load(array('TestPlugin', 'TestPluginTwo'));
<ide>
<ide><path>lib/Cake/Test/Case/Core/CakePluginTest.php
<ide> public function testMultipleBootstrapFiles() {
<ide> $this->assertEquals('loaded plugin bootstrap', Configure::read('CakePluginTest.test_plugin.bootstrap'));
<ide> }
<ide>
<del>
<ide> /**
<ide> * Tests that it is possible to load plugin bootstrap by calling a callback function
<ide> *
<ide> public function testLoadNotFound() {
<ide> CakePlugin::load('MissingPlugin');
<ide> }
<ide>
<del>
<ide> /**
<ide> * Tests that CakePlugin::path() returns the correct path for the loaded plugins
<ide> *
<ide><path>lib/Cake/Test/Case/Core/ObjectTest.php
<ide> class RequestActionPost extends CakeTestModel {
<ide> class RequestActionController extends Controller {
<ide>
<ide> /**
<del>* uses property
<del>*
<del>* @var array
<del>* @access public
<del>*/
<add> * uses property
<add> *
<add> * @var array
<add> * @access public
<add> */
<ide> public $uses = array('RequestActionPost');
<ide>
<ide> /**
<del>* test_request_action method
<del>*
<del>* @access public
<del>* @return void
<del>*/
<add> * test_request_action method
<add> *
<add> * @access public
<add> * @return void
<add> */
<ide> public function test_request_action() {
<ide> return 'This is a test';
<ide> }
<ide>
<ide> /**
<del>* another_ra_test method
<del>*
<del>* @param mixed $id
<del>* @param mixed $other
<del>* @access public
<del>* @return void
<del>*/
<add> * another_ra_test method
<add> *
<add> * @param mixed $id
<add> * @param mixed $other
<add> * @access public
<add> * @return void
<add> */
<ide> public function another_ra_test($id, $other) {
<ide> return $id + $other;
<ide> }
<ide> public function param_check() {
<ide> }
<ide> $this->response->body($content);
<ide> }
<del>}
<ide>
<add>}
<ide>
<ide> /**
<ide> * TestObject class
<ide> public function oneParamMethod($param) {
<ide> * twoParamMethod method
<ide> *
<ide> * @param mixed $param
<del> * @param mixed $param2
<add> * @param mixed $paramTwo
<ide> * @return void
<ide> */
<del> public function twoParamMethod($param, $param2) {
<del> $this->methodCalls[] = array('twoParamMethod' => array($param, $param2));
<add> public function twoParamMethod($param, $paramTwo) {
<add> $this->methodCalls[] = array('twoParamMethod' => array($param, $paramTwo));
<ide> }
<ide>
<ide> /**
<ide> * threeParamMethod method
<ide> *
<ide> * @param mixed $param
<del> * @param mixed $param2
<del> * @param mixed $param3
<add> * @param mixed $paramTwo
<add> * @param mixed $paramThree
<ide> * @return void
<ide> */
<del> public function threeParamMethod($param, $param2, $param3) {
<del> $this->methodCalls[] = array('threeParamMethod' => array($param, $param2, $param3));
<add> public function threeParamMethod($param, $paramTwo, $paramThree) {
<add> $this->methodCalls[] = array('threeParamMethod' => array($param, $paramTwo, $paramThree));
<ide> }
<del> /**
<add>
<add>/**
<ide> * fourParamMethod method
<ide> *
<ide> * @param mixed $param
<del> * @param mixed $param2
<del> * @param mixed $param3
<del> * @param mixed $param4
<add> * @param mixed $paramTwo
<add> * @param mixed $paramThree
<add> * @param mixed $paramFour
<ide> * @return void
<ide> */
<del> public function fourParamMethod($param, $param2, $param3, $param4) {
<del> $this->methodCalls[] = array('fourParamMethod' => array($param, $param2, $param3, $param4));
<add> public function fourParamMethod($param, $paramTwo, $paramThree, $paramFour) {
<add> $this->methodCalls[] = array('fourParamMethod' => array($param, $paramTwo, $paramThree, $paramFour));
<ide> }
<del> /**
<add>
<add>/**
<ide> * fiveParamMethod method
<ide> *
<ide> * @param mixed $param
<del> * @param mixed $param2
<del> * @param mixed $param3
<del> * @param mixed $param4
<del> * @param mixed $param5
<add> * @param mixed $paramTwo
<add> * @param mixed $paramThree
<add> * @param mixed $paramFour
<add> * @param mixed $paramFive
<ide> * @return void
<ide> */
<del> public function fiveParamMethod($param, $param2, $param3, $param4, $param5) {
<del> $this->methodCalls[] = array('fiveParamMethod' => array($param, $param2, $param3, $param4, $param5));
<add> public function fiveParamMethod($param, $paramTwo, $paramThree, $paramFour, $paramFive) {
<add> $this->methodCalls[] = array('fiveParamMethod' => array($param, $paramTwo, $paramThree, $paramFour, $paramFive));
<ide> }
<ide>
<ide> /**
<ide> * crazyMethod method
<ide> *
<ide> * @param mixed $param
<del> * @param mixed $param2
<del> * @param mixed $param3
<del> * @param mixed $param4
<del> * @param mixed $param5
<del> * @param mixed $param6
<del> * @param mixed $param7
<add> * @param mixed $paramTwo
<add> * @param mixed $paramThree
<add> * @param mixed $paramFour
<add> * @param mixed $paramFive
<add> * @param mixed $paramSix
<add> * @param mixed $paramSeven
<ide> * @return void
<ide> */
<del> public function crazyMethod($param, $param2, $param3, $param4, $param5, $param6, $param7 = null) {
<del> $this->methodCalls[] = array('crazyMethod' => array($param, $param2, $param3, $param4, $param5, $param6, $param7));
<add> public function crazyMethod($param, $paramTwo, $paramThree, $paramFour, $paramFive, $paramSix, $paramSeven = null) {
<add> $this->methodCalls[] = array('crazyMethod' => array($param, $paramTwo, $paramThree, $paramFour, $paramFive, $paramSix, $paramSeven));
<ide> }
<ide>
<ide> /**
<ide> public function methodWithOptionalParam($param = null) {
<ide> public function set($properties = array()) {
<ide> return parent::_set($properties);
<ide> }
<add>
<ide> }
<ide>
<ide> /**
<ide> public function set($properties = array()) {
<ide> * @package Cake.Test.Case.Core
<ide> */
<ide> class ObjectTestModel extends CakeTestModel {
<add>
<ide> public $useTable = false;
<add>
<ide> public $name = 'ObjectTestModel';
<add>
<ide> }
<ide>
<ide> /**
<ide> public function testRequestActionArray() {
<ide> 'Model' => array(CAKE . 'Test' . DS . 'test_app' . DS . 'Model' . DS),
<ide> 'View' => array(CAKE . 'Test' . DS . 'test_app' . DS . 'View' . DS),
<ide> 'Controller' => array(CAKE . 'Test' . DS . 'test_app' . DS . 'Controller' . DS),
<del> 'Plugin' => array(CAKE . 'Test' . DS . 'test_app' . DS . 'Plugin'. DS)
<add> 'Plugin' => array(CAKE . 'Test' . DS . 'test_app' . DS . 'Plugin' . DS)
<ide> ), App::RESET);
<ide> CakePlugin::load(array('TestPlugin'));
<ide> | 3 |
Python | Python | fix error with recarry. patch by c. gohlke. closes | 5e4a4075a8476a6c6d46481f33a4c873f545d4c7 | <ide><path>numpy/compat/py3k.py
<ide> def asstr(s):
<ide> return s
<ide> return s.decode('latin1')
<ide> def isfileobj(f):
<del> return isinstance(f, io.FileIO)
<add> return isinstance(f, (io.FileIO, io.BufferedReader))
<ide> def open_latin1(filename, mode='r'):
<ide> return open(filename, mode=mode, encoding='iso-8859-1')
<ide> strchar = 'U'
<ide><path>numpy/core/tests/test_records.py
<ide> def test_recarray_fromfile(self):
<ide> fd = open(filename, 'rb')
<ide> fd.seek(2880 * 2)
<ide> r = np.rec.fromfile(fd, formats='f8,i4,a5', shape=3, byteorder='big')
<add> fd.seek(2880 * 2)
<add> r = np.rec.array(fd, formats='f8,i4,a5', shape=3, byteorder='big')
<ide>
<ide> def test_recarray_from_obj(self):
<ide> count = 10 | 2 |
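Editor's note: a small end-to-end sketch of what the fix enables, reading a record array through a buffered file object (`open(..., 'rb')` returns `io.BufferedReader`, which `isfileobj()` now accepts). The file name and field layout are invented for the example.

```python
import numpy as np

# Write a tiny record array as raw bytes.
rec = np.rec.array([(1.0, 2, b"abcde"), (3.0, 4, b"fghij")],
                   formats="f8,i4,a5")
rec.tofile("records.bin")

# Read it back through a BufferedReader instead of a filename.
with open("records.bin", "rb") as fd:
    out = np.rec.fromfile(fd, formats="f8,i4,a5", shape=2)

print(out)
```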
Text | Text | update 2.x maintenance date, include 3.0 wiki | 1c17f3669bcfc7651925f7dc977a9561ace84d7d | <ide><path>README.md
<ide> It extends the [observer pattern](http://en.wikipedia.org/wiki/Observer_pattern)
<ide>
<ide> Learn more about RxJava in general on the <a href="https://github.com/ReactiveX/RxJava/wiki">Wiki Home</a>.
<ide>
<add>:information_source: Please read the [What's different in 3.0](https://github.com/ReactiveX/RxJava/wiki/What's-different-in-3.0) for details on the changes and migration information when upgrading from 2.x.
<add>
<ide> #### Version 2.x
<ide>
<del>The [2.x version](https://github.com/ReactiveX/RxJava/tree/2.x) will be supported with bugfixes and important documentation updates until
<del>**December 31, 2020**. No new features will be added to 2.x.
<add>The [2.x version](https://github.com/ReactiveX/RxJava/tree/2.x) is in maintenance mode and will be supported with only through bugfixes until **February 28, 2021**. No new features will be added to 2.x.
<ide>
<ide> #### Version 1.x
<ide> | 1 |
Javascript | Javascript | remove unused methods | b40dab553f888070f1eaae6f749dc511d7c281fd | <ide><path>lib/internal/linkedlist.js
<ide> function init(list) {
<ide> list._idlePrev = list;
<ide> }
<ide>
<del>// create a new linked list
<del>function create() {
<del> const list = { _idleNext: null, _idlePrev: null };
<del> init(list);
<del> return list;
<del>}
<del>
<ide> // show the most idle item
<ide> function peek(list) {
<ide> if (list._idlePrev === list) return null;
<ide> return list._idlePrev;
<ide> }
<ide>
<del>// remove the most idle item from the list
<del>function shift(list) {
<del> const first = list._idlePrev;
<del> remove(first);
<del> return first;
<del>}
<del>
<ide> // remove a item from its list
<ide> function remove(item) {
<ide> if (item._idleNext) {
<ide> function isEmpty(list) {
<ide>
<ide> module.exports = {
<ide> init,
<del> create,
<ide> peek,
<del> shift,
<ide> remove,
<ide> append,
<ide> isEmpty
<ide><path>test/parallel/test-timers-linked-list.js
<ide> L.append(list, D);
<ide> // list -> A -> B -> C -> D
<ide> assert.strictEqual(A, L.peek(list));
<ide>
<del>assert.strictEqual(A, L.shift(list));
<del>// list -> B -> C -> D
<del>assert.strictEqual(B, L.peek(list));
<del>
<del>assert.strictEqual(B, L.shift(list));
<del>// list -> C -> D
<del>assert.strictEqual(C, L.peek(list));
<del>
<add>L.remove(A);
<add>L.remove(B);
<ide> // B is already removed, so removing it again shouldn't hurt.
<ide> L.remove(B);
<ide> // list -> C -> D
<ide> L.append(list, A);
<ide>
<ide> // Append should REMOVE C from the list and append it to the end.
<ide> L.append(list, C);
<del>
<ide> // list -> D -> B -> A -> C
<del>assert.strictEqual(D, L.shift(list));
<del>// list -> B -> A -> C
<del>assert.strictEqual(B, L.peek(list));
<del>assert.strictEqual(B, L.shift(list));
<del>// list -> A -> C
<del>assert.strictEqual(A, L.peek(list));
<del>assert.strictEqual(A, L.shift(list));
<del>// list -> C
<del>assert.strictEqual(C, L.peek(list));
<del>assert.strictEqual(C, L.shift(list));
<del>// list
<del>assert.ok(L.isEmpty(list));
<del>
<del>const list2 = L.create();
<del>const list3 = L.create();
<del>assert.ok(L.isEmpty(list2));
<del>assert.ok(L.isEmpty(list3));
<ide>
<del>// Objects should have identical keys/properties, but be different objects.
<del>assert.deepStrictEqual(list2, list3);
<del>assert.notStrictEqual(list2, list3);
<add>assert.strictEqual(D, L.peek(list));
<add>assert.strictEqual(B, L.peek(D));
<add>assert.strictEqual(A, L.peek(B));
<add>assert.strictEqual(C, L.peek(A));
<add>assert.strictEqual(list, L.peek(C)); | 2 |
Ruby | Ruby | fix mysql/mysql2 failing with fk constraint errors | c031016558d7be757c9216186670bf3418a1a6ae | <ide><path>activerecord/test/cases/persistence_test.rb
<ide> require 'rexml/document'
<ide>
<ide> class PersistenceTest < ActiveRecord::TestCase
<del> fixtures :topics, :companies, :developers, :projects, :computers, :accounts, :minimalistics, 'warehouse-things', :authors, :categorizations, :categories, :posts, :minivans, :pets, :toys
<add> fixtures :topics, :companies, :developers, :projects, :computers, :accounts, :minimalistics, 'warehouse-things', :authors, :author_addresses, :categorizations, :categories, :posts, :minivans, :pets, :toys
<ide>
<ide> # Oracle UPDATE does not support ORDER BY
<ide> unless current_adapter?(:OracleAdapter) | 1 |
Python | Python | add a warning for broken prophetnet fine-tuning | 45dcfdec52e28ef65c52d0a3b4e5120e134c5d49 | <ide><path>src/transformers/models/prophetnet/modeling_prophetnet.py
<ide> def forward(
<ide> >>> last_hidden_states_ngram = outputs.last_hidden_state_ngram # predict hidden states
<ide> """
<ide>
<add> if self.training:
<add> logger.warning(
<add> "There is a known issue with ProphetNet training/fine-tuning that hasn't been fixed yet:"
<add> "https://github.com/huggingface/transformers/issues/9804. Please try to use an off-the-shelf"
<add> "checkpoint from the model hub or fine-tune another architecture instead."
<add> )
<add>
<ide> use_cache == use_cache if use_cache is not None else self.config.use_cache
<ide> output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
<ide> output_hidden_states = ( | 1 |
Python | Python | fix md5 import for python 2.6 in paver file | 6dce85229c750519f75ac09c34997545b8586d98 | <ide><path>pavement.py
<ide> import subprocess
<ide> import re
<ide> try:
<del> from hash import md5
<add> from hashlib import md5
<ide> except ImportError:
<del> import md5
<add> from md5 import md5
<ide>
<ide> import paver
<ide> from paver.easy import \
<ide> def compute_md5(idirs):
<ide> released = paver.path.path(idirs).listdir()
<ide> checksums = []
<ide> for f in released:
<del> m = md5.md5(open(f, 'r').read())
<add> m = md5(open(f, 'r').read())
<ide> checksums.append('%s %s' % (m.hexdigest(), f))
<ide>
<ide> return checksums | 1 |
Ruby | Ruby | allow skipping analytics message | d41e2ea5e8714d80948033c04165137f0d46e9ac | <ide><path>Library/Homebrew/cmd/update-report.rb
<ide> def update_report
<ide> analytics_disabled = \
<ide> Utils.popen_read("git", "config", "--local", "--get", "homebrew.analyticsdisabled").chuzzle
<ide> if analytics_message_displayed != "true" && analytics_disabled != "true" &&
<del> !ENV["HOMEBREW_NO_ANALYTICS"]
<add> !ENV["HOMEBREW_NO_ANALYTICS"] && !ENV["HOMEBREW_NO_ANALYTICS_MESSAGE_OUTPUT"]
<ide> ENV["HOMEBREW_NO_ANALYTICS_THIS_RUN"] = "1"
<ide> # Use the shell's audible bell.
<ide> print "\a" | 1 |
Go | Go | fix stats cli | 2894d07696a3160c5f6f372e69b241386bb82c94 | <ide><path>cli/command/container/stats.go
<ide> func runStats(dockerCli *command.DockerCli, opts *statsOptions) error {
<ide> }
<ide> }
<ide>
<add> // Get the daemonOSType if not set already
<add> if daemonOSType == "" {
<add> svctx := context.Background()
<add> sv, err := dockerCli.Client().ServerVersion(svctx)
<add> if err != nil {
<add> return err
<add> }
<add> daemonOSType = sv.Os
<add> }
<add>
<ide> // waitFirst is a WaitGroup to wait first stat data's reach for each container
<ide> waitFirst := &sync.WaitGroup{}
<ide>
<ide> func runStats(dockerCli *command.DockerCli, opts *statsOptions) error {
<ide> Output: dockerCli.Out(),
<ide> Format: formatter.NewStatsFormat(f, daemonOSType),
<ide> }
<del>
<ide> cleanScreen := func() {
<ide> if !opts.noStream {
<ide> fmt.Fprint(dockerCli.Out(), "\033[2J")
<ide><path>cli/command/formatter/stats.go
<ide> import (
<ide> const (
<ide> winOSType = "windows"
<ide> defaultStatsTableFormat = "table {{.Container}}\t{{.CPUPerc}}\t{{.MemUsage}}\t{{.MemPerc}}\t{{.NetIO}}\t{{.BlockIO}}\t{{.PIDs}}"
<del> winDefaultStatsTableFormat = "table {{.Container}}\t{{.CPUPerc}}\t{{{.MemUsage}}\t{.NetIO}}\t{{.BlockIO}}"
<del> emptyStatsTableFormat = "Waiting for statistics..."
<del>
<del> containerHeader = "CONTAINER"
<del> cpuPercHeader = "CPU %"
<del> netIOHeader = "NET I/O"
<del> blockIOHeader = "BLOCK I/O"
<del> winMemPercHeader = "PRIV WORKING SET" // Used only on Window
<del> memPercHeader = "MEM %" // Used only on Linux
<del> memUseHeader = "MEM USAGE / LIMIT" // Used only on Linux
<del> pidsHeader = "PIDS" // Used only on Linux
<add> winDefaultStatsTableFormat = "table {{.Container}}\t{{.CPUPerc}}\t{{.MemUsage}}\t{{.NetIO}}\t{{.BlockIO}}"
<add>
<add> containerHeader = "CONTAINER"
<add> cpuPercHeader = "CPU %"
<add> netIOHeader = "NET I/O"
<add> blockIOHeader = "BLOCK I/O"
<add> memPercHeader = "MEM %" // Used only on Linux
<add> winMemUseHeader = "PRIV WORKING SET" // Used only on Windows
<add> memUseHeader = "MEM USAGE / LIMIT" // Used only on Linux
<add> pidsHeader = "PIDS" // Used only on Linux
<ide> )
<ide>
<ide> // StatsEntry represents represents the statistics data collected from a container
<ide> func (c *containerStatsContext) CPUPerc() string {
<ide> }
<ide>
<ide> func (c *containerStatsContext) MemUsage() string {
<del> c.AddHeader(memUseHeader)
<del> if c.s.IsInvalid || c.s.OSType == winOSType {
<add> header := memUseHeader
<add> if c.s.OSType == winOSType {
<add> header = winMemUseHeader
<add> }
<add> c.AddHeader(header)
<add> if c.s.IsInvalid {
<ide> return fmt.Sprintf("-- / --")
<ide> }
<add> if c.s.OSType == winOSType {
<add> return fmt.Sprintf("%s", units.BytesSize(c.s.Memory))
<add> }
<ide> return fmt.Sprintf("%s / %s", units.BytesSize(c.s.Memory), units.BytesSize(c.s.MemoryLimit))
<ide> }
<ide>
<ide> func (c *containerStatsContext) MemPerc() string {
<ide> header := memPercHeader
<del> if c.s.OSType == winOSType {
<del> header = winMemPercHeader
<del> }
<ide> c.AddHeader(header)
<ide> if c.s.IsInvalid {
<ide> return fmt.Sprintf("--") | 2 |
Python | Python | fix one more distributed_tensorflow | a19e90f38d574d60517c065afd0e1983f32b9816 | <ide><path>samples/outreach/blogs/blog_custom_estimators.py
<ide> # http://www.apache.org/licenses/LICENSE-2.0
<ide> #
<ide> # Unless required by applicable law or agreed to in writing, software
<del># distributed under the License is Distributed_TensorFlow on an "AS IS" BASIS,
<add># distributed under the License is distributed on an "AS IS" BASIS,
<ide> # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<ide> # See the License for the specific language governing permissions and
<ide> # limitations under the License. | 1 |
Python | Python | fix mypy violation | 2c3e38087e18de9f452e856d10fdf8ccc5c8b926 | <ide><path>libcloud/common/base.py
<ide> def _retryable_request(self, url: str, data: bytes,
<ide> try:
<ide> # @TODO: Should we just pass File object as body to request method
<ide> # instead of dealing with splitting and sending the file ourselves?
<add> assert self.connection is not None
<add>
<ide> if raw:
<ide> self.connection.prepared_request(
<ide> method=method,
<ide> def _retryable_request(self, url: str, data: bytes,
<ide> '(%s.connection) is set to an invalid, non-hostname '
<ide> 'value (%s)?' %
<ide> (message, class_name, self.host))
<del> raise socket.gaierror(msg)
<add> raise socket.gaierror(msg) # type: ignore
<ide> self.reset_context()
<ide> raise e
<ide> except ssl.SSLError as e: | 1 |
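Editor's note: the two added lines use a common mypy idiom. An `assert x is not None` narrows `Optional[X]` to `X` for the type checker, and `# type: ignore` silences a construct the checker cannot model. A generic, self-contained sketch of the narrowing pattern follows; the class and method names are illustrative, not libcloud's.

```python
from typing import Optional

class Connection:
    def request(self, url: str) -> str:
        return f"GET {url}"

class Client:
    def __init__(self) -> None:
        self.connection: Optional[Connection] = None

    def fetch(self, url: str) -> str:
        # Without this assert, mypy reports: Item "None" of
        # "Optional[Connection]" has no attribute "request".
        assert self.connection is not None
        return self.connection.request(url)

client = Client()
client.connection = Connection()
print(client.fetch("/servers"))
```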
Text | Text | fix content-type of response for `/commit` | 78de066a28c5e01524401840ff4d1ae76e8e0fc2 | <ide><path>docs/reference/api/docker_remote_api_v1.12.md
<ide> Create a new image from a container's changes
<ide> **Example response**:
<ide>
<ide> HTTP/1.1 201 Created
<del> Content-Type: application/vnd.docker.raw-stream
<add> Content-Type: application/json
<ide>
<ide> {"Id": "596069db4bf5"}
<ide>
<ide><path>docs/reference/api/docker_remote_api_v1.13.md
<ide> Create a new image from a container's changes
<ide> **Example response**:
<ide>
<ide> HTTP/1.1 201 Created
<del> Content-Type: application/vnd.docker.raw-stream
<add> Content-Type: application/json
<ide>
<ide> {"Id": "596069db4bf5"}
<ide>
<ide><path>docs/reference/api/docker_remote_api_v1.14.md
<ide> Create a new image from a container's changes
<ide> **Example response**:
<ide>
<ide> HTTP/1.1 201 Created
<del> Content-Type: application/vnd.docker.raw-stream
<add> Content-Type: application/json
<ide>
<ide> {"Id": "596069db4bf5"}
<ide>
<ide><path>docs/reference/api/docker_remote_api_v1.15.md
<ide> Create a new image from a container's changes
<ide> **Example response**:
<ide>
<ide> HTTP/1.1 201 Created
<del> Content-Type: application/vnd.docker.raw-stream
<add> Content-Type: application/json
<ide>
<ide> {"Id": "596069db4bf5"}
<ide>
<ide><path>docs/reference/api/docker_remote_api_v1.16.md
<ide> Create a new image from a container's changes
<ide> **Example response**:
<ide>
<ide> HTTP/1.1 201 Created
<del> Content-Type: application/vnd.docker.raw-stream
<add> Content-Type: application/json
<ide>
<ide> {"Id": "596069db4bf5"}
<ide>
<ide><path>docs/reference/api/docker_remote_api_v1.17.md
<ide> Create a new image from a container's changes
<ide> **Example response**:
<ide>
<ide> HTTP/1.1 201 Created
<del> Content-Type: application/vnd.docker.raw-stream
<add> Content-Type: application/json
<ide>
<ide> {"Id": "596069db4bf5"}
<ide>
<ide><path>docs/reference/api/docker_remote_api_v1.18.md
<ide> Create a new image from a container's changes
<ide> **Example response**:
<ide>
<ide> HTTP/1.1 201 Created
<del> Content-Type: application/vnd.docker.raw-stream
<add> Content-Type: application/json
<ide>
<ide> {"Id": "596069db4bf5"}
<ide>
<ide><path>docs/reference/api/docker_remote_api_v1.19.md
<ide> Create a new image from a container's changes
<ide> **Example response**:
<ide>
<ide> HTTP/1.1 201 Created
<del> Content-Type: application/vnd.docker.raw-stream
<add> Content-Type: application/json
<ide>
<ide> {"Id": "596069db4bf5"}
<ide>
<ide><path>docs/reference/api/docker_remote_api_v1.20.md
<ide> Create a new image from a container's changes
<ide> **Example response**:
<ide>
<ide> HTTP/1.1 201 Created
<del> Content-Type: application/vnd.docker.raw-stream
<add> Content-Type: application/json
<ide>
<ide> {"Id": "596069db4bf5"}
<ide>
<ide><path>docs/reference/api/docker_remote_api_v1.21.md
<ide> Create a new image from a container's changes
<ide> **Example response**:
<ide>
<ide> HTTP/1.1 201 Created
<del> Content-Type: application/vnd.docker.raw-stream
<add> Content-Type: application/json
<ide>
<ide> {"Id": "596069db4bf5"}
<ide> | 10 |
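Editor's note: all ten doc files record the same behaviour: `POST /commit` answers with a JSON body, so the response `Content-Type` is `application/json`. Below is a hedged sketch of calling the endpoint from Python. The TCP address, the query parameters and the unversioned path are assumptions for illustration; a real daemon usually listens on a Unix socket and may require a versioned path such as `/v1.21/commit`.

```python
import requests

DOCKER_API = "http://localhost:2375"  # assumes the daemon exposes a TCP socket

resp = requests.post(
    f"{DOCKER_API}/commit",
    params={"container": "596069db4bf5", "repo": "example/repo", "tag": "latest"},
)
print(resp.status_code)                  # 201 Created on success
print(resp.headers.get("Content-Type"))  # application/json (per this fix)
print(resp.json()["Id"])                 # id of the newly created image
```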
Python | Python | add head masking and pruning to gpt-2 | b860e47cf5a61b76b480657504de2588a9385b53 | <ide><path>pytorch_pretrained_bert/modeling_gpt2.py
<ide> PRETRAINED_CONFIG_ARCHIVE_MAP = {"gpt2": "https://s3.amazonaws.com/models.huggingface.co/bert/gpt2-config.json",
<ide> "gpt2-medium": "https://s3.amazonaws.com/models.huggingface.co/bert/gpt2-medium-config.json"}
<ide>
<add>def prune_conv1d_layer(layer, index, dim=1):
<add> """ Prune a Conv1D layer (a model parameters) to keep only entries in index.
<add> A Conv1D work as a Linear layer (see e.g. BERT) but the weights are transposed.
<add> Return the pruned layer as a new layer with requires_grad=True.
<add> Used to remove heads.
<add> """
<add> index = index.to(layer.weight.device)
<add> W = layer.weight.index_select(dim, index).clone().detach()
<add> if dim == 0:
<add> b = layer.bias.clone().detach()
<add> else:
<add> b = layer.bias[index].clone().detach()
<add> new_size = list(layer.weight.size())
<add> new_size[dim] = len(index)
<add> new_layer = Conv1D(new_size[1], new_size[0])
<add> new_layer.weight.requires_grad = False
<add> new_layer.weight.copy_(W.contiguous())
<add> new_layer.weight.requires_grad = True
<add> new_layer.bias.requires_grad = False
<add> new_layer.bias.copy_(b.contiguous())
<add> new_layer.bias.requires_grad = True
<add> return new_layer
<add>
<add>
<ide> def load_tf_weights_in_gpt2(model, gpt2_checkpoint_path):
<ide> """ Load tf checkpoints in a pytorch model
<ide> """
<ide> def forward(self, x):
<ide>
<ide>
<ide> class Attention(nn.Module):
<del> def __init__(self, nx, n_ctx, config, scale=False, output_attentions=False):
<add> def __init__(self, nx, n_ctx, config, scale=False, output_attentions=False, keep_multihead_output=False):
<ide> super(Attention, self).__init__()
<ide> n_state = nx # in Attention: n_state=768 (nx=n_embd)
<ide> # [switch nx => n_state from Block to Attention to keep identical to TF implem]
<ide> def __init__(self, nx, n_ctx, config, scale=False, output_attentions=False):
<ide> self.n_head = config.n_head
<ide> self.split_size = n_state
<ide> self.scale = scale
<add>
<ide> self.output_attentions = output_attentions
<add> self.keep_multihead_output = keep_multihead_output
<add> self.multihead_output = None
<add>
<ide> self.c_attn = Conv1D(n_state * 3, nx)
<ide> self.c_proj = Conv1D(n_state, nx)
<ide> self.attn_dropout = nn.Dropout(config.attn_pdrop)
<ide> self.resid_dropout = nn.Dropout(config.resid_pdrop)
<ide>
<del> def _attn(self, q, k, v):
<add> def prune_heads(self, heads):
<add> mask = torch.ones(self.n_head, self.split_size // self.n_head)
<add> for head in heads:
<add> mask[head] = 0
<add> mask = mask.view(-1).contiguous().eq(1)
<add> index = torch.arange(len(mask))[mask].long()
<add> index_attn = torch.cat([index, index + self.split_size, index + (2*self.split_size)])
<add> # Prune conv1d layers
<add> self.c_attn = prune_conv1d_layer(self.c_attn, index_attn, dim=1)
<add> self.c_proj = prune_conv1d_layer(self.c_proj, index, dim=0)
<add> # Update hyper params
<add> self.split_size = (self.split_size // self.n_head) * (self.n_head - len(heads))
<add> self.n_head = self.n_head - len(heads)
<add>
<add> def _attn(self, q, k, v, head_mask=None):
<ide> w = torch.matmul(q, k)
<ide> if self.scale:
<ide> w = w / math.sqrt(v.size(-1))
<ide> def _attn(self, q, k, v):
<ide>
<ide> w = nn.Softmax(dim=-1)(w)
<ide> w = self.attn_dropout(w)
<add>
<add> # Mask heads if we want to
<add> if head_mask is not None:
<add> w = w * head_mask
<add>
<ide> if self.output_attentions:
<ide> return w, torch.matmul(w, v)
<ide> return torch.matmul(w, v)
<ide> def split_heads(self, x, k=False):
<ide> else:
<ide> return x.permute(0, 2, 1, 3) # (batch, head, seq_length, head_features)
<ide>
<del> def forward(self, x, layer_past=None):
<add> def forward(self, x, layer_past=None, head_mask=None):
<ide> x = self.c_attn(x)
<ide> query, key, value = x.split(self.split_size, dim=2)
<ide> query = self.split_heads(query)
<ide> def forward(self, x, layer_past=None):
<ide> key = torch.cat((past_key, key), dim=-1)
<ide> value = torch.cat((past_value, value), dim=-2)
<ide> present = torch.stack((key.transpose(-2, -1), value)) # transpose to have same shapes for stacking
<del> a = self._attn(query, key, value)
<add>
<add> a = self._attn(query, key, value, head_mask)
<add> if self.keep_multihead_output:
<add> self.multihead_output = a
<add> self.multihead_output.retain_grad()
<add>
<ide> if self.output_attentions:
<ide> attentions, a = a
<ide> a = self.merge_heads(a)
<ide> def forward(self, x):
<ide>
<ide>
<ide> class Block(nn.Module):
<del> def __init__(self, n_ctx, config, scale=False, output_attentions=False):
<add> def __init__(self, n_ctx, config, scale=False, output_attentions=False, keep_multihead_output=False):
<ide> super(Block, self).__init__()
<ide> nx = config.n_embd
<ide> self.output_attentions = output_attentions
<ide> self.ln_1 = LayerNorm(nx, eps=config.layer_norm_epsilon)
<del> self.attn = Attention(nx, n_ctx, config, scale, output_attentions)
<add> self.attn = Attention(nx, n_ctx, config, scale, output_attentions, keep_multihead_output)
<ide> self.ln_2 = LayerNorm(nx, eps=config.layer_norm_epsilon)
<ide> self.mlp = MLP(4 * nx, config)
<ide>
<del> def forward(self, x, layer_past=None):
<del> output_attn = self.attn(self.ln_1(x), layer_past=layer_past)
<add> def forward(self, x, layer_past=None, head_mask=None):
<add> output_attn = self.attn(self.ln_1(x), layer_past=layer_past, head_mask=head_mask)
<ide> if self.output_attentions:
<ide> attentions, a, present = output_attn
<ide> else:
<ide> class GPT2Model(GPT2PreTrainedModel):
<ide> ```
<ide> """
<ide>
<del> def __init__(self, config, output_attentions=False):
<add> def __init__(self, config, output_attentions=False, keep_multihead_output=False):
<ide> super(GPT2Model, self).__init__(config)
<ide> self.output_attentions = output_attentions
<ide> self.wte = nn.Embedding(config.total_tokens_embeddings, config.n_embd)
<ide> self.wpe = nn.Embedding(config.n_positions, config.n_embd)
<ide> self.drop = nn.Dropout(config.embd_pdrop)
<del> block = Block(config.n_ctx, config, scale=True, output_attentions=output_attentions)
<add> block = Block(config.n_ctx, config, scale=True, output_attentions=output_attentions,
<add> keep_multihead_output=keep_multihead_output)
<ide> self.h = nn.ModuleList([copy.deepcopy(block) for _ in range(config.n_layer)])
<ide> self.ln_f = LayerNorm(config.n_embd, eps=config.layer_norm_epsilon)
<ide>
<ide> def set_num_special_tokens(self, num_special_tokens):
<ide> # Copy word embeddings from the previous weights
<ide> self.wte.weight.data[:self.config.vocab_size, :] = old_embed.weight.data[:self.config.vocab_size, :]
<ide>
<del> def forward(self, input_ids, position_ids=None, token_type_ids=None, past=None):
<add> def prune_heads(self, heads_to_prune):
<add> """ Prunes heads of the model.
<add> heads_to_prune: dict of {layer_num: list of heads to prune in this layer}
<add> """
<add> for layer, heads in heads_to_prune.items():
<add> self.h[layer].attn.prune_heads(heads)
<add>
<add> def get_multihead_outputs(self):
<add> """ Gather all multi-head outputs.
<add> Return: list (layers) of multihead module outputs with gradients
<add> """
<add> return [h.attn.multihead_output for h in self.h]
<add>
<add> def forward(self, input_ids, position_ids=None, token_type_ids=None, past=None, head_mask=None):
<ide> if past is None:
<ide> past_length = 0
<ide> past = [None] * len(self.h)
<ide> def forward(self, input_ids, position_ids=None, token_type_ids=None, past=None):
<ide> position_ids = torch.arange(past_length, input_ids.size(-1) + past_length, dtype=torch.long, device=input_ids.device)
<ide> position_ids = position_ids.unsqueeze(0).expand_as(input_ids)
<ide>
<add> # Prepare head mask if needed
<add> # 1.0 in head_mask indicate we mask the head
<add> # attention_probs has shape bsz x n_heads x N x N
<add> if head_mask is not None:
<add> if head_mask.dim() == 1:
<add> head_mask = head_mask.unsqueeze(0).unsqueeze(-1).unsqueeze(-1)
<add> elif head_mask.dim() == 2:
<add> head_mask = head_mask.unsqueeze(-1).unsqueeze(-1) # We can specify head_mask for each instance in batch
<add> head_mask = head_mask.to(dtype=next(self.parameters()).dtype) # switch to fload if need + fp16 compatibility
<add> head_mask = (1.0 - head_mask)
<add>
<ide> input_shape = input_ids.size()
<ide> input_ids = input_ids.view(-1, input_ids.size(-1))
<ide> position_ids = position_ids.view(-1, position_ids.size(-1))
<ide> def forward(self, input_ids, position_ids=None, token_type_ids=None, past=None):
<ide> presents = []
<ide> all_attentions = []
<ide> for block, layer_past in zip(self.h, past):
<add> outputs = block(hidden_states, layer_past, head_mask)
<ide> if self.output_attentions:
<del> attentions, hidden_states, present = block(hidden_states, layer_past)
<add> attentions, hidden_states, present = outputs
<ide> all_attentions.append(attentions)
<ide> else:
<del> hidden_states, present = block(hidden_states, layer_past)
<add> hidden_states, present = outputs
<ide> presents.append(present)
<ide> hidden_states = self.ln_f(hidden_states)
<ide> output_shape = input_shape + (hidden_states.size(-1),)
<ide> class GPT2LMHeadModel(GPT2PreTrainedModel):
<ide> ```
<ide> """
<ide>
<del> def __init__(self, config, output_attentions=False):
<add> def __init__(self, config, output_attentions=False, keep_multihead_output=False):
<ide> super(GPT2LMHeadModel, self).__init__(config)
<del> self.transformer = GPT2Model(config, output_attentions=output_attentions)
<add> self.transformer = GPT2Model(config, output_attentions=output_attentions,
<add> keep_multihead_output=keep_multihead_output)
<ide> self.lm_head = GPT2LMHead(self.transformer.wte.weight, config)
<ide> self.apply(self.init_weights)
<ide>
<ide> def set_num_special_tokens(self, num_special_tokens, predict_special_tokens=True
<ide> self.transformer.set_num_special_tokens(num_special_tokens)
<ide> self.lm_head.set_embeddings_weights(self.transformer.wte.weight, predict_special_tokens=predict_special_tokens)
<ide>
<del> def forward(self, input_ids, position_ids=None, token_type_ids=None, lm_labels=None, past=None):
<del> transformer_output = self.transformer(input_ids, position_ids, token_type_ids, past)
<add> def forward(self, input_ids, position_ids=None, token_type_ids=None, lm_labels=None, past=None, head_mask=None):
<add> transformer_output = self.transformer(input_ids, position_ids, token_type_ids, past, head_mask)
<ide> if self.transformer.output_attentions:
<ide> all_attentions, hidden_states, presents = transformer_output
<ide> else:
<ide> class GPT2DoubleHeadsModel(GPT2PreTrainedModel):
<ide> ```
<ide> """
<ide>
<del> def __init__(self, config, output_attentions=False):
<add> def __init__(self, config, output_attentions=False, keep_multihead_output=False):
<ide> super(GPT2DoubleHeadsModel, self).__init__(config)
<del> self.transformer = GPT2Model(config, output_attentions=output_attentions)
<add> self.transformer = GPT2Model(config, output_attentions=output_attentions,
<add> keep_multihead_output=keep_multihead_output)
<ide> self.lm_head = GPT2LMHead(self.transformer.wte.weight, config)
<ide> self.multiple_choice_head = GPT2MultipleChoiceHead(config)
<ide> self.apply(self.init_weights)
<ide> def set_num_special_tokens(self, num_special_tokens, predict_special_tokens=True
<ide> self.transformer.set_num_special_tokens(num_special_tokens)
<ide> self.lm_head.set_embeddings_weights(self.transformer.wte.weight, predict_special_tokens=predict_special_tokens)
<ide>
<del> def forward(self, input_ids, mc_token_ids, lm_labels=None, mc_labels=None, token_type_ids=None, position_ids=None, past=None):
<del> transformer_output = self.transformer(input_ids, position_ids, token_type_ids, past)
<add> def forward(self, input_ids, mc_token_ids, lm_labels=None, mc_labels=None, token_type_ids=None,
<add> position_ids=None, past=None, head_mask=None):
<add> transformer_output = self.transformer(input_ids, position_ids, token_type_ids, past, head_mask)
<ide> if self.transformer.output_attentions:
<ide> all_attentions, hidden_states, presents = transformer_output
<ide> else:
<ide><path>tests/modeling_gpt2_test.py
<ide> def check_gpt2_double_heads_loss_output(self, result):
<ide> [list(l.size()) for l in result["loss"]],
<ide> [[], []])
<ide>
<add> def create_and_check_gpt2_for_headmasking(self, config, input_ids, token_type_ids, position_ids,
<add> mc_labels, lm_labels, mc_token_ids):
<add> for model_class in (GPT2Model, GPT2LMHeadModel, GPT2DoubleHeadsModel):
<add> model = model_class(config=config, keep_multihead_output=True)
<add> model.eval()
<add> head_mask = torch.ones(self.n_head).to(input_ids.device)
<add> head_mask[0] = 0.0
<add> head_mask[-1] = 0.0 # Mask all but the first and last heads
<add> if isinstance(model, GPT2DoubleHeadsModel):
<add> output = model(input_ids, mc_token_ids, head_mask=head_mask)
<add> else:
<add> output = model(input_ids, head_mask=head_mask)
<add>
<add> output = sum(t.sum() for t in output[:-1])
<add> output = output.sum()
<add> output.backward()
<add> multihead_outputs = (model if isinstance(model, GPT2Model) else model.transformer).get_multihead_outputs()
<add>
<add> self.parent.assertEqual(len(multihead_outputs), self.n_layer)
<add> self.parent.assertListEqual(
<add> list(multihead_outputs[0].size()),
<add> [self.batch_size * self.n_choices, self.n_head,
<add> self.seq_length, self.n_embd // self.n_head])
<add> self.parent.assertEqual(
<add> len(multihead_outputs[0][:, 1:(self.n_head-1), :, :].nonzero()),
<add> 0)
<add> self.parent.assertEqual(
<add> len(multihead_outputs[0][:, 0, :, :].nonzero()),
<add> self.batch_size * self.n_choices * self.seq_length * self.n_embd // self.n_head)
<add> self.parent.assertEqual(
<add> len(multihead_outputs[0][:, self.n_head-1, :, :].nonzero()),
<add> self.batch_size * self.n_choices * self.seq_length * self.n_embd // self.n_head)
<add>
<add> def create_and_check_gpt2_for_head_pruning(self, config, input_ids, token_type_ids, position_ids,
<add> mc_labels, lm_labels, mc_token_ids):
<add> for model_class in (GPT2Model, GPT2LMHeadModel, GPT2DoubleHeadsModel):
<add> model = model_class(config=config, keep_multihead_output=True)
<add> model.eval()
<add> transformer = model if isinstance(model, GPT2Model) else model.transformer
<add> heads_to_prune = {0: list(range(1, self.n_head)),
<add> -1: [0]}
<add> transformer.prune_heads(heads_to_prune)
<add> if isinstance(model, GPT2DoubleHeadsModel):
<add> output = model(input_ids, mc_token_ids)
<add> else:
<add> output = model(input_ids)
<add>
<add> output = sum(t.sum() for t in output[:-1])
<add> output = output.sum()
<add> output.backward()
<add> multihead_outputs = transformer.get_multihead_outputs()
<add>
<add> self.parent.assertEqual(len(multihead_outputs), self.n_layer)
<add> self.parent.assertListEqual(
<add> list(multihead_outputs[0].size()),
<add> [self.batch_size * self.n_choices, 1,
<add> self.seq_length, self.n_embd // self.n_head])
<add> self.parent.assertListEqual(
<add> list(multihead_outputs[1].size()),
<add> [self.batch_size * self.n_choices, self.n_head,
<add> self.seq_length, self.n_embd // self.n_head])
<add> self.parent.assertListEqual(
<add> list(multihead_outputs[-1].size()),
<add> [self.batch_size * self.n_choices, self.n_head-1,
<add> self.seq_length, self.n_embd // self.n_head])
<add>
<add>
<ide> def test_default(self):
<ide> self.run_tester(GPT2ModelTest.GPT2ModelTester(self))
<ide>
<ide> def run_tester(self, tester):
<ide> tester.check_gpt2_double_heads_output(output_result)
<ide> tester.check_gpt2_double_heads_loss_output(output_result)
<ide>
<add> tester.create_and_check_gpt2_for_headmasking(*config_and_inputs)
<add> tester.create_and_check_gpt2_for_head_pruning(*config_and_inputs)
<add>
<ide> @classmethod
<ide> def ids_tensor(cls, shape, vocab_size, rng=None, name=None):
<ide> """Creates a random int32 tensor of the shape within the vocab size.""" | 2 |
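Editor's note: a short usage sketch of the two mechanisms this patch introduces, following its own conventions (`prune_heads` takes a `{layer: [head indices]}` dict, and in this version a value of `1.0` in `head_mask` marks a head to be masked). The no-argument `GPT2Config()` and the random input ids are assumptions for illustration.

```python
import torch
from pytorch_pretrained_bert.modeling_gpt2 import GPT2Config, GPT2Model

config = GPT2Config()                      # assumes library defaults (12 layers, 12 heads)
model = GPT2Model(config, keep_multihead_output=True)
model.eval()

input_ids = torch.randint(0, 1000, (1, 8))  # dummy batch of 8 token ids

# Soft masking: zero out head 0 in every layer for this forward pass.
head_mask = torch.zeros(config.n_head)
head_mask[0] = 1.0                          # 1.0 == "mask this head" in this patch
hidden_states, presents = model(input_ids, head_mask=head_mask)
print(hidden_states.shape)

# Hard pruning: permanently drop heads 1-3 of layer 0 from the weights.
model.prune_heads({0: [1, 2, 3]})
hidden_states, presents = model(input_ids)
print(len(model.get_multihead_outputs()))   # one multi-head output kept per layer
```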
Ruby | Ruby | remove slashes from documentation | d0c138ad985fa70f55792288f73a31e01656e37b | <ide><path>Library/Homebrew/cmd/--cache.rb
<ide> module Homebrew
<ide> def __cache_args
<ide> Homebrew::CLI::Parser.new do
<ide> usage_banner <<~EOS
<del> `--cache` [<options>] [<formula/cask>]
<add> `--cache` [<options>] [<formula>]
<ide>
<ide> Display Homebrew's download cache. See also `HOMEBREW_CACHE`.
<ide>
<del> If <formula/cask> is provided, display the file or directory used to cache <formula/cask>.
<add> If <formula> is provided, display the file or directory used to cache <formula>.
<ide> EOS
<ide> switch "-s", "--build-from-source",
<ide> description: "Show the cache file used when building from source." | 1 |
Ruby | Ruby | drop errors constant | b2f7c460767c2ce544d80399ce7d0d2cbc3aa7cb | <ide><path>activerecord/lib/active_record/coders/yaml_column.rb
<ide> module ActiveRecord
<ide> module Coders # :nodoc:
<ide> class YAMLColumn # :nodoc:
<del> RESCUE_ERRORS = [ ArgumentError, Psych::SyntaxError ]
<ide>
<ide> attr_accessor :object_class
<ide>
<ide> def load(yaml)
<ide> obj ||= object_class.new if object_class != Object
<ide>
<ide> obj
<del> rescue *RESCUE_ERRORS
<add> rescue ArgumentError, Psych::SyntaxError
<ide> yaml
<ide> end
<ide> end
<ide><path>activerecord/lib/active_record/fixture_set/file.rb
<ide> def each(&block)
<ide> rows.each(&block)
<ide> end
<ide>
<del> RESCUE_ERRORS = [ ArgumentError, Psych::SyntaxError ] # :nodoc:
<ide>
<ide> private
<ide> def rows
<ide> return @rows if @rows
<ide>
<ide> begin
<ide> data = YAML.load(render(IO.read(@file)))
<del> rescue *RESCUE_ERRORS => error
<add> rescue ArgumentError, Psych::SyntaxError => error
<ide> raise Fixture::FormatError, "a YAML error occurred parsing #{@file}. Please note that YAML must be consistently indented using spaces. Tabs are not allowed. Please have a look at http://www.yaml.org/faq.html\nThe exact error was:\n #{error.class}: #{error}", error.backtrace
<ide> end
<ide> @rows = data ? validate(data).to_a : [] | 2 |
Javascript | Javascript | fix typo in atom.confirm documentation | aa8d3d6ee8031777679c5d9303effbcf86ca85b2 | <ide><path>src/atom-environment.js
<ide> class AtomEnvironment {
<ide> // window.alert('bummer')
<ide> // }
<ide> // })
<add> // ```
<ide> //
<ide> // ```js
<ide> // // Legacy sync version | 1 |
Java | Java | add test case for writefunction error signal | 4c08863776df9cf6da99b871ba1b5766a4772948 | <ide><path>spring-web/src/test/java/org/springframework/http/server/reactive/ChannelSendOperatorTests.java
<ide> package org.springframework.http.server.reactive;
<ide>
<ide> import java.nio.charset.StandardCharsets;
<add>import java.time.Duration;
<ide> import java.util.ArrayList;
<ide> import java.util.Arrays;
<ide> import java.util.List;
<ide> import reactor.core.publisher.Flux;
<ide> import reactor.core.publisher.Mono;
<ide> import reactor.core.publisher.Signal;
<add>import reactor.test.StepVerifier;
<ide>
<ide> import org.springframework.core.io.buffer.DataBuffer;
<ide> import org.springframework.core.io.buffer.LeakAwareDataBufferFactory;
<ide> public void cancelWhileItemCached() {
<ide> }
<ide>
<ide> @Test // gh-22720
<del> public void errorWhileItemCached() {
<add> public void errorFromWriteSourceWhileItemCached() {
<add>
<add> // 1. First item received
<add> // 2. writeFunction applied and writeCompletionBarrier subscribed to it
<add> // 3. Write Publisher fails right after that and before request(n) from server
<add>
<ide> NettyDataBufferFactory delegate = new NettyDataBufferFactory(ByteBufAllocator.DEFAULT);
<ide> LeakAwareDataBufferFactory bufferFactory = new LeakAwareDataBufferFactory(delegate);
<ide> ZeroDemandSubscriber writeSubscriber = new ZeroDemandSubscriber();
<ide> public void errorWhileItemCached() {
<ide> bufferFactory.checkForLeaks();
<ide> }
<ide>
<add> @Test // gh-22720
<add> public void errorFromWriteFunctionWhileItemCached() {
<add>
<add> // 1. First item received
<add> // 2. writeFunction applied and writeCompletionBarrier subscribed to it
<add> // 3. writeFunction fails, e.g. to flush status and headers, before request(n) from server
<add>
<add> NettyDataBufferFactory delegate = new NettyDataBufferFactory(ByteBufAllocator.DEFAULT);
<add> LeakAwareDataBufferFactory bufferFactory = new LeakAwareDataBufferFactory(delegate);
<add>
<add> ChannelSendOperator<DataBuffer> operator = new ChannelSendOperator<>(
<add> Flux.create(sink -> {
<add> DataBuffer dataBuffer = bufferFactory.allocateBuffer();
<add> dataBuffer.write("foo", StandardCharsets.UTF_8);
<add> sink.next(dataBuffer);
<add> }),
<add> publisher -> {
<add> publisher.subscribe(new ZeroDemandSubscriber());
<add> return Mono.error(new IllegalStateException("err"));
<add> });
<add>
<add> StepVerifier.create(operator).expectErrorMessage("err").verify(Duration.ofSeconds(5));
<add> bufferFactory.checkForLeaks();
<add> }
<ide>
<ide> private <T> Mono<Void> sendOperator(Publisher<String> source){
<ide> return new ChannelSendOperator<>(source, writer::send); | 1 |
Ruby | Ruby | add support for uniqueness validations | a61692cf4152e4dcc6850207a55b6d8bab4280dd | <ide><path>activerecord/lib/active_record/encryption.rb
<ide> module Encryption
<ide> autoload :EnvelopeEncryptionKeyProvider
<ide> autoload :Errors
<ide> autoload :ExtendedDeterministicQueries
<add> autoload :ExtendedDeterministicUniquenessValidator
<ide> autoload :Key
<ide> autoload :KeyGenerator
<ide> autoload :KeyProvider
<ide><path>activerecord/lib/active_record/encryption/extended_deterministic_uniqueness_validator.rb
<add># frozen_string_literal: true
<add>
<add>module ActiveRecord
<add> module Encryption
<add> module ExtendedDeterministicUniquenessValidator
<add> def self.install_support
<add> ActiveRecord::Validations::UniquenessValidator.prepend(EncryptedUniquenessValidator)
<add> end
<add>
<add> module EncryptedUniquenessValidator
<add> def validate_each(record, attribute, value)
<add> super(record, attribute, value)
<add>
<add> klass = record.class
<add> if klass.deterministic_encrypted_attributes&.each do |attribute_name|
<add> encrypted_type = klass.type_for_attribute(attribute_name)
<add> [ encrypted_type, *encrypted_type.previous_encrypted_types ].each do |type|
<add> encrypted_value = type.serialize(value)
<add> ActiveRecord::Encryption.without_encryption do
<add> super(record, attribute, encrypted_value)
<add> end
<add> end
<add> end
<add> end
<add> end
<add> end
<add> end
<add> end
<add>end
<ide><path>activerecord/lib/active_record/railtie.rb
<ide> class Railtie < Rails::Railtie # :nodoc:
<ide> ActiveRecord::Fixture.prepend ActiveRecord::Encryption::EncryptedFixtures
<ide> end
<ide>
<del> # Support extended queries for deterministic attributes
<add> # Support extended queries for deterministic attributes and validations
<ide> if ActiveRecord::Encryption.config.extend_queries
<ide> ActiveRecord::Encryption::ExtendedDeterministicQueries.install_support
<add> ActiveRecord::Encryption::ExtendedDeterministicUniquenessValidator.install_support
<ide> end
<ide> end
<ide>
<ide><path>activerecord/test/cases/encryption/uniqueness_validations_test.rb
<add># frozen_string_literal: true
<add>
<add>require "cases/encryption/helper"
<add>require "models/book_encrypted"
<add>require "models/author_encrypted"
<add>
<add>class ActiveRecord::Encryption::UniquenessValidationsTest < ActiveRecord::EncryptionTestCase
<add> fixtures :books
<add>
<add> test "uniqueness validations work" do
<add> EncryptedBookWithDowncaseName.create!(name: "dune")
<add> assert_raises ActiveRecord::RecordInvalid do
<add> EncryptedBookWithDowncaseName.create!(name: "dune")
<add> end
<add> end
<add>
<add> test "uniqueness validations work when mixing encrypted an unencrypted data" do
<add> ActiveRecord::Encryption.config.support_unencrypted_data = true
<add>
<add> ActiveRecord::Encryption.without_encryption { EncryptedBookWithDowncaseName.create! name: "dune" }
<add>
<add> assert_raises ActiveRecord::RecordInvalid do
<add> EncryptedBookWithDowncaseName.create!(name: "dune")
<add> end
<add> end
<add>
<add> test "uniqueness validations work when using old encryption schemes" do
<add> ActiveRecord::Encryption.config.previous = [ { downcase: true } ]
<add>
<add> OldEncryptionBook = Class.new(Book) do
<add> self.table_name = "books"
<add>
<add> validates :name, uniqueness: true
<add> encrypts :name, deterministic: true, downcase: false
<add> end
<add>
<add> OldEncryptionBook.create! name: "dune"
<add>
<add> assert_raises ActiveRecord::RecordInvalid do
<add> OldEncryptionBook.create! name: "DUNE"
<add> end
<add> end
<add>end
<ide><path>activerecord/test/cases/helper.rb
<ide> def in_time_zone(zone)
<ide> key_derivation_salt: "testing key derivation salt"
<ide>
<ide> ActiveRecord::Encryption::ExtendedDeterministicQueries.install_support
<add>ActiveRecord::Encryption::ExtendedDeterministicUniquenessValidator.install_support
<ide><path>activerecord/test/models/author_encrypted.rb
<ide> class EncryptedAuthor < Author
<ide> self.table_name = "authors"
<ide>
<add> validates :name, uniqueness: true
<ide> encrypts :name, previous: { deterministic: true }
<ide> end
<ide>
<ide><path>activerecord/test/models/book_encrypted.rb
<ide> class EncryptedBook < ActiveRecord::Base
<ide> class EncryptedBookWithDowncaseName < ActiveRecord::Base
<ide> self.table_name = "books"
<ide>
<add> validates :name, uniqueness: true
<ide> encrypts :name, deterministic: true, downcase: true
<ide> end
<ide> | 7 |
Text | Text | fix wordy sentence | bb87c16ad92c13ca91d8e349c0f0b277431b81af | <ide><path>guides/source/getting_started.md
<ide> styling for it afterwards.
<ide>
<ide> ### Laying down the ground work
<ide>
<del>The first thing that you are going to need to create a new article within the
<del>application is a place to do that. A great place for that would be at
<del>`/articles/new`.
<del>With the route already defined, requests can now be made to `/articles/new` in
<del>the application. Navigate to <http://localhost:3000/articles/new> and you'll see
<del>a routing error:
<add>Firstly, you need a place within the application to create a new article. A
<add>great place for that would be at `/articles/new`. With the route already
<add>defined, requests can now be made to `/articles/new` in the application.
<add>Navigate to <http://localhost:3000/articles/new> and you'll see a routing
<add>error:
<ide>
<ide> 
<ide> | 1 |
Javascript | Javascript | release callback and buffer after processing | 313ef544173965309a5b24e3bf3a7f57af397c01 | <ide><path>lib/zlib.js
<ide> Zlib.prototype._processChunk = function(chunk, flushFlag, cb) {
<ide> req.callback = callback;
<ide>
<ide> function callback(availInAfter, availOutAfter) {
<add> // When the callback is used in an async write, the callback's
<add> // context is the `req` object that was created. The req object
<add> // is === this._handle, and that's why it's important to null
<add> // out the values after they are done being used. `this._handle`
<add> // can stay in memory longer than the callback and buffer are needed.
<add> if (this) {
<add> this.buffer = null;
<add> this.callback = null;
<add> }
<add>
<ide> if (self._hadError)
<ide> return;
<ide> | 1 |
Javascript | Javascript | improve assertions in pummel/test-timers | 87c433e120604b2c44b87d5f6bcd8f8e00da6af6 | <ide><path>test/pummel/test-timers.js
<ide> const WINDOW = 200; // Why does this need to be so big?
<ide> assert.ok(diff > 0);
<ide> console.error(`diff: ${diff}`);
<ide>
<del> assert.strictEqual(1000 - WINDOW < diff && diff < 1000 + WINDOW, true);
<add> assert.ok(1000 <= diff && diff < 1000 + WINDOW);
<ide> }), 1000);
<ide> }
<ide>
<ide> const WINDOW = 200; // Why does this need to be so big?
<ide>
<ide> const t = interval_count * 1000;
<ide>
<del> assert.ok(t - WINDOW < diff && diff < t + WINDOW, `t: ${t}`);
<add> assert.ok(t <= diff && diff < t + (WINDOW * interval_count));
<ide>
<ide> assert.ok(interval_count <= 3, `interval_count: ${interval_count}`);
<ide> if (interval_count === 3) | 1 |
Go | Go | increase test coverage of pkg/authorization | f1eb0c0ebb85af2ae5373f16fd529588c07815cc | <ide><path>pkg/authorization/api_test.go
<add>package authorization
<add>
<add>import (
<add> "crypto/rand"
<add> "crypto/rsa"
<add> "crypto/tls"
<add> "crypto/x509"
<add> "crypto/x509/pkix"
<add> "math/big"
<add> "net/http"
<add> "testing"
<add> "time"
<add>
<add> "github.com/stretchr/testify/require"
<add>)
<add>
<add>func TestPeerCertificateMarshalJSON(t *testing.T) {
<add> template := &x509.Certificate{
<add> IsCA: true,
<add> BasicConstraintsValid: true,
<add> SubjectKeyId: []byte{1, 2, 3},
<add> SerialNumber: big.NewInt(1234),
<add> Subject: pkix.Name{
<add> Country: []string{"Earth"},
<add> Organization: []string{"Mother Nature"},
<add> },
<add> NotBefore: time.Now(),
<add> NotAfter: time.Now().AddDate(5, 5, 5),
<add>
<add> ExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageClientAuth, x509.ExtKeyUsageServerAuth},
<add> KeyUsage: x509.KeyUsageDigitalSignature | x509.KeyUsageCertSign,
<add> }
<add> // generate private key
<add> privatekey, err := rsa.GenerateKey(rand.Reader, 2048)
<add> require.NoError(t, err)
<add> publickey := &privatekey.PublicKey
<add>
<add> // create a self-signed certificate. template = parent
<add> var parent = template
<add> raw, err := x509.CreateCertificate(rand.Reader, template, parent, publickey, privatekey)
<add> require.NoError(t, err)
<add>
<add> cert, err := x509.ParseCertificate(raw)
<add> require.NoError(t, err)
<add>
<add> var certs = []*x509.Certificate{cert}
<add> addr := "www.authz.com/auth"
<add> req, err := http.NewRequest("GET", addr, nil)
<add> require.NoError(t, err)
<add>
<add> req.RequestURI = addr
<add> req.TLS = &tls.ConnectionState{}
<add> req.TLS.PeerCertificates = certs
<add> req.Header.Add("header", "value")
<add>
<add> for _, c := range req.TLS.PeerCertificates {
<add> pcObj := PeerCertificate(*c)
<add>
<add> t.Run("Marshalling :", func(t *testing.T) {
<add> raw, err = pcObj.MarshalJSON()
<add> require.NotNil(t, raw)
<add> require.Nil(t, err)
<add> })
<add>
<add> t.Run("UnMarshalling :", func(t *testing.T) {
<add> err := pcObj.UnmarshalJSON(raw)
<add> require.Nil(t, err)
<add> require.Equal(t, "Earth", pcObj.Subject.Country[0])
<add> require.Equal(t, true, pcObj.IsCA)
<add>
<add> })
<add>
<add> }
<add>
<add>}
<ide><path>pkg/authorization/middleware_test.go
<add>package authorization
<add>
<add>import (
<add> "net/http"
<add> "net/http/httptest"
<add> "strings"
<add> "testing"
<add>
<add> "github.com/docker/docker/pkg/plugingetter"
<add> "github.com/stretchr/testify/require"
<add>)
<add>
<add>func TestMiddleware(t *testing.T) {
<add> pluginNames := []string{"testPlugin1", "testPlugin2"}
<add> var pluginGetter plugingetter.PluginGetter
<add> m := NewMiddleware(pluginNames, pluginGetter)
<add> authPlugins := m.getAuthzPlugins()
<add> require.Equal(t, 2, len(authPlugins))
<add> require.EqualValues(t, pluginNames[0], authPlugins[0].Name())
<add> require.EqualValues(t, pluginNames[1], authPlugins[1].Name())
<add>}
<add>
<add>func TestNewResponseModifier(t *testing.T) {
<add> recorder := httptest.NewRecorder()
<add> modifier := NewResponseModifier(recorder)
<add> modifier.Header().Set("H1", "V1")
<add> modifier.Write([]byte("body"))
<add> require.False(t, modifier.Hijacked())
<add> modifier.WriteHeader(http.StatusInternalServerError)
<add> require.NotNil(t, modifier.RawBody())
<add>
<add> raw, err := modifier.RawHeaders()
<add> require.NotNil(t, raw)
<add> require.Nil(t, err)
<add>
<add> headerData := strings.Split(strings.TrimSpace(string(raw)), ":")
<add> require.EqualValues(t, "H1", strings.TrimSpace(headerData[0]))
<add> require.EqualValues(t, "V1", strings.TrimSpace(headerData[1]))
<add>
<add> modifier.Flush()
<add> modifier.FlushAll()
<add>
<add> if recorder.Header().Get("H1") != "V1" {
<add> t.Fatalf("Header value must exists %s", recorder.Header().Get("H1"))
<add> }
<add>
<add>}
<add>
<add>func setAuthzPlugins(m *Middleware, plugins []Plugin) {
<add> m.mu.Lock()
<add> m.plugins = plugins
<add> m.mu.Unlock()
<add>}
<ide><path>pkg/authorization/middleware_unix_test.go
<add>// +build !windows
<add>
<add>package authorization
<add>
<add>import (
<add> "net/http"
<add> "net/http/httptest"
<add> "testing"
<add>
<add> "github.com/docker/docker/pkg/plugingetter"
<add> "github.com/stretchr/testify/require"
<add> "golang.org/x/net/context"
<add>)
<add>
<add>func TestMiddlewareWrapHandler(t *testing.T) {
<add> server := authZPluginTestServer{t: t}
<add> server.start()
<add> defer server.stop()
<add>
<add> authZPlugin := createTestPlugin(t)
<add> pluginNames := []string{authZPlugin.name}
<add>
<add> var pluginGetter plugingetter.PluginGetter
<add> middleWare := NewMiddleware(pluginNames, pluginGetter)
<add> handler := func(ctx context.Context, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
<add> return nil
<add> }
<add>
<add> authList := []Plugin{authZPlugin}
<add> middleWare.SetPlugins([]string{"My Test Plugin"})
<add> setAuthzPlugins(middleWare, authList)
<add> mdHandler := middleWare.WrapHandler(handler)
<add> require.NotNil(t, mdHandler)
<add>
<add> addr := "www.example.com/auth"
<add> req, _ := http.NewRequest("GET", addr, nil)
<add> req.RequestURI = addr
<add> req.Header.Add("header", "value")
<add>
<add> resp := httptest.NewRecorder()
<add> ctx := context.Background()
<add>
<add> t.Run("Error Test Case :", func(t *testing.T) {
<add> server.replayResponse = Response{
<add> Allow: false,
<add> Msg: "Server Auth Not Allowed",
<add> }
<add> if err := mdHandler(ctx, resp, req, map[string]string{}); err == nil {
<add> require.Error(t, err)
<add> }
<add>
<add> })
<add>
<add> t.Run("Positive Test Case :", func(t *testing.T) {
<add> server.replayResponse = Response{
<add> Allow: true,
<add> Msg: "Server Auth Allowed",
<add> }
<add> if err := mdHandler(ctx, resp, req, map[string]string{}); err != nil {
<add> require.NoError(t, err)
<add> }
<add>
<add> })
<add>
<add>} | 3 |
Python | Python | add regression test for #775 | 19501f3340127c7c874e551632c36e19ba2176d3 | <ide><path>spacy/tests/regression/test_issue775.py
<add># coding: utf-8
<add>from __future__ import unicode_literals
<add>
<add>import pytest
<add>
<add>
<add>@pytest.mark.parametrize('text', ["Shell", "shell"])
<add>def test_issue775(en_tokenizer, text):
<add> """Test that 'Shell' and 'shell' are excluded from the contractions
<add> generated by the English tokenizer exceptions."""
<add> tokens = en_tokenizer(text)
<add> assert len(tokens) == 1
<add> assert tokens[0].text == text | 1 |
Python | Python | improve help string | 492fdab94de8fb77bac9f10f48b196db08852677 | <ide><path>runtests.py
<ide> def main(argv):
<ide> parser.add_argument("--verbose", "-v", action="count", default=1,
<ide> help="more verbosity")
<ide> parser.add_argument("--debug-configure", action="store_true",
<del> help="add -v to build_src to show cconfiguration compiler output")
<add> help=("add -v to build_src to show compiler "
<add> "configuration output while creating "
<add> "_numpyconfig.h and config.h"))
<ide> parser.add_argument("--no-build", "-n", action="store_true", default=False,
<ide> help="do not build the project (use system installed version)")
<ide> parser.add_argument("--build-only", "-b", action="store_true", default=False, | 1 |
Ruby | Ruby | remove uncommented railties boot_test.rb | 2dffe37ce489309aa2b5c6061516e98031a2da05 | <ide><path>railties/test/application/initializers/boot_test.rb
<del>require "isolation/abstract_unit"
<del>
<del>module ApplicationTests
<del> class BootTest < ActiveSupport::TestCase
<del> include ActiveSupport::Testing::Isolation
<del>
<del> def setup
<del> # build_app
<del> # boot_rails
<del> end
<del>
<del> def teardown
<del> # teardown_app
<del> end
<del>
<del> test "booting rails sets the load paths correctly" do
<del> # This test is pending reworking the boot process
<del> end
<del> end
<del>end | 1 |
Python | Python | add timehistory callback to bert | b39958cff891fcf31a4bab4ca55a95e401086ab3 | <ide><path>official/benchmark/bert_benchmark_utils.py
<ide> def on_batch_begin(self, batch, logs=None):
<ide> self.batch_start_times[batch] = time.time()
<ide>
<ide> def on_batch_end(self, batch, logs=None):
<add> # If there are multiple steps_per_loop, the end batch index will not be the
<add> # same as the starting index. Use the last starting index instead.
<add> if batch not in self.batch_start_times:
<add> batch = max(self.batch_start_times.keys())
<add>
<ide> self.batch_stop_times[batch] = time.time()
<ide>
<ide> def get_examples_per_sec(self, batch_size, num_batches_to_skip=1):
<ide><path>official/modeling/model_training_utils.py
<ide> def _run_callbacks_on_batch_end(batch, logs):
<ide> train_steps(train_iterator,
<ide> tf.convert_to_tensor(steps, dtype=tf.int32))
<ide> train_loss = _float_metric_value(train_loss_metric)
<del> _run_callbacks_on_batch_end(current_step, {'loss': train_loss})
<ide> current_step += steps
<add> _run_callbacks_on_batch_end(current_step - 1, {'loss': train_loss})
<ide>
<ide> # Updates training logging.
<ide> training_status = 'Train Step: %d/%d / loss = %s' % (
<ide><path>official/nlp/bert/common_flags.py
<ide> def define_common_bert_flags():
<ide> flags.DEFINE_bool('hub_module_trainable', True,
<ide> 'True to make keras layers in the hub module trainable.')
<ide>
<add> flags_core.define_log_steps()
<add>
<ide> # Adds flags for mixed precision and multi-worker training.
<ide> flags_core.define_performance(
<ide> num_parallel_calls=False,
<ide><path>official/nlp/bert/run_classifier.py
<ide> def metric_fn():
<ide> epochs,
<ide> steps_per_epoch,
<ide> eval_steps,
<del> custom_callbacks=None)
<add> custom_callbacks=custom_callbacks)
<ide>
<ide> # Use user-defined loop to start training.
<ide> logging.info('Training using customized training loop TF 2.0 with '
<ide> def run_bert(strategy,
<ide> if not strategy:
<ide> raise ValueError('Distribution strategy has not been specified.')
<ide>
<add> if FLAGS.log_steps:
<add> custom_callbacks = [keras_utils.TimeHistory(
<add> batch_size=FLAGS.train_batch_size,
<add> log_steps=FLAGS.log_steps,
<add> logdir=FLAGS.model_dir,
<add> )]
<add> else:
<add> custom_callbacks = None
<add>
<ide> trained_model = run_bert_classifier(
<ide> strategy,
<ide> model_config,
<ide> def run_bert(strategy,
<ide> train_input_fn,
<ide> eval_input_fn,
<ide> run_eagerly=FLAGS.run_eagerly,
<del> use_keras_compile_fit=FLAGS.use_keras_compile_fit)
<add> use_keras_compile_fit=FLAGS.use_keras_compile_fit,
<add> custom_callbacks=custom_callbacks)
<ide>
<ide> if FLAGS.model_export_path:
<ide> # As Keras ModelCheckpoint callback used with Keras compile/fit() API
<ide><path>official/nlp/bert/run_squad.py
<ide> from official.nlp.bert import tokenization
<ide> from official.nlp.data import squad_lib as squad_lib_wp
<ide> from official.utils.misc import distribution_utils
<add>from official.utils.misc import keras_utils
<ide>
<ide>
<ide> flags.DEFINE_string('vocab_file', None,
<ide> def main(_):
<ide> all_reduce_alg=FLAGS.all_reduce_alg,
<ide> tpu_address=FLAGS.tpu)
<ide> if FLAGS.mode in ('train', 'train_and_predict'):
<del> train_squad(strategy, input_meta_data, run_eagerly=FLAGS.run_eagerly)
<add> if FLAGS.log_steps:
<add> custom_callbacks = [keras_utils.TimeHistory(
<add> batch_size=FLAGS.train_batch_size,
<add> log_steps=FLAGS.log_steps,
<add> logdir=FLAGS.model_dir,
<add> )]
<add> else:
<add> custom_callbacks = None
<add>
<add> train_squad(
<add> strategy,
<add> input_meta_data,
<add> custom_callbacks=custom_callbacks,
<add> run_eagerly=FLAGS.run_eagerly,
<add> )
<ide> if FLAGS.mode in ('predict', 'train_and_predict'):
<ide> predict_squad(strategy, input_meta_data)
<ide>
<ide><path>official/utils/misc/keras_utils.py
<ide> def on_batch_end(self, batch, logs=None):
<ide>
<ide> self.timestamp_log.append(BatchTimestamp(self.global_steps, now))
<ide> logging.info(
<del> 'TimeHistory: %.2f examples/second between steps %d and %d',
<del> examples_per_second, self.last_log_step, self.global_steps)
<add> 'TimeHistory: %.2f seconds, %.2f examples/second between steps %d '
<add> 'and %d', elapsed_time, examples_per_second, self.last_log_step,
<add> self.global_steps)
<ide>
<ide> if self.summary_writer:
<ide> with self.summary_writer.as_default(): | 6 |
Text | Text | fix typo from | 7fd22bcda513c4cb1db30de8afccdc31f44d9f73 | <ide><path>docs/C++-Standard-Libraries.md
<ide> so Homebrew will refuse to install software if a dependency was built with an
<ide> incompatible C++ library. It's recommended that you install the dependency tree
<ide> using a compatible compiler.
<ide>
<del>**If you've upgraded to or later 10.9 from an earlier version** - because the default C++
<add>**If you've upgraded to 10.9 or later from an earlier version** - because the default C++
<ide> standard library is now libc++, you may not be able to build software using
<ide> dependencies that you built on 10.8 or lower. If you're reading this page because
<ide> you were directed here by a build error, you can most likely fix the issue if | 1 |
Javascript | Javascript | remove unneeded async | f4af03b9c8f684662c473f60c11349c82deaecf2 | <ide><path>lib/router/router.js
<ide> export default class Router {
<ide> }
<ide> }
<ide>
<del> async onPopState (e) {
<add> onPopState (e) {
<ide> if (!e.state) {
<ide> // We get state as undefined for two reasons.
<ide> // 1. With older safari (< 8) and older chrome (< 34) | 1 |
Javascript | Javascript | remove unused code | 020694907500e363c2ca11428afa458b8696eb01 | <ide><path>lib/Chunk.js
<ide> class Chunk {
<ide> return false;
<ide> }
<ide>
<del> hasChunkInGraph(filterFn) {
<del> const queue = new Set(this.groupsIterable);
<del> const chunksProcessed = new Set();
<del>
<del> for(const chunkGroup of queue) {
<del> for(const chunk of chunkGroup.chunks) {
<del> if(!chunksProcessed.has(chunk)) {
<del> chunksProcessed.add(chunk);
<del> if(filterFn(chunk))
<del> return true;
<del> }
<del> }
<del> for(const child of chunkGroup.childrenIterable)
<del> queue.add(child);
<del> }
<del> return false;
<del> }
<del>
<ide> toString() {
<ide> return `Chunk[${Array.from(this._modules).join()}]`;
<ide> }
<ide><path>lib/ChunkGroup.js
<ide> class ChunkGroup {
<ide> this.origins = [];
<ide> }
<ide>
<add> /* istanbul ignore next */
<ide> get debugId() {
<ide> return Array.from(this.chunks, x => x.debugId).join("+");
<ide> }
<ide> class ChunkGroup {
<ide> return false;
<ide> }
<ide>
<del> hasEntryModule() {
<del> for(const chunk of this.chunks)
<del> if(chunk.hasEntryModule())
<del> return true;
<del> return false;
<del> }
<del>
<ide> addChild(chunk) {
<ide> if(this._children.has(chunk)) {
<ide> return false;
<ide> class ChunkGroup {
<ide> this._parents.add(p);
<ide> }
<ide>
<del> // TODO remove and replace calls with Array.from
<del> mapParents(fn) {
<del> return Array.from(this._parents, fn);
<del> }
<del>
<ide> getNumberOfParents() {
<ide> return this._parents.size;
<ide> }
<ide> class ChunkGroup {
<ide> return this._blocks.getFromCache(getArray);
<ide> }
<ide>
<del> setBlocks(newBlocks) {
<del> this._blocks.clear();
<del> for(const p of newBlocks)
<del> this._blocks.add(p);
<del> }
<del>
<del> // TODO remove and replace calls with Array.from
<del> mapBlocks(fn) {
<del> return Array.from(this._blocks, fn);
<del> }
<del>
<ide> getNumberOfBlocks() {
<ide> return this._blocks.size;
<ide> }
<ide> class ChunkGroup {
<ide>
<ide> sortItems() {
<ide> this.origins.sort(sortOrigin);
<del> this.origins.forEach(origin => {
<del> if(origin.reasons)
<del> origin.reasons.sort();
<del> });
<ide> this._parents.sort();
<ide> this._children.sort();
<ide> }
<ide><path>lib/ChunkTemplate.js
<ide> module.exports = class ChunkTemplate extends Tapable {
<ide>
<ide> const result = [];
<ide>
<del> let filenameTemplate;
<del> if(chunk.filenameTemplate)
<del> filenameTemplate = chunk.filenameTemplate;
<del> else
<del> filenameTemplate = outputOptions.chunkFilename;
<add> const filenameTemplate = outputOptions.chunkFilename;
<ide>
<ide> result.push({
<ide> render: () => this.renderJavascript(chunk, moduleTemplates.javascript, dependencyTemplates),
<ide><path>lib/GraphHelpers.js
<del>// TODO remove this function
<del>function assert(value, typeName, name) {
<del> const Type = require(`./${typeName}`);
<del> if(value instanceof Type) return;
<del> throw new Error(`${name} is not a ${Type.name}`);
<del>}
<del>
<ide> exports.connectChunkGroupAndChunk = (chunkGroup, chunk) => {
<del> assert(chunkGroup, "ChunkGroup", "chunkGroup");
<del> assert(chunk, "Chunk", "chunk");
<ide> if(chunkGroup.pushChunk(chunk)) {
<ide> chunk.addGroup(chunkGroup);
<ide> }
<ide> };
<ide>
<ide> exports.connectChunkGroupParentAndChild = (parent, child) => {
<del> assert(parent, "ChunkGroup", "parent");
<del> assert(child, "ChunkGroup", "child");
<ide> if(parent.addChild(child)) {
<ide> child.addParent(parent);
<ide> }
<ide> };
<ide>
<ide> exports.connectChunkAndModule = (chunk, module) => {
<del> assert(chunk, "Chunk", "chunk");
<del> assert(module, "Module", "module");
<ide> if(module.addChunk(chunk)) {
<ide> chunk.addModule(module);
<ide> }
<ide> };
<ide>
<ide> exports.disconnectChunkAndModule = (chunk, module) => {
<del> assert(chunk, "Chunk", "chunk");
<del> assert(module, "Module", "module");
<ide> chunk.removeModule(module);
<ide> module.removeChunk(chunk);
<ide> };
<ide>
<ide> exports.connectDependenciesBlockAndChunkGroup = (depBlock, chunkGroup) => {
<del> assert(depBlock, "DependenciesBlock", "depBlock");
<del> assert(chunkGroup, "ChunkGroup", "chunkGroup");
<ide> if(chunkGroup.addBlock(depBlock)) {
<ide> depBlock.chunkGroup = chunkGroup;
<ide> }
<ide><path>lib/MainTemplate.js
<ide> module.exports = class MainTemplate extends Tapable {
<ide> return this.hooks.currentHash.call(JSON.stringify(hash.substr(0, length)), length);
<ide> }
<ide>
<del> // TODO remove and call hasChunkInGraph directly
<del> entryPointInChildren(chunk) {
<del> return chunk.hasChunkInGraph(chunk => chunk.hasEntryModule());
<del> }
<del>
<ide> getPublicPath(options) {
<ide> return this.hooks.assetPath.call(this.outputOptions.publicPath || "", options);
<ide> }
<ide><path>lib/Module.js
<ide> const sortByDebugId = (a, b) => {
<ide>
<ide> const getFrozenArray = set => Object.freeze(Array.from(set));
<ide>
<add>/* istanbul ignore next */
<ide> const getDebugIdent = set => {
<ide> set.sortWith(sortByDebugId);
<ide> const chunks = set;
<ide> class Module extends DependenciesBlock {
<ide> return this._chunks.has(chunk);
<ide> }
<ide>
<del> getChunkIdsIdent() {
<del> return this._chunks.getFromUnorderedCache(getDebugIdent);
<del> }
<del>
<ide> isEntryModule() {
<ide> for(const chunk of this._chunks) {
<ide> if(chunk.entryModule === this)
<ide><path>lib/util/createHash.js
<ide> class BulkUpdateDecorator {
<ide> }
<ide> }
<ide>
<add>/* istanbul ignore next */
<ide> class DebugHash {
<ide> constructor() {
<ide> this.string = ""; | 7 |
Text | Text | add optional callback to socket.end() | 9697c0820f015ccf898a3662305a0caa3cd9c208 | <ide><path>doc/api/net.md
<ide> listeners for that event will receive `exception` as an argument.
<ide> * {boolean} Indicates if the connection is destroyed or not. Once a
<ide> connection is destroyed no further data can be transferred using it.
<ide>
<del>### socket.end([data][, encoding])
<add>### socket.end([data][, encoding][, callback])
<ide> <!-- YAML
<ide> added: v0.1.90
<ide> -->
<ide>
<ide> * `data` {string|Buffer|Uint8Array}
<ide> * `encoding` {string} Only used when data is `string`. **Default:** `'utf8'`.
<add>* `callback` {Function} Optional callback for when the socket is finished.
<ide> * Returns: {net.Socket} The socket itself.
<ide>
<ide> Half-closes the socket. i.e., it sends a FIN packet. It is possible the
<ide> Returns `true` if input is a version 6 IP address, otherwise returns `false`.
<ide> [`socket.connect(path)`]: #net_socket_connect_path_connectlistener
<ide> [`socket.connect(port, host)`]: #net_socket_connect_port_host_connectlistener
<ide> [`socket.destroy()`]: #net_socket_destroy_exception
<del>[`socket.end()`]: #net_socket_end_data_encoding
<add>[`socket.end()`]: #net_socket_end_data_encoding_callback
<ide> [`socket.pause()`]: #net_socket_pause
<ide> [`socket.resume()`]: #net_socket_resume
<ide> [`socket.setEncoding()`]: #net_socket_setencoding_encoding | 1 |
Python | Python | fix funny printing | b2281119259681f04ffe178579efcb16a9b06f81 | <ide><path>spacy/cli/project.py
<ide> def run_commands(
<ide> elif len(command) and command[0] in ("pip", "pip3"):
<ide> command = [sys.executable, "-m", "pip", *command[1:]]
<ide> if not silent:
<del> print(" ".join(command))
<add> print(f"Running command: {command}")
<ide> run_command(command)
<ide>
<ide> | 1 |
Javascript | Javascript | export path.normalizearray for the uri module | 120492e5c222f8d1b0890ea0e8170dfe7370d74c | <ide><path>src/node.js
<ide> var pathModule = createInternalModule("path", function (exports) {
<ide> return exports.normalize(Array.prototype.join.call(arguments, "/"));
<ide> };
<ide>
<del> function normalizeArray (parts) {
<add> exports.normalizeArray = function (parts) {
<ide> var directories = [];
<ide> for (var i = 0; i < parts.length; i++) {
<ide> var directory = parts[i];
<ide> var pathModule = createInternalModule("path", function (exports) {
<ide> }
<ide> }
<ide> return directories;
<del> }
<add> };
<ide>
<ide> exports.normalize = function (path) {
<del> return normalizeArray(path.split("/")).join("/");
<add> return exports.normalizeArray(path.split("/")).join("/");
<ide> };
<ide>
<ide> exports.dirname = function (path) { | 1 |
Text | Text | remove references to autoflow | 78958fe0f54b7e118db033481b1bd04aab0a7f8f | <ide><path>README.md
<ide> React is a JavaScript library for building user interfaces.
<ide>
<ide> [Learn how to use React in your own project.](http://facebook.github.io/react/docs/getting-started.html)
<ide>
<del>## The `react` npm package has recently changed!
<del>
<del>If you're looking for jeffbski's [React.js](https://github.com/jeffbski/autoflow) project, it's now in `npm` as `autoflow` rather than `react`.
<del>
<ide> ## Examples
<ide>
<ide> We have several examples [on the website](http://facebook.github.io/react/). Here is the first one to get you started: | 1 |
Ruby | Ruby | remove old fixme comment | e6b585e838e475b005b73a8b1a4b4cc3edb09474 | <ide><path>lib/arel/select_manager.rb
<ide> def group *columns
<ide>
<ide> def from table
<ide> table = Nodes::SqlLiteral.new(table) if String === table
<del> # FIXME: this is a hack to support
<del> # test_with_two_tables_in_from_without_getting_double_quoted
<del> # from the AR tests.
<ide>
<ide> case table
<ide> when Nodes::Join | 1 |
PHP | PHP | add order assertions and test coverage | 75d7e077905f7ebba7a0b957934442d680699bda | <ide><path>src/Illuminate/Foundation/Testing/TestResponse.php
<ide> public function assertSee($value)
<ide> return $this;
<ide> }
<ide>
<add> /**
<add> * Assert that the given strings are contained in order within the response.
<add> *
<add> * @param array $values
<add> * @return $this
<add> */
<add> public function assertSeeInOrder(array $values)
<add> {
<add> $position = -1;
<add>
<add> foreach ($values as $value) {
<add> $valuePosition = mb_strpos($this->getContent(), $value);
<add>
<add> if ($valuePosition === false || $valuePosition < $position) {
<add> PHPUnit::fail(
<add> 'Failed asserting that \''.$this->getContent().
<add> '\' contains "'.$value.'" in specified order.'
<add> );
<add> }
<add>
<add> $position = $valuePosition;
<add> }
<add>
<add> return $this;
<add> }
<add>
<ide> /**
<ide> * Assert that the given string is contained within the response text.
<ide> *
<ide> public function assertSeeText($value)
<ide> return $this;
<ide> }
<ide>
<add> /**
<add> * Assert that the given strings are contained in order within the response text.
<add> *
<add> * @param array $values
<add> * @return $this
<add> */
<add> public function assertSeeTextInOrder(array $values)
<add> {
<add> $position = -1;
<add>
<add> foreach ($values as $value) {
<add> $valuePosition = mb_strpos(strip_tags($this->getContent()), $value);
<add>
<add> if ($valuePosition === false || $valuePosition < $position) {
<add> PHPUnit::fail(
<add> 'Failed asserting that \''.strip_tags($this->getContent()).
<add> '\' contains "'.$value.'" in specified order.'
<add> );
<add> }
<add>
<add> $position = $valuePosition;
<add> }
<add>
<add> return $this;
<add> }
<add>
<ide> /**
<ide> * Assert that the given string is not contained within the response.
<ide> *
<ide><path>tests/Foundation/FoundationTestResponseTest.php
<ide> public function testAssertViewHas()
<ide> $response->assertViewHas('foo');
<ide> }
<ide>
<add> public function testAssertSeeInOrder()
<add> {
<add> $baseResponse = tap(new Response, function ($response) {
<add> $response->setContent(\Mockery::mock(View::class, [
<add> 'render' => '<ul><li>foo</li><li>bar</li><li>baz</li></ul>',
<add> ]));
<add> });
<add>
<add> $response = TestResponse::fromBaseResponse($baseResponse);
<add>
<add> $response->assertSeeInOrder(['foo', 'bar', 'baz']);
<add>
<add> try {
<add> $response->assertSeeInOrder(['baz', 'bar', 'foo']);
<add> $response->assertSeeInOrder(['foo', 'qux', 'bar', 'baz']);
<add> } catch (\PHPUnit\Framework\AssertionFailedError $e) {
<add> return;
<add> }
<add>
<add> TestCase::fail('Assertion was expected to fail.');
<add> }
<add>
<ide> public function testAssertSeeText()
<ide> {
<ide> $baseResponse = tap(new Response, function ($response) {
<ide> public function testAssertSeeText()
<ide> $response->assertSeeText('foobar');
<ide> }
<ide>
<add> public function testAssertSeeTextInOrder()
<add> {
<add> $baseResponse = tap(new Response, function ($response) {
<add> $response->setContent(\Mockery::mock(View::class, [
<add> 'render' => 'foo<strong>bar</strong> baz',
<add> ]));
<add> });
<add>
<add> $response = TestResponse::fromBaseResponse($baseResponse);
<add>
<add> $response->assertSeeTextInOrder(['foobar', 'baz']);
<add>
<add> try {
<add> $response->assertSeeTextInOrder(['baz', 'foobar']);
<add> $response->assertSeeTextInOrder(['foobar', 'qux', 'baz']);
<add> } catch (\PHPUnit\Framework\AssertionFailedError $e) {
<add> return;
<add> }
<add>
<add> TestCase::fail('Assertion was expected to fail.');
<add> }
<add>
<ide> public function testAssertHeader()
<ide> {
<ide> $baseResponse = tap(new Response, function ($response) { | 2 |
Ruby | Ruby | fix comma splice in outdated brew message | a316a072517e4ea82d619c6b0f97104474d7f479 | <ide><path>Library/Homebrew/cmd/doctor.rb
<ide> def check_for_outdated_homebrew
<ide>
<ide> if Time.now.to_i - timestamp > 60 * 60 * 24 then <<-EOS.undent
<ide> Your Homebrew is outdated.
<del> You haven't updated for at least 24 hours, this is a long time in brewland!
<add> You haven't updated for at least 24 hours. This is a long time in brewland!
<ide> To update Homebrew, run `brew update`.
<ide> EOS
<ide> end | 1 |
PHP | PHP | update blade.php | e9e805f36f767ace53791e88830042fcf69b7513 | <ide><path>src/Illuminate/Support/Facades/Blade.php
<ide> * @method static void compile(string|null $path = null)
<ide> * @method static void component(string $class, string|null $alias = null, string $prefix = '')
<ide> * @method static void components(array $components, string $prefix = '')
<del> * @method static void anonymousComponentNamespace(string $directory, string $prefix)
<add> * @method static void anonymousComponentNamespace(string $directory, string $prefix = null)
<ide> * @method static void componentNamespace(string $prefix, string $directory = null)
<ide> * @method static void directive(string $name, callable $handler)
<ide> * @method static void extend(callable $compiler) | 1 |
Python | Python | fix xla fp16 and bf16 error checking | 639422187157bb19339e0ecdd6eeff7e1011a801 | <ide><path>src/transformers/training_args.py
<ide> def get_int_from_env(env_keys, default):
<ide> return default
<ide>
<ide>
<add>def get_xla_device_type(device: "torch.device") -> Optional[str]:
<add> """
<add> Returns the xla device type (CPU|GPU|TPU) or None if the device is a non-xla device.
<add> """
<add> if is_torch_tpu_available():
<add> return xm.xla_real_devices([device])[0].split(":")[0]
<add> return None
<add>
<add>
<ide> class OptimizerNames(ExplicitEnum):
<ide> """
<ide> Stores the acceptable string identifiers for optimizers.
<ide> def __post_init__(self):
<ide> self.framework == "pt"
<ide> and is_torch_available()
<ide> and (self.device.type != "cuda")
<del> and not (self.device.type == "xla" and "GPU_NUM_DEVICES" in os.environ)
<add> and (get_xla_device_type(self.device) != "GPU")
<ide> and (self.fp16 or self.fp16_full_eval)
<ide> ):
<ide> raise ValueError(
<ide> def __post_init__(self):
<ide> self.framework == "pt"
<ide> and is_torch_available()
<ide> and (self.device.type != "cuda")
<del> and not (self.device.type == "xla" and "GPU_NUM_DEVICES" in os.environ)
<add> and (get_xla_device_type(self.device) != "GPU")
<ide> and (self.device.type != "cpu")
<ide> and (self.bf16 or self.bf16_full_eval)
<ide> ): | 1 |
Go | Go | fix golint issues | 35f7e7c50a131494c9d530800e80bf2da42fb89c | <ide><path>builder/builder-next/adapters/snapshot/snapshot.go
<ide> func (s *snapshotter) Prepare(ctx context.Context, key, parent string, opts ...s
<ide> if err := s.opt.GraphDriver.Create(key, parent, nil); err != nil {
<ide> return err
<ide> }
<del> if err := s.db.Update(func(tx *bolt.Tx) error {
<add> return s.db.Update(func(tx *bolt.Tx) error {
<ide> b, err := tx.CreateBucketIfNotExists([]byte(key))
<ide> if err != nil {
<ide> return err
<ide> }
<del>
<del> if err := b.Put(keyParent, []byte(origParent)); err != nil {
<del> return err
<del> }
<del> return nil
<del> }); err != nil {
<del> return err
<del> }
<del> return nil
<add> return b.Put(keyParent, []byte(origParent))
<add> })
<ide> }
<ide>
<ide> func (s *snapshotter) chainID(key string) (layer.ChainID, bool) {
<ide> func (s *snapshotter) Commit(ctx context.Context, name, key string, opts ...snap
<ide> if err != nil {
<ide> return err
<ide> }
<del> if err := b.Put(keyCommitted, []byte(key)); err != nil {
<del> return err
<del> }
<del> return nil
<add> return b.Put(keyCommitted, []byte(key))
<ide> })
<ide> }
<ide>
<ide><path>integration/internal/container/container.go
<ide> type TestContainerConfig struct {
<ide> }
<ide>
<ide> // Create creates a container with the specified options
<add>// nolint: golint
<ide> func Create(t *testing.T, ctx context.Context, client client.APIClient, ops ...func(*TestContainerConfig)) string { // nolint: golint
<ide> t.Helper()
<ide> config := &TestContainerConfig{
<ide> func Create(t *testing.T, ctx context.Context, client client.APIClient, ops ...f
<ide> }
<ide>
<ide> // Run creates and start a container with the specified options
<add>// nolint: golint
<ide> func Run(t *testing.T, ctx context.Context, client client.APIClient, ops ...func(*TestContainerConfig)) string { // nolint: golint
<ide> t.Helper()
<ide> id := Create(t, ctx, client, ops...)
<ide><path>integration/internal/network/network.go
<ide> func Create(ctx context.Context, client client.APIClient, name string, ops ...fu
<ide> }
<ide>
<ide> // CreateNoError creates a network with the specified options and verifies there were no errors
<add>// nolint: golint
<ide> func CreateNoError(t *testing.T, ctx context.Context, client client.APIClient, name string, ops ...func(*types.NetworkCreate)) string { // nolint: golint
<ide> t.Helper()
<ide>
<ide><path>integration/plugin/graphdriver/external_test.go
<ide> func TestGraphdriverPluginV2(t *testing.T) {
<ide> testGraphDriver(t, client, ctx, plugin, nil)
<ide> }
<ide>
<add>// nolint: golint
<ide> func testGraphDriver(t *testing.T, c client.APIClient, ctx context.Context, driverName string, afterContainerRunFn func(*testing.T)) { //nolint: golint
<ide> id := container.Run(t, ctx, c, container.WithCmd("sh", "-c", "echo hello > /hello"))
<ide> | 4 |
Javascript | Javascript | apply strict mode in test-repl | f3b9a85f869beb2c3bc66cb796860f8b42f36dad | <ide><path>test/parallel/test-repl.js
<del>/* eslint-disable max-len, strict */
<add>/* eslint-disable max-len */
<add>'use strict';
<add>
<ide> const common = require('../common');
<ide> const assert = require('assert');
<ide> | 1 |
PHP | PHP | remove old fixture | b454d5c343ca44a07c781be953fd8997e8c35eaa | <ide><path>tests/Routing/fixtures/annotations/BasicController.php
<del><?php namespace App\Http\Controllers;
<del>
<del>/**
<del> * @Resource("foobar/photos", only={"index", "update"}, names={"index": "index.name"})
<del> * @Controller(domain="{id}.account.com")
<del> * @Middleware("FooMiddleware")
<del> * @Middleware("BarMiddleware")
<del> * @Middleware("BoomMiddleware", only={"index"})
<del> * @Where({"id": "regex"})
<del> */
<del>class BasicController {
<del>
<del> /**
<del> * @Middleware("BazMiddleware")
<del> * @return Response
<del> */
<del> public function index() {}
<del>
<del> /**
<del> * @return Response
<del> */
<del> public function update($id) {}
<del>
<del> /**
<del> * @Put("/more/{id}", after="log")
<del> * @Middleware("QuxMiddleware")
<del> */
<del> public function doMore($id) {}
<del>
<del>} | 1 |
Ruby | Ruby | remove node support | 36cef3c9744c7bc5d7215c5b79ea304a3aa2cfc2 | <ide><path>Library/Homebrew/dependency_collector.rb
<ide> class DependencyCollector
<ide> # Define the languages that we can handle as external dependencies.
<ide> LANGUAGE_MODULES = Set[
<del> :jruby, :lua, :node, :ocaml, :perl, :python, :python3, :ruby
<add> :jruby, :lua, :ocaml, :perl, :python, :python3, :ruby
<ide> ].freeze
<ide>
<ide> CACHE = {}
<ide><path>Library/Homebrew/requirements/language_module_requirement.rb
<ide> def the_test
<ide> when :jruby then %W[/usr/bin/env jruby -rubygems -e require\ '#{@import_name}']
<ide> when :lua then %W[/usr/bin/env luarocks-5.2 show #{@import_name}]
<ide> when :lua51 then %W[/usr/bin/env luarocks-5.1 show #{@import_name}]
<del> when :node then %W[/usr/bin/env node -e require('#{@import_name}');]
<ide> when :ocaml then %W[/usr/bin/env opam list --installed #{@import_name}]
<ide> when :perl then %W[/usr/bin/env perl -e use\ #{@import_name}]
<ide> when :python then %W[/usr/bin/env python -c import\ #{@import_name}]
<ide> def command_line
<ide> when :jruby then "jruby -S gem install"
<ide> when :lua then "luarocks-5.2 install"
<ide> when :lua51 then "luarocks-5.1 install"
<del> when :node then "npm install"
<ide> when :ocaml then "opam install"
<ide> when :perl then "cpan -i"
<ide> when :python then "pip install"
<ide><path>Library/Homebrew/test/test_language_module_requirement.rb
<ide> def test_bad_ruby_deps
<ide> def test_good_ruby_deps
<ide> assert_deps_pass "date" => :ruby
<ide> end
<del>
<del> if which("node")
<del> def test_bad_node_deps
<del> assert_deps_fail "notapackage" => :node
<del> end
<del>
<del> def test_good_node_deps
<del> assert_deps_pass "util" => :node
<del> end
<del> end
<ide> end | 3 |
Ruby | Ruby | fix rubocop style | 0c472ea6d9a5f96775c52d484064dddb871043e7 | <ide><path>Library/Homebrew/test/test_create.rb
<ide> def test_create
<ide>
<ide> formula_file = CoreTap.new.formula_dir/"testball.rb"
<ide> assert formula_file.exist?, "The formula source should have been created"
<del> assert_match %(sha256 "#{TESTBALL_SHA256}"), formula_file.read
<add> assert_match %Q(sha256 "#{TESTBALL_SHA256}"), formula_file.read
<ide> end
<ide> end | 1 |
Ruby | Ruby | extract output methods | 6790f6adb05206b005a3b7aeb13c4de6379568a7 | <ide><path>Library/Homebrew/cmd/deps.rb
<ide> module Homebrew extend self
<ide> def deps
<ide> if ARGV.include? '--installed'
<del> Formula.installed.each do |f|
<del> puts "#{f.name}: #{f.deps*' '}"
<del> end
<add> puts_deps Formula.installed
<ide> elsif ARGV.include? '--all'
<del> Formula.each do |f|
<del> puts "#{f.name}: #{f.deps*' '}"
<del> end
<add> puts_deps Formula
<ide> elsif ARGV.include? '--tree'
<ide> raise FormulaUnspecifiedError if ARGV.named.empty?
<del> ARGV.formulae.each do |f|
<del> puts f
<del> recursive_deps_tree(f, 1)
<del> puts
<del> end
<add> puts_deps_tree ARGV.formulae
<ide> else
<ide> raise FormulaUnspecifiedError if ARGV.named.empty?
<ide> all_deps = ARGV.formulae.map do |f|
<ide> def deps
<ide> end
<ide> end
<ide>
<add> def puts_deps(formulae)
<add> formulae.each { |f| puts "#{f.name}: #{f.deps*' '}" }
<add> end
<add>
<add> def puts_deps_tree(formulae)
<add> formulae.each do |f|
<add> puts f.name
<add> recursive_deps_tree(f, 1)
<add> puts
<add> end
<add> end
<add>
<ide> def recursive_deps_tree f, level
<ide> f.deps.default.each do |dep|
<ide> puts "| "*(level-1)+"|- "+dep.to_s | 1 |
Ruby | Ruby | apply suggestions from code review | 3e1c8ea8778d1a33f15212e33a701c969e5dfe34 | <ide><path>Library/Homebrew/upgrade.rb
<ide> def upgrade_formulae(
<ide> formula_installers = formulae_to_install.map do |formula|
<ide> Migrator.migrate_if_needed(formula, force: force)
<ide> begin
<del> fi = create_formula_installer(
<add> fi = create_and_fetch_formula_installer(
<ide> formula,
<ide> flags: flags,
<ide> installed_on_request: installed_on_request,
<ide> def print_upgrade_message(formula, fi_options)
<ide> EOS
<ide> end
<ide>
<del> def create_formula_installer(
<add> def create_and_fetch_formula_installer(
<ide> formula,
<ide> flags:,
<ide> installed_on_request: false,
<ide> def create_formula_installer(
<ide> }.compact,
<ide> )
<ide> end
<del> private_class_method :create_formula_installer
<add> private_class_method :create_and_fetch_formula_installer
<ide>
<ide> def upgrade_formula(formula_installer, verbose: false)
<ide> formula = formula_installer.formula | 1 |