Dataset schema:
content_type: string, categorical (8 distinct values)
main_lang: string, categorical (7 distinct values)
message: string, length 1 to 50
sha: string, length 40
patch: string, length 52 to 962k
file_count: int64, range 1 to 300
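The fields above describe how each record below is laid out: two language labels, a short commit message, a 40-character commit SHA, a patch string whose per-file sections are introduced by "<ide><path>" markers, and a count of files touched. The following is a minimal sketch of inspecting one such record; the record literal is abridged from the Node.js README row further down, and how the full dataset is actually loaded (local files, a datasets library, etc.) is an assumption left open here.

```python
# Minimal sketch: inspect a single record of the commit-patch dataset described above.
# The record below is abridged from one of the rows in this dump; loading the full
# dataset is out of scope for this example.

record = {
    "content_type": "Text",
    "main_lang": "Text",
    "message": "add lukekarrys to collaborators",
    "sha": "e749bbda2873c42816be9ffb6b0858561b553be5",
    "patch": "<ide><path>README.md <ide> For information about the governance of the Node.js project, see "
             "<add>* [lukekarrys](https://github.com/lukekarrys) -",
    "file_count": 1,
}

# Consistency checks implied by the schema: a 40-character commit SHA, and one
# "<ide><path>" marker in the patch per file touched by the commit.
assert len(record["sha"]) == 40
files_in_patch = record["patch"].count("<ide><path>")
assert files_in_patch == record["file_count"]
print(record["message"], "-", files_in_patch, "file(s) changed")
```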
Go
Go
remove group name from identity mapping
7ad0da705144c2a8a223fa0f8d0cad2fbffe4554
<ide><path>daemon/daemon_unix.go <ide> func setupRemappedRoot(config *config.Config) (*idtools.IdentityMapping, error) <ide> logrus.Warn("User namespaces: root cannot be remapped with itself; user namespaces are OFF") <ide> return &idtools.IdentityMapping{}, nil <ide> } <del> logrus.Infof("User namespaces: ID ranges will be mapped to subuid/subgid ranges of: %s:%s", username, groupname) <add> logrus.Infof("User namespaces: ID ranges will be mapped to subuid/subgid ranges of: %s", username) <ide> // update remapped root setting now that we have resolved them to actual names <ide> config.RemappedRoot = fmt.Sprintf("%s:%s", username, groupname) <ide> <del> // try with username:groupname, uid:groupname, username:gid, uid:gid, <del> // but keep the original error message (err) <del> mappings, err := idtools.NewIdentityMapping(username, groupname) <del> if err == nil { <del> return mappings, nil <del> } <del> user, lookupErr := idtools.LookupUser(username) <del> if lookupErr != nil { <add> mappings, err := idtools.NewIdentityMapping(username) <add> if err != nil { <ide> return nil, errors.Wrap(err, "Can't create ID mappings") <ide> } <del> logrus.Infof("Can't create ID mappings with username:groupname %s:%s, try uid:groupname %d:%s", username, groupname, user.Uid, groupname) <del> mappings, lookupErr = idtools.NewIdentityMapping(fmt.Sprintf("%d", user.Uid), groupname) <del> if lookupErr == nil { <del> return mappings, nil <del> } <del> logrus.Infof("Can't create ID mappings with uid:groupname %d:%s, try username:gid %s:%d", user.Uid, groupname, username, user.Gid) <del> mappings, lookupErr = idtools.NewIdentityMapping(username, fmt.Sprintf("%d", user.Gid)) <del> if lookupErr == nil { <del> return mappings, nil <del> } <del> logrus.Infof("Can't create ID mappings with username:gid %s:%d, try uid:gid %d:%d", username, user.Gid, user.Uid, user.Gid) <del> mappings, lookupErr = idtools.NewIdentityMapping(fmt.Sprintf("%d", user.Uid), fmt.Sprintf("%d", user.Gid)) <del> if lookupErr == nil { <del> return mappings, nil <del> } <del> return nil, errors.Wrap(err, "Can't create ID mappings") <add> return mappings, nil <ide> } <ide> return &idtools.IdentityMapping{}, nil <ide> } <ide><path>pkg/idtools/idtools.go <ide> type IdentityMapping struct { <ide> gids []IDMap <ide> } <ide> <del>// NewIdentityMapping takes a requested user and group name and <del>// using the data from /etc/sub{uid,gid} ranges, creates the <del>// proper uid and gid remapping ranges for that user/group pair <del>func NewIdentityMapping(username, groupname string) (*IdentityMapping, error) { <del> subuidRanges, err := parseSubuid(username) <del> if err != nil { <del> return nil, err <del> } <del> subgidRanges, err := parseSubgid(groupname) <del> if err != nil { <del> return nil, err <del> } <del> if len(subuidRanges) == 0 { <del> return nil, fmt.Errorf("No subuid ranges found for user %q", username) <del> } <del> if len(subgidRanges) == 0 { <del> return nil, fmt.Errorf("No subgid ranges found for group %q", groupname) <del> } <del> <del> return &IdentityMapping{ <del> uids: createIDMap(subuidRanges), <del> gids: createIDMap(subgidRanges), <del> }, nil <del>} <del> <ide> // NewIDMappingsFromMaps creates a new mapping from two slices <ide> // Deprecated: this is a temporary shim while transitioning to IDMapping <ide> func NewIDMappingsFromMaps(uids []IDMap, gids []IDMap) *IdentityMapping { <ide><path>pkg/idtools/idtools_unix.go <ide> import ( <ide> "io" <ide> "os" <ide> "path/filepath" <add> "strconv" <ide> "strings" <ide> "sync" <ide> 
"syscall" <ide> <ide> "github.com/docker/docker/pkg/system" <ide> "github.com/opencontainers/runc/libcontainer/user" <add> "github.com/pkg/errors" <ide> ) <ide> <ide> var ( <ide> func lazyChown(p string, uid, gid int, stat *system.StatT) error { <ide> } <ide> return os.Chown(p, uid, gid) <ide> } <add> <add>// NewIdentityMapping takes a requested username and <add>// using the data from /etc/sub{uid,gid} ranges, creates the <add>// proper uid and gid remapping ranges for that user/group pair <add>func NewIdentityMapping(username string) (*IdentityMapping, error) { <add> usr, err := LookupUser(username) <add> if err != nil { <add> return nil, fmt.Errorf("Could not get user for username %s: %v", username, err) <add> } <add> <add> uid := strconv.Itoa(usr.Uid) <add> <add> subuidRangesWithUserName, err := parseSubuid(username) <add> if err != nil { <add> return nil, err <add> } <add> subgidRangesWithUserName, err := parseSubgid(username) <add> if err != nil { <add> return nil, err <add> } <add> <add> subuidRangesWithUID, err := parseSubuid(uid) <add> if err != nil { <add> return nil, err <add> } <add> subgidRangesWithUID, err := parseSubgid(uid) <add> if err != nil { <add> return nil, err <add> } <add> <add> subuidRanges := append(subuidRangesWithUserName, subuidRangesWithUID...) <add> subgidRanges := append(subgidRangesWithUserName, subgidRangesWithUID...) <add> <add> if len(subuidRanges) == 0 { <add> return nil, errors.Errorf("no subuid ranges found for user %q", username) <add> } <add> if len(subgidRanges) == 0 { <add> return nil, errors.Errorf("no subgid ranges found for user %q", username) <add> } <add> <add> return &IdentityMapping{ <add> uids: createIDMap(subuidRanges), <add> gids: createIDMap(subgidRanges), <add> }, nil <add>} <ide><path>pkg/idtools/idtools_unix_test.go <ide> func TestNewIDMappings(t *testing.T) { <ide> tempUser, err := user.Lookup(tempUser) <ide> assert.Check(t, err) <ide> <del> gids, err := tempUser.GroupIds() <del> assert.Check(t, err) <del> group, err := user.LookupGroupId(gids[0]) <del> assert.Check(t, err) <del> <del> idMapping, err := NewIdentityMapping(tempUser.Username, group.Name) <add> idMapping, err := NewIdentityMapping(tempUser.Username) <ide> assert.Check(t, err) <ide> <ide> rootUID, rootGID, err := GetRootUIDGID(idMapping.UIDs(), idMapping.GIDs())
4
Text
Text
add lukekarrys to collaborators
e749bbda2873c42816be9ffb6b0858561b553be5
<ide><path>README.md <ide> For information about the governance of the Node.js project, see <ide> **LiviaMedeiros** <<[email protected]>> <ide> * [lpinca](https://github.com/lpinca) - <ide> **Luigi Pinca** <<[email protected]>> (he/him) <add>* [lukekarrys](https://github.com/lukekarrys) - <add> **Luke Karrys** <<[email protected]>> (he/him) <ide> * [Lxxyx](https://github.com/Lxxyx) - <ide> **Zijian Liu** <<[email protected]>> (he/him) <ide> * [marsonya](https://github.com/marsonya) -
1
Javascript
Javascript
make padding optional
58cd3349db447a392c8b5c34352460fe217fad19
<ide><path>src/core/crypto.js <ide> var AES128Cipher = (function AES128CipherClosure() { <ide> if (finalize) { <ide> // undo a padding that is described in RFC 2898 <ide> var lastBlock = result[result.length - 1]; <del> outputLength -= lastBlock[15]; <del> result[result.length - 1] = lastBlock.subarray(0, 16 - lastBlock[15]); <add> var psLen = lastBlock[15]; <add> if (psLen <= 16) { <add> for (i = 15, ii = 16 - psLen; i >= ii; --i) { <add> if (lastBlock[i] !== psLen) { <add> // Invalid padding, assume that the block has no padding. <add> psLen = 0; <add> break; <add> } <add> } <add> outputLength -= psLen; <add> result[result.length - 1] = lastBlock.subarray(0, 16 - psLen); <add> } <ide> } <ide> var output = new Uint8Array(outputLength); <ide> for (i = 0, j = 0, ii = result.length; i < ii; ++i, j += 16) { <ide> var AES256Cipher = (function AES256CipherClosure() { <ide> if (bufferLength < 16) { <ide> continue; <ide> } <del> // buffer is full, encrypting <add> // buffer is full, decrypting <ide> var plain = decrypt256(buffer, this.key); <ide> // xor-ing the IV vector to get plain text <ide> for (j = 0; j < 16; ++j) { <ide> var AES256Cipher = (function AES256CipherClosure() { <ide> if (finalize) { <ide> // undo a padding that is described in RFC 2898 <ide> var lastBlock = result[result.length - 1]; <del> outputLength -= lastBlock[15]; <del> result[result.length - 1] = lastBlock.subarray(0, 16 - lastBlock[15]); <add> var psLen = lastBlock[15]; <add> if (psLen <= 16) { <add> for (i = 15, ii = 16 - psLen; i >= ii; --i) { <add> if (lastBlock[i] !== psLen) { <add> // Invalid padding, assume that the block has no padding. <add> psLen = 0; <add> break; <add> } <add> } <add> outputLength -= psLen; <add> result[result.length - 1] = lastBlock.subarray(0, 16 - psLen); <add> } <ide> } <ide> var output = new Uint8Array(outputLength); <ide> for (i = 0, j = 0, ii = result.length; i < ii; ++i, j += 16) {
1
Ruby
Ruby
fix name_for_action in routing
8a8dac80bb9aa173617a456f187b3fff56b4c347
<ide><path>actionpack/lib/action_dispatch/routing/mapper.rb <ide> def name_for_action(as, action) #:nodoc: <ide> member_name = parent_resource.member_name <ide> end <ide> <del> name = @scope.action_name(name_prefix, prefix, collection_name, member_name) <add> action_name = @scope.action_name(name_prefix, prefix, collection_name, member_name) <add> candidate = action_name.select(&:present?).join('_') <ide> <del> if candidate = name.compact.join("_").presence <add> unless candidate.empty? <ide> # If a name was not explicitly given, we check if it is valid <ide> # and return nil in case it isn't. Otherwise, we pass the invalid name <ide> # forward so the underlying router engine treats it and raises an exception. <ide><path>actionpack/test/dispatch/routing_test.rb <ide> def test_scope_shallow_path_is_not_overwritten_by_path <ide> assert_equal '/bar/comments/1', comment_path('1') <ide> end <ide> <add> def test_resource_where_as_is_empty <add> draw do <add> resource :post, as: '' <add> <add> scope 'post', as: 'post' do <add> resource :comment, as: '' <add> end <add> end <add> <add> assert_equal '/post/new', new_path <add> assert_equal '/post/comment/new', new_post_path <add> end <add> <add> def test_resources_where_as_is_empty <add> draw do <add> resources :posts, as: '' <add> <add> scope 'posts', as: 'posts' do <add> resources :comments, as: '' <add> end <add> end <add> <add> assert_equal '/posts/new', new_path <add> assert_equal '/posts/comments/new', new_posts_path <add> end <add> <add> def test_scope_where_as_is_empty <add> draw do <add> scope 'post', as: '' do <add> resource :user <add> resources :comments <add> end <add> end <add> <add> assert_equal '/post/user/new', new_user_path <add> assert_equal '/post/comments/new', new_comment_path <add> end <add> <ide> private <ide> <ide> def draw(&block)
2
Ruby
Ruby
remove warning on arm
b02acb37c080a6f697a6b534bdc768bf4d8ce6c9
<ide><path>Library/Homebrew/extend/os/mac/diagnostic.rb <ide> def fatal_setup_build_environment_checks <ide> <ide> def supported_configuration_checks <ide> %w[ <del> check_for_unsupported_arch <ide> check_for_unsupported_macos <ide> ].freeze <ide> end <ide> def check_for_non_prefixed_findutils <ide> nil <ide> end <ide> <del> def check_for_unsupported_arch <del> return if Homebrew::EnvConfig.developer? <del> return unless Hardware::CPU.arm? <del> <del> <<~EOS <del> You are running macOS on a #{Hardware::CPU.arch} CPU architecture. <del> We do not provide support for this (yet). <del> Reinstall Homebrew under Rosetta 2 until we support it. <del> #{please_create_pull_requests} <del> EOS <del> end <del> <ide> def check_for_unsupported_macos <ide> return if Homebrew::EnvConfig.developer? <ide> <ide><path>Library/Homebrew/install.rb <ide> def check_prefix <ide> end <ide> <ide> def check_cpu <del> return if Hardware::CPU.intel? && Hardware::CPU.is_64_bit? <del> <del> # Handled by check_for_unsupported_arch in extend/os/mac/diagnostic.rb <del> return if Hardware::CPU.arm? <del> <ide> return unless Hardware::CPU.ppc? <ide> <ide> odie <<~EOS
2
PHP
PHP
use proper assertions.
21cc812403d343f46dc6ebe34b12a98fa99a7246
<ide><path>tests/Container/ContainerTest.php <ide> public function testContainerKnowsEntry() <ide> { <ide> $container = new Container; <ide> $container->bind('Illuminate\Tests\Container\IContainerContractStub', 'Illuminate\Tests\Container\ContainerImplementationStub'); <del> $this->assertEquals(true, $container->has('Illuminate\Tests\Container\IContainerContractStub')); <add> $this->assertTrue($container->has('Illuminate\Tests\Container\IContainerContractStub')); <ide> } <ide> <ide> public function testContainerCanBindAnyWord()
1
Javascript
Javascript
use more `for...of` loops in the code-base
37ebc2875603c19e600f5851ef83f15c2637839e
<ide><path>extensions/chromium/extension-router.js <ide> limitations under the License. <ide> url: CRX_BASE_URL + "*:*", <ide> }, <ide> function (tabsFromLastSession) { <del> for (var i = 0; i < tabsFromLastSession.length; ++i) { <del> chrome.tabs.reload(tabsFromLastSession[i].id); <add> for (const { id } of tabsFromLastSession) { <add> chrome.tabs.reload(id); <ide> } <ide> } <ide> ); <ide><path>extensions/chromium/pdfHandler.js <ide> function isPdfDownloadable(details) { <ide> * @returns {undefined|{name: string, value: string}} The header, if found. <ide> */ <ide> function getHeaderFromHeaders(headers, headerName) { <del> for (var i = 0; i < headers.length; ++i) { <del> var header = headers[i]; <add> for (const header of headers) { <ide> if (header.name.toLowerCase() === headerName) { <ide> return header; <ide> } <ide><path>extensions/chromium/telemetry.js <ide> limitations under the License. <ide> id = ""; <ide> var buf = new Uint8Array(5); <ide> crypto.getRandomValues(buf); <del> for (var i = 0; i < buf.length; ++i) { <del> var c = buf[i]; <add> for (const c of buf) { <ide> id += (c >>> 4).toString(16) + (c & 0xf).toString(16); <ide> } <ide> localStorage.telemetryDeduplicationId = id; <ide><path>external/cmapscompress/compress.js <ide> function parseCMap(binaryData) { <ide> const sign = fromHexDigit(num[num.length - 1]) & 1 ? 15 : 0; <ide> let c = 0; <ide> let result = ""; <del> for (let i = 0; i < num.length; i++) { <del> c = (c << 4) | fromHexDigit(num[i]); <add> for (const digit of num) { <add> c = (c << 4) | fromHexDigit(digit); <ide> result += toHexDigit(sign ? (c >> 1) ^ sign : c >> 1); <ide> c &= 1; <ide> } <ide><path>external/cmapscompress/optimize.js <ide> exports.optimizeCMap = function (data) { <ide> i++; <ide> data.body.splice(i, 0, newItem); <ide> } <del> for (let j = 0; j < subitems.length; j++) { <del> const code = subitems[j].code; <add> for (const subitem of subitems) { <add> const { code } = subitem; <ide> let q = 0; <ide> while (q < groups.length && groups[q] <= code) { <ide> q++; <ide> } <del> buckets[q].push(subitems[j]); <add> buckets[q].push(subitem); <ide> } <ide> } <ide> } <ide><path>gulpfile.js <ide> gulp.task("locale", function () { <ide> subfolders.sort(); <ide> let viewerOutput = ""; <ide> const locales = []; <del> for (let i = 0; i < subfolders.length; i++) { <del> const locale = subfolders[i]; <add> for (const locale of subfolders) { <ide> const dirPath = L10N_DIR + locale; <ide> if (!checkDir(dirPath)) { <ide> continue; <ide><path>src/core/cff_parser.js <ide> class CFFCompiler { <ide> compileDict(dict, offsetTracker) { <ide> const out = []; <ide> // The dictionary keys must be in a certain order. 
<del> const order = dict.order; <del> for (let i = 0; i < order.length; ++i) { <del> const key = order[i]; <add> for (const key of dict.order) { <ide> if (!(key in dict.values)) { <ide> continue; <ide> } <ide><path>src/core/type1_font.js <ide> class Type1Font { <ide> <ide> getCharset() { <ide> const charset = [".notdef"]; <del> const charstrings = this.charstrings; <del> for (let glyphId = 0; glyphId < charstrings.length; glyphId++) { <del> charset.push(charstrings[glyphId].glyphName); <add> for (const { glyphName } of this.charstrings) { <add> charset.push(glyphName); <ide> } <ide> return charset; <ide> } <ide><path>src/core/type1_parser.js <ide> const Type1Parser = (function Type1ParserClosure() { <ide> privateData, <ide> }, <ide> }; <del> let token, length, data, lenIV, encoded; <add> let token, length, data, lenIV; <ide> while ((token = this.getToken()) !== null) { <ide> if (token !== "/") { <ide> continue; <ide> const Type1Parser = (function Type1ParserClosure() { <ide> this.getToken(); // read in 'RD' or '-|' <ide> data = length > 0 ? stream.getBytes(length) : new Uint8Array(0); <ide> lenIV = program.properties.privateData.lenIV; <del> encoded = this.readCharStrings(data, lenIV); <add> const encoded = this.readCharStrings(data, lenIV); <ide> this.nextChar(); <ide> token = this.getToken(); // read in 'ND' or '|-' <ide> if (token === "noaccess") { <ide> const Type1Parser = (function Type1ParserClosure() { <ide> this.getToken(); // read in 'RD' or '-|' <ide> data = length > 0 ? stream.getBytes(length) : new Uint8Array(0); <ide> lenIV = program.properties.privateData.lenIV; <del> encoded = this.readCharStrings(data, lenIV); <add> const encoded = this.readCharStrings(data, lenIV); <ide> this.nextChar(); <ide> token = this.getToken(); // read in 'NP' or '|' <ide> if (token === "noaccess") { <ide> const Type1Parser = (function Type1ParserClosure() { <ide> } <ide> } <ide> <del> for (let i = 0; i < charstrings.length; i++) { <del> const glyph = charstrings[i].glyph; <del> encoded = charstrings[i].encoded; <add> for (const { encoded, glyph } of charstrings) { <ide> const charString = new Type1CharString(); <ide> const error = charString.convert( <ide> encoded, <ide><path>src/core/worker.js <ide> class WorkerMessageHandler { <ide> // There may be a chance that `newPdfManager` is not initialized for <ide> // the first few runs of `readchunk` block of code. Be sure to send <ide> // all cached chunks, if any, to chunked_stream via pdf_manager. 
<del> for (let i = 0; i < cachedChunks.length; i++) { <del> newPdfManager.sendProgressiveData(cachedChunks[i]); <add> for (const chunk of cachedChunks) { <add> newPdfManager.sendProgressiveData(chunk); <ide> } <ide> <ide> cachedChunks = []; <ide><path>src/display/canvas.js <ide> class CanvasGraphics { <ide> <ide> ctx.save(); <ide> ctx.beginPath(); <del> for (let i = 0; i < paths.length; i++) { <del> const path = paths[i]; <add> for (const path of paths) { <ide> ctx.setTransform.apply(ctx, path.transform); <ide> ctx.translate(path.x, path.y); <ide> path.addToPath(ctx, path.fontSize); <ide><path>test/resources/reftest-analyzer.js <ide> window.onload = function () { <ide> <ide> // Bind an event handler to each image link <ide> const images = document.getElementsByClassName("image"); <del> for (let i = 0; i < images.length; i++) { <del> images[i].addEventListener( <add> for (const image of images) { <add> image.addEventListener( <ide> "click", <ide> function (e) { <ide> showImages(e.target.id); <ide> window.onload = function () { <ide> function flashPixels(on) { <ide> const stroke = on ? "#FF0000" : "#CCC"; <ide> const strokeWidth = on ? "2px" : "1px"; <del> for (let i = 0; i < gFlashingPixels.length; i++) { <del> gFlashingPixels[i].setAttribute("stroke", stroke); <del> gFlashingPixels[i].setAttribute("stroke-width", strokeWidth); <add> for (const pixel of gFlashingPixels) { <add> pixel.setAttribute("stroke", stroke); <add> pixel.setAttribute("stroke-width", strokeWidth); <ide> } <ide> } <ide> <ide><path>test/stats/statcmp.js <ide> function parseOptions() { <ide> <ide> function group(stats, groupBy) { <ide> const vals = []; <del> for (let i = 0; i < stats.length; i++) { <del> const curStat = stats[i]; <add> for (const curStat of stats) { <ide> const keyArr = []; <del> for (let j = 0; j < groupBy.length; j++) { <del> keyArr.push(curStat[groupBy[j]]); <add> for (const entry of groupBy) { <add> keyArr.push(curStat[entry]); <ide> } <ide> const key = keyArr.join(","); <del> if (vals[key] === undefined) { <del> vals[key] = []; <del> } <del> vals[key].push(curStat.time); <add> (vals[key] ||= []).push(curStat.time); <ide> } <ide> return vals; <ide> } <ide> function stat(baseline, current) { <ide> return s.length; <ide> }); <ide> rows.push(labels); <del> for (let k = 0; k < keys.length; k++) { <del> const key = keys[k]; <add> for (const key of keys) { <ide> const baselineMean = mean(baselineGroup[key]); <ide> const currentMean = mean(currentGroup[key]); <ide> const row = key.split(","); <ide> function stat(baseline, current) { <ide> // print output <ide> console.log("-- Grouped By " + options.groupBy.join(", ") + " --"); <ide> const groupCount = options.groupBy.length; <del> for (let r = 0; r < rows.length; r++) { <del> const row = rows[r]; <add> for (const row of rows) { <ide> for (let i = 0; i < row.length; i++) { <ide> row[i] = pad(row[i], width[i], i < groupCount ? "right" : "left"); <ide> } <ide><path>test/unit/function_spec.js <ide> describe("function", function () { <ide> "destOffset", <ide> compiledCode <ide> ); <del> for (let i = 0; i < samples.length; i++) { <del> const out = new Float32Array(samples[i].output.length); <del> fn(samples[i].input, 0, out, 0); <del> expect(Array.prototype.slice.call(out, 0)).toEqual(samples[i].output); <add> for (const { input, output } of samples) { <add> const out = new Float32Array(output.length); <add> fn(input, 0, out, 0); <add> expect(Array.prototype.slice.call(out, 0)).toEqual(output); <ide> } <ide> } <ide> }
14
Javascript
Javascript
create components lazily
322d0e00e237a240377995a71aedba99301c3780
<ide><path>Libraries/Animated/src/Animated.js <ide> * This source code is licensed under the MIT license found in the <ide> * LICENSE file in the root directory of this source tree. <ide> * <del> * @flow strict-local <add> * @flow <ide> * @format <ide> */ <ide> <ide> 'use strict'; <ide> <ide> const AnimatedImplementation = require('AnimatedImplementation'); <del>const FlatList = require('FlatList'); <del>const Image = require('Image'); <del>const ScrollView = require('ScrollView'); <del>const SectionList = require('SectionList'); <del>const Text = require('Text'); <del>const View = require('View'); <ide> <ide> module.exports = { <add> get FlatList() { <add> return require('AnimatedFlatList'); <add> }, <add> get Image() { <add> return require('AnimatedImage'); <add> }, <add> get ScrollView() { <add> return require('AnimatedScrollView'); <add> }, <add> get SectionList() { <add> return require('AnimatedSectionList'); <add> }, <add> get Text() { <add> return require('AnimatedText'); <add> }, <add> get View() { <add> return require('AnimatedView'); <add> }, <ide> ...AnimatedImplementation, <del> View: AnimatedImplementation.createAnimatedComponent(View), <del> Text: AnimatedImplementation.createAnimatedComponent(Text), <del> Image: AnimatedImplementation.createAnimatedComponent(Image), <del> ScrollView: AnimatedImplementation.createAnimatedComponent(ScrollView), <del> FlatList: AnimatedImplementation.createAnimatedComponent(FlatList), <del> SectionList: AnimatedImplementation.createAnimatedComponent(SectionList), <ide> }; <ide><path>Libraries/Animated/src/components/AnimatedFlatList.js <add>/** <add> * Copyright (c) 2015-present, Facebook, Inc. <add> * <add> * This source code is licensed under the MIT license found in the <add> * LICENSE file in the root directory of this source tree. <add> * <add> * @flow strict-local <add> * @format <add> */ <add> <add>'use strict'; <add> <add>const FlatList = require('FlatList'); <add> <add>const createAnimatedComponent = require('createAnimatedComponent'); <add> <add>module.exports = createAnimatedComponent(FlatList); <ide><path>Libraries/Animated/src/components/AnimatedImage.js <add>/** <add> * Copyright (c) 2015-present, Facebook, Inc. <add> * <add> * This source code is licensed under the MIT license found in the <add> * LICENSE file in the root directory of this source tree. <add> * <add> * @flow strict-local <add> * @format <add> */ <add> <add>'use strict'; <add> <add>const Image = require('Image'); <add> <add>const createAnimatedComponent = require('createAnimatedComponent'); <add> <add>module.exports = createAnimatedComponent(Image); <ide><path>Libraries/Animated/src/components/AnimatedScrollView.js <add>/** <add> * Copyright (c) 2015-present, Facebook, Inc. <add> * <add> * This source code is licensed under the MIT license found in the <add> * LICENSE file in the root directory of this source tree. <add> * <add> * @flow strict-local <add> * @format <add> */ <add> <add>'use strict'; <add> <add>const ScrollView = require('ScrollView'); <add> <add>const createAnimatedComponent = require('createAnimatedComponent'); <add> <add>module.exports = createAnimatedComponent(ScrollView); <ide><path>Libraries/Animated/src/components/AnimatedSectionList.js <add>/** <add> * Copyright (c) 2015-present, Facebook, Inc. <add> * <add> * This source code is licensed under the MIT license found in the <add> * LICENSE file in the root directory of this source tree. 
<add> * <add> * @flow strict-local <add> * @format <add> */ <add> <add>'use strict'; <add> <add>const SectionList = require('SectionList'); <add> <add>const createAnimatedComponent = require('createAnimatedComponent'); <add> <add>module.exports = createAnimatedComponent(SectionList); <ide><path>Libraries/Animated/src/components/AnimatedText.js <add>/** <add> * Copyright (c) 2015-present, Facebook, Inc. <add> * <add> * This source code is licensed under the MIT license found in the <add> * LICENSE file in the root directory of this source tree. <add> * <add> * @flow strict-local <add> * @format <add> */ <add> <add>'use strict'; <add> <add>const Text = require('Text'); <add> <add>const createAnimatedComponent = require('createAnimatedComponent'); <add> <add>module.exports = createAnimatedComponent(Text); <ide><path>Libraries/Animated/src/components/AnimatedView.js <add>/** <add> * Copyright (c) 2015-present, Facebook, Inc. <add> * <add> * This source code is licensed under the MIT license found in the <add> * LICENSE file in the root directory of this source tree. <add> * <add> * @flow strict-local <add> * @format <add> */ <add> <add>'use strict'; <add> <add>const View = require('View'); <add> <add>const createAnimatedComponent = require('createAnimatedComponent'); <add> <add>module.exports = createAnimatedComponent(View);
7
Text
Text
fix typos in “mixins considered harmful”
f02cbba9fd764540d631a326aaf11f9cab1bdfb1
<ide><path>docs/_posts/2016-07-13-mixins-considered-harmful.md <ide> var UserRow = React.createClass({ <ide> return ( <ide> <div> <ide> {this.renderHeader() /* Defined by RowMixin */} <del> <h2>{this.props.user.biography} <add> <h2>{this.props.user.biography}</h2> <ide> </div> <ide> ) <ide> } <ide> function UserRow(props) { <ide> return ( <ide> <div> <ide> <RowHeader text={props.user.fullName} /> <del> <h2>{props.user.biography} <add> <h2>{props.user.biography}</h2> <ide> </div> <ide> ); <ide> } <ide> var ColorMixin = { <ide> var Button = React.createClass({ <ide> mixins: [ColorMixin], <ide> <del> render: function () { <add> render: function() { <ide> var theme = this.getLuminance(this.props.color) > 160 ? 'dark' : 'light'; <ide> return ( <ide> <div className={theme}> <ide> Put utility functions into regular JavaScript modules and import them. This also <ide> var getLuminance = require('../utils/getLuminance'); <ide> <ide> var Button = React.createClass({ <del> render: function () { <add> render: function() { <ide> var theme = getLuminance(this.props.color) > 160 ? 'dark' : 'light'; <ide> return ( <ide> <div className={theme}>
1
Text
Text
update actiontext docs [ci skip]
695b6e0d8693548258cb46b821009102047e8ccb
<ide><path>guides/source/action_text_overview.md <ide> happens after every keystroke, and avoids the need to use execCommand at all. <ide> <ide> ## Installation <ide> <del>Run `rails action_text:install` to add the Yarn package and copy over the necessary migration. <del>Also, you need to set up Active Storage for embedded images and other attachments. <del>Please refer to the [Active Storage Overview](active_storage_overview.html) guide. <add>Run `rails action_text:install` to add the Yarn package and copy over the necessary migration. Also, you need to set up Active Storage for embedded images and other attachments. Please refer to the [Active Storage Overview](active_storage_overview.html) guide. <add> <add>### Installation with Webpacker <add> <add>Both `trix` and `@rails/actiontext` should be required in your JavaScript pack. <add> <add>```js <add>// application.js <add>require("trix") <add>require("@rails/actiontext") <add>``` <add> <add>In order for the built-in CSS styles to work, you'll need to use webpack-compatible import syntax within the generated `actiontext.scss` file. <add> <add>```scss <add>@import "trix/dist/trix"; <add>``` <add> <add>Additionally you'll also need to ensure that the `actiontext.scss` file is imported into your stylesheet pack. <add> <add>``` <add>// application.scss <add>@import "./actiontext.scss"; <add>``` <add> <ide> <ide> ## Examples <ide> <ide> class Message < ApplicationRecord <ide> end <ide> ``` <ide> <add>Note that you don't need to add a `content` field to your `messages` table. <add> <ide> Then refer to this field in the form for the model: <ide> <ide> ```erb
1
Java
Java
move the codes out of the finally block
54d4224297fcd4c1f2c6770b1f0766dc3402c374
<ide><path>src/main/java/rx/internal/operators/OperatorMerge.java <ide> private void handleScalarSynchronousObservable(ScalarSynchronousObservable<? ext <ide> private void handleScalarSynchronousObservableWithoutRequestLimits(ScalarSynchronousObservable<? extends T> t) { <ide> T value = t.get(); <ide> if (getEmitLock()) { <add> boolean moreToDrain; <ide> try { <ide> actual.onNext(value); <del> return; <ide> } finally { <del> if (releaseEmitLock()) { <del> drainQueuesIfNeeded(); <del> } <del> request(1); <add> moreToDrain = releaseEmitLock(); <ide> } <add> if (moreToDrain) { <add> drainQueuesIfNeeded(); <add> } <add> request(1); <add> return; <ide> } else { <ide> initScalarValueQueueIfNeeded(); <ide> try { <ide> private void handleScalarSynchronousObservableWithoutRequestLimits(ScalarSynchro <ide> private void handleScalarSynchronousObservableWithRequestLimits(ScalarSynchronousObservable<? extends T> t) { <ide> if (getEmitLock()) { <ide> boolean emitted = false; <add> boolean moreToDrain; <add> boolean isReturn = false; <ide> try { <ide> long r = mergeProducer.requested; <ide> if (r > 0) { <ide> emitted = true; <ide> actual.onNext(t.get()); <ide> MergeProducer.REQUESTED.decrementAndGet(mergeProducer); <ide> // we handle this Observable without ever incrementing the wip or touching other machinery so just return here <del> return; <add> isReturn = true; <ide> } <ide> } finally { <del> if (releaseEmitLock()) { <del> drainQueuesIfNeeded(); <del> } <del> if (emitted) { <del> request(1); <del> } <add> moreToDrain = releaseEmitLock(); <add> } <add> if (moreToDrain) { <add> drainQueuesIfNeeded(); <add> } <add> if (emitted) { <add> request(1); <add> } <add> if (isReturn) { <add> return; <ide> } <ide> } <ide> <ide> private boolean drainQueuesIfNeeded() { <ide> while (true) { <ide> if (getEmitLock()) { <ide> int emitted = 0; <add> boolean moreToDrain; <ide> try { <ide> emitted = drainScalarValueQueue(); <ide> drainChildrenQueues(); <ide> } finally { <del> boolean moreToDrain = releaseEmitLock(); <del> // request outside of lock <del> if (emitted > 0) { <del> request(emitted); <del> } <del> if (!moreToDrain) { <del> return true; <del> } <del> // otherwise we'll loop and get whatever was added <add> moreToDrain = releaseEmitLock(); <add> } <add> // request outside of lock <add> if (emitted > 0) { <add> request(emitted); <add> } <add> if (!moreToDrain) { <add> return true; <ide> } <add> // otherwise we'll loop and get whatever was added <ide> } else { <ide> return false; <ide> }
1
Text
Text
fix string interpolation in testing guide
e16de199aedf1139f1f8066d385e5fb83d3d1b93
<ide><path>guides/source/testing.md <ide> One good place to store them is `test/lib` or `test/test_helpers`. <ide> # test/test_helpers/multiple_assertions.rb <ide> module MultipleAssertions <ide> def assert_multiple_of_forty_two(number) <del> assert (number % 42 == 0), 'expected #{number} to be a multiple of 42' <add> assert (number % 42 == 0), "expected #{number} to be a multiple of 42" <ide> end <ide> end <ide> ```
1
Javascript
Javascript
defer only keydown, throttle settimeouts
4e83399570391fe4a41ce4dc27c8a191f761d26d
<ide><path>src/widget/input.js <ide> var ngModelInstantDirective = ['$browser', function($browser) { <ide> return { <ide> require: 'ngModel', <ide> link: function(scope, element, attr, ctrl) { <del> element.bind('keydown change input', function(event) { <del> var key = event.keyCode; <del> <del> // command modifiers arrows <del> if (key === 91 || (15 < key && key < 19) || (37 <= key && key <= 40)) return; <del> <del> $browser.defer(function() { <del> var touched = ctrl.touch(), <add> var handler = function() { <add> var touched = ctrl.touch(), <ide> value = trim(element.val()); <ide> <ide> if (ctrl.viewValue !== value) { <ide> var ngModelInstantDirective = ['$browser', function($browser) { <ide> } else if (touched) { <ide> scope.$apply(); <ide> } <del> }); <add> }; <add> <add> var timeout; <add> element.bind('keydown', function(event) { <add> var key = event.keyCode; <add> <add> // command modifiers arrows <add> if (key === 91 || (15 < key && key < 19) || (37 <= key && key <= 40)) return; <add> <add> if (!timeout) { <add> timeout = $browser.defer(function() { <add> handler(); <add> timeout = null; <add> }); <add> } <ide> }); <add> <add> element.bind('change input', handler); <ide> } <ide> }; <ide> }]; <ide><path>test/widget/inputSpec.js <ide> describe('input', function() { <ide> <ide> inputElm.val('value2'); <ide> browserTrigger(inputElm, 'change'); <del> $browser.defer.flush(); <ide> expect(scope.value).toBe('value2'); <ide> <ide> if (msie < 9) return; <ide> <ide> inputElm.val('value3'); <ide> browserTrigger(inputElm, 'input'); <del> $browser.defer.flush(); <ide> expect(scope.value).toBe('value3'); <ide> })); <ide> });
2
Python
Python
fix typo in examples/run_glue.py args declaration
fbf5455a8607fa660aacbf06c16f6fe23758b13d
<ide><path>examples/run_glue.py <ide> def main(): <ide> parser.add_argument("--learning_rate", default=5e-5, type=float, <ide> help="The initial learning rate for Adam.") <ide> parser.add_argument("--weight_decay", default=0.0, type=float, <del> help="Weight deay if we apply some.") <add> help="Weight decay if we apply some.") <ide> parser.add_argument("--adam_epsilon", default=1e-8, type=float, <ide> help="Epsilon for Adam optimizer.") <ide> parser.add_argument("--max_grad_norm", default=1.0, type=float,
1
Javascript
Javascript
fix typo in convert-argv.js
ef4943bff73c28c209e283666b8744fd2b0951bb
<ide><path>bin/convert-argv.js <ide> module.exports = function(optimist, argv, convertOptions) { <ide> } <ide> <ide> if(typeof options !== "object" || options === null) { <del> console.log("Config did not export a object."); <add> console.log("Config did not export an object."); <ide> process.exit(-1); <ide> } <ide>
1
Javascript
Javascript
fix e2e test for example
977e2f55de7075b7dbfbab37e40a632bbaf0252f
<ide><path>src/ng/directive/ngClass.js <ide> function classDirective(name, selector) { <ide> expect(element('.doc-example-live p:first').prop('className')).not().toMatch(/bold/); <ide> expect(element('.doc-example-live p:first').prop('className')).not().toMatch(/red/); <ide> <del> input('bold').check(); <add> input('important').check(); <ide> expect(element('.doc-example-live p:first').prop('className')).toMatch(/bold/); <ide> <del> input('red').check(); <add> input('error').check(); <ide> expect(element('.doc-example-live p:first').prop('className')).toMatch(/red/); <ide> }); <ide>
1
Text
Text
add gireeshpunathil to collaborators
50ba13ef0ef856147b5ffd354afd343a0501fb1c
<ide><path>README.md <ide> more information about the governance of the Node.js project, see <ide> **Wyatt Preul** &lt;[email protected]&gt; <ide> * [gibfahn](https://github.com/gibfahn) - <ide> **Gibson Fahnestock** &lt;[email protected]&gt; (he/him) <add>* [gireeshpunathil](https://github.com/gireeshpunathil) - <add>**Gireesh Punathil** &lt;[email protected]&gt; (he/him) <ide> * [iarna](https://github.com/iarna) - <ide> **Rebecca Turner** &lt;[email protected]&gt; <ide> * [imran-iq](https://github.com/imran-iq) -
1
PHP
PHP
simplify widget resolution code
4018caaada163addb1781077d60dec9606784b22
<ide><path>src/View/Widget/WidgetLocator.php <ide> public function add(array $widgets): void <ide> */ <ide> public function get(string $name) <ide> { <del> if (!isset($this->_widgets[$name]) && empty($this->_widgets['_default'])) { <del> throw new RuntimeException(sprintf('Unknown widget "%s"', $name)); <del> } <ide> if (!isset($this->_widgets[$name])) { <add> if (empty($this->_widgets['_default'])) { <add> throw new RuntimeException(sprintf('Unknown widget `%s`', $name)); <add> } <add> <ide> $name = '_default'; <ide> } <del> $this->_widgets[$name] = $this->_resolveWidget($this->_widgets[$name]); <ide> <del> return $this->_widgets[$name]; <add> if ($this->_widgets[$name] instanceof WidgetInterface) { <add> return $this->_widgets[$name]; <add> } <add> <add> return $this->_widgets[$name] = $this->_resolveWidget($this->_widgets[$name]); <ide> } <ide> <ide> /** <ide> public function clear(): void <ide> */ <ide> protected function _resolveWidget($widget): object <ide> { <del> $type = gettype($widget); <del> if ($type === 'object') { <del> return $widget; <del> } <del> <del> if ($type === 'string') { <add> if (is_string($widget)) { <ide> $widget = [$widget]; <ide> } <ide>
1
Text
Text
update examples and more information
6eb660ba0bfaa3eecf5559ef66c6f0369ff68186
<ide><path>guide/english/mathematics/example-all-the-ways-you-can-flip-a-coin/index.md <ide> title: Example All the Ways You Can Flip a Coin <ide> --- <ide> ## Example All the Ways You Can Flip a Coin <ide> <del>This is a stub. <a href='https://github.com/freecodecamp/guides/tree/master/src/pages/mathematics/example-all-the-ways-you-can-flip-a-coin/index.md' target='_blank' rel='nofollow'>Help our community expand it</a>. <add>In the experiment of flipping a coin, the mutually exclusive outcomes are the coin landing either heads up or tails up. <ide> <del><a href='https://github.com/freecodecamp/guides/blob/master/README.md' target='_blank' rel='nofollow'>This quick style guide will help ensure your pull request gets accepted</a>. <add>When we flip a coin a very large number of times, we find that we get half heads, and half tails. We conclude that the probability to flip a head is 1/2, and the probability to flip a tail is 1/2. <ide> <del><!-- The article goes here, in GitHub-flavored Markdown. Feel free to add YouTube videos, images, and CodePen/JSBin embeds --> <add>Consider the experiment of flipping of **n** coins where we can enumerate all possible outcomes as follows, where H indicates a head, and T a tail: <ide> <del>#### More Information: <del><!-- Please add any articles you think might be helpful to read before writing the article --> <add>* **n = 1** | H T <add>* **n = 2** | HH HT TH TT <add>* **n = 3** | HHH THH HTH HHT TTH THT HTT TTT <add>* **n = 4** | HHHH THHH HHHT THHT HHTH THTH HHTT THTT HTHH TTHH HTHT TTHT HTTH TTTH HTTT TTTT <add> <add>The formula for all possible outcomes of flipping n coins is 2 to the nth power. <ide> <ide> <add>#### More Information: <add>- [Theory of Probability](http://www.pas.rochester.edu/~stte/phy104-F00/notes-2.html) <add>
1
Text
Text
update cson link
6242bd7eb768f50774ad2bf9b148e48f343bcd59
<ide><path>docs/customizing-atom.md <ide> This file can also be named _styles.css_ and contain CSS. <ide> [creating-a-package]: creating-a-package.md <ide> [create-theme]: creating-a-theme.md <ide> [LESS]: http://www.lesscss.org <del>[CSON]: https://github.com/bevry/cson <add>[CSON]: https://github.com/atom/season <ide> [CoffeeScript]: http://coffeescript.org/
1
Python
Python
add layerscale to nat/dinat
11f3ec7224c83c9e5c379a774b9d3984e68d26fa
<ide><path>src/transformers/models/dinat/configuration_dinat.py <ide> class DinatConfig(PretrainedConfig): <ide> The standard deviation of the truncated_normal_initializer for initializing all weight matrices. <ide> layer_norm_eps (`float`, *optional*, defaults to 1e-12): <ide> The epsilon used by the layer normalization layers. <add> layer_scale_init_value (`float`, *optional*, defaults to 0.0): <add> The initial value for the layer scale. Disabled if <=0. <ide> <ide> Example: <ide> <ide> def __init__( <ide> patch_norm=True, <ide> initializer_range=0.02, <ide> layer_norm_eps=1e-5, <add> layer_scale_init_value=0.0, <ide> **kwargs <ide> ): <ide> super().__init__(**kwargs) <ide> def __init__( <ide> # we set the hidden_size attribute in order to make Dinat work with VisionEncoderDecoderModel <ide> # this indicates the channel dimension after the last stage of the model <ide> self.hidden_size = int(embed_dim * 2 ** (len(depths) - 1)) <add> self.layer_scale_init_value = layer_scale_init_value <ide><path>src/transformers/models/dinat/modeling_dinat.py <ide> def __init__(self, config, dim, num_heads, dilation, drop_path_rate=0.0): <ide> self.layernorm_after = nn.LayerNorm(dim, eps=config.layer_norm_eps) <ide> self.intermediate = DinatIntermediate(config, dim) <ide> self.output = DinatOutput(config, dim) <add> self.layer_scale_parameters = ( <add> nn.Parameter(config.layer_scale_init_value * torch.ones((2, dim)), requires_grad=True) <add> if config.layer_scale_init_value > 0 <add> else None <add> ) <ide> <ide> def maybe_pad(self, hidden_states, height, width): <ide> window_size = self.window_size <ide> def forward( <ide> if was_padded: <ide> attention_output = attention_output[:, :height, :width, :].contiguous() <ide> <add> if self.layer_scale_parameters is not None: <add> attention_output = self.layer_scale_parameters[0] * attention_output <add> <ide> hidden_states = shortcut + self.drop_path(attention_output) <ide> <ide> layer_output = self.layernorm_after(hidden_states) <del> layer_output = self.intermediate(layer_output) <del> layer_output = hidden_states + self.output(layer_output) <add> layer_output = self.output(self.intermediate(layer_output)) <add> <add> if self.layer_scale_parameters is not None: <add> layer_output = self.layer_scale_parameters[1] * layer_output <add> <add> layer_output = hidden_states + self.drop_path(layer_output) <ide> <ide> layer_outputs = (layer_output, attention_outputs[1]) if output_attentions else (layer_output,) <ide> return layer_outputs <ide><path>src/transformers/models/nat/__init__.py <ide> # <ide> # Unless required by applicable law or agreed to in writing, software <ide> # distributed under the License is distributed on an "AS IS" BASIS, <del># distributed under the License is distributed on an "AS IS" BASIS, <ide> # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <ide> # See the License for the specific language governing permissions and <ide> # limitations under the License. <ide><path>src/transformers/models/nat/configuration_nat.py <ide> class NatConfig(PretrainedConfig): <ide> The standard deviation of the truncated_normal_initializer for initializing all weight matrices. <ide> layer_norm_eps (`float`, *optional*, defaults to 1e-12): <ide> The epsilon used by the layer normalization layers. <add> layer_scale_init_value (`float`, *optional*, defaults to 0.0): <add> The initial value for the layer scale. Disabled if <=0. 
<ide> <ide> Example: <ide> <ide> def __init__( <ide> patch_norm=True, <ide> initializer_range=0.02, <ide> layer_norm_eps=1e-5, <add> layer_scale_init_value=0.0, <ide> **kwargs <ide> ): <ide> super().__init__(**kwargs) <ide> def __init__( <ide> # we set the hidden_size attribute in order to make Nat work with VisionEncoderDecoderModel <ide> # this indicates the channel dimension after the last stage of the model <ide> self.hidden_size = int(embed_dim * 2 ** (len(depths) - 1)) <add> self.layer_scale_init_value = layer_scale_init_value <ide><path>src/transformers/models/nat/modeling_nat.py <ide> def __init__(self, config, dim, num_heads, drop_path_rate=0.0): <ide> self.layernorm_after = nn.LayerNorm(dim, eps=config.layer_norm_eps) <ide> self.intermediate = NatIntermediate(config, dim) <ide> self.output = NatOutput(config, dim) <add> self.layer_scale_parameters = ( <add> nn.Parameter(config.layer_scale_init_value * torch.ones((2, dim)), requires_grad=True) <add> if config.layer_scale_init_value > 0 <add> else None <add> ) <ide> <ide> def maybe_pad(self, hidden_states, height, width): <ide> window_size = self.kernel_size <ide> def forward( <ide> if was_padded: <ide> attention_output = attention_output[:, :height, :width, :].contiguous() <ide> <add> if self.layer_scale_parameters is not None: <add> attention_output = self.layer_scale_parameters[0] * attention_output <add> <ide> hidden_states = shortcut + self.drop_path(attention_output) <ide> <ide> layer_output = self.layernorm_after(hidden_states) <del> layer_output = self.intermediate(layer_output) <del> layer_output = hidden_states + self.output(layer_output) <add> layer_output = self.output(self.intermediate(layer_output)) <add> <add> if self.layer_scale_parameters is not None: <add> layer_output = self.layer_scale_parameters[1] * layer_output <add> <add> layer_output = hidden_states + self.drop_path(layer_output) <ide> <ide> layer_outputs = (layer_output, attention_outputs[1]) if output_attentions else (layer_output,) <ide> return layer_outputs
5
Javascript
Javascript
improve resolvebuilddependencies performance
f76ffc0c0106c9c25ef491aa64913d750dd7e2de
<ide><path>lib/FileSystemInfo.js <ide> const AsyncQueue = require("./util/AsyncQueue"); <ide> const createHash = require("./util/createHash"); <ide> const { join, dirname, relative } = require("./util/fs"); <ide> const makeSerializable = require("./util/makeSerializable"); <add>const processAsyncTree = require("./util/processAsyncTree"); <ide> <ide> /** @typedef {import("./WebpackError")} WebpackError */ <ide> /** @typedef {import("./logging/Logger").Logger} Logger */ <ide> /** @typedef {import("./util/fs").InputFileSystem} InputFileSystem */ <ide> <ide> const supportsEsm = +process.versions.modules >= 83; <ide> <del>const resolveContext = createResolver({ <del> resolveToContext: true, <del> exportsFields: [] <del>}); <del>const resolve = createResolver({ <del> extensions: [".js", ".json", ".node"], <del> conditionNames: ["require"] <del>}); <del> <ide> let FS_ACCURACY = 2000; <ide> <ide> const EMPTY_SET = new Set(); <ide> <del>const RBDT_RESOLVE = 0; <del>const RBDT_RESOLVE_DIRECTORY = 1; <del>const RBDT_RESOLVE_FILE = 2; <del>const RBDT_DIRECTORY = 3; <del>const RBDT_FILE = 4; <del>const RBDT_DIRECTORY_DEPENDENCIES = 5; <del>const RBDT_FILE_DEPENDENCIES = 6; <add>const RBDT_RESOLVE_CJS = 0; <add>const RBDT_RESOLVE_ESM = 1; <add>const RBDT_RESOLVE_DIRECTORY = 2; <add>const RBDT_RESOLVE_CJS_FILE = 3; <add>const RBDT_RESOLVE_ESM_FILE = 4; <add>const RBDT_DIRECTORY = 5; <add>const RBDT_FILE = 6; <add>const RBDT_DIRECTORY_DEPENDENCIES = 7; <add>const RBDT_FILE_DEPENDENCIES = 8; <ide> <ide> const INVALID = Symbol("invalid"); <ide> <ide> class FileSystemInfo { <ide> this.contextHashQueue.add(path, callback); <ide> } <ide> <add> _createBuildDependenciesResolvers() { <add> const resolveContext = createResolver({ <add> resolveToContext: true, <add> exportsFields: [], <add> fileSystem: this.fs <add> }); <add> const resolveCjs = createResolver({ <add> extensions: [".js", ".json", ".node"], <add> conditionNames: ["require", "node"], <add> fileSystem: this.fs <add> }); <add> const resolveEsm = createResolver({ <add> extensions: [".js", ".json", ".node"], <add> fullySpecified: true, <add> conditionNames: ["import", "node"], <add> fileSystem: this.fs <add> }); <add> return { resolveContext, resolveEsm, resolveCjs }; <add> } <add> <ide> /** <ide> * @param {string} context context directory <ide> * @param {Iterable<string>} deps dependencies <ide> * @param {function(Error=, ResolveBuildDependenciesResult=): void} callback callback function <ide> * @returns {void} <ide> */ <ide> resolveBuildDependencies(context, deps, callback) { <add> const { <add> resolveContext, <add> resolveEsm, <add> resolveCjs <add> } = this._createBuildDependenciesResolvers(); <add> <ide> /** @type {Set<string>} */ <ide> const files = new Set(); <ide> /** @type {Set<string>} */ <add> const fileSymlinks = new Set(); <add> /** @type {Set<string>} */ <ide> const directories = new Set(); <ide> /** @type {Set<string>} */ <add> const directorySymlinks = new Set(); <add> /** @type {Set<string>} */ <ide> const missing = new Set(); <ide> /** @type {Set<string>} */ <ide> const resolveFiles = new Set(); <ide> class FileSystemInfo { <ide> const resolveMissing = new Set(); <ide> /** @type {Map<string, string>} */ <ide> const resolveResults = new Map(); <del> /** @type {asyncLib.QueueObject<{type: number, path: string, context?: string, expected?: string }, Error>} */ <del> const queue = asyncLib.queue( <del> ({ type, context, path, expected }, callback) => { <add> const invalidResolveResults = new Set(); <add> const resolverContext = { <add> 
fileDependencies: resolveFiles, <add> contextDependencies: resolveDirectories, <add> missingDependencies: resolveMissing <add> }; <add> processAsyncTree( <add> Array.from(deps, dep => ({ <add> type: RBDT_RESOLVE_CJS, <add> context, <add> path: dep, <add> expected: undefined <add> })), <add> 20, <add> ({ type, context, path, expected }, push, callback) => { <ide> const resolveDirectory = path => { <ide> const key = `d\n${context}\n${path}`; <ide> if (resolveResults.has(key)) { <ide> return callback(); <ide> } <del> resolveContext( <del> context, <del> path, <del> { <del> fileDependencies: resolveFiles, <del> contextDependencies: resolveDirectories, <del> missingDependencies: resolveMissing <del> }, <del> (err, result) => { <add> resolveResults.set(key, undefined); <add> resolveContext(context, path, resolverContext, (err, result) => { <add> if (err) { <add> invalidResolveResults.add(key); <add> if ( <add> err.code === "ENOENT" || <add> err.code === "UNDECLARED_DEPENDENCY" <add> ) { <add> return callback(); <add> } <add> err.message += `\nwhile resolving '${path}' in ${context} to a directory`; <add> return callback(err); <add> } <add> resolveResults.set(key, result); <add> push({ <add> type: RBDT_DIRECTORY, <add> context: undefined, <add> path: result, <add> expected: undefined <add> }); <add> callback(); <add> }); <add> }; <add> const resolveFile = (path, symbol, resolve) => { <add> const key = `${symbol}\n${context}\n${path}`; <add> if (resolveResults.has(key)) { <add> return callback(); <add> } <add> resolveResults.set(key, undefined); <add> resolve(context, path, resolverContext, (err, result) => { <add> if (expected) { <add> if (result === expected) { <add> resolveResults.set(key, result); <add> } else { <add> invalidResolveResults.add(key); <add> this.logger.debug( <add> `Resolving '${path}' in ${context} for build dependencies doesn't lead to expected result '${expected}', but to '${result}' instead. 
Resolving dependencies are ignored for this path.` <add> ); <add> } <add> } else { <ide> if (err) { <add> invalidResolveResults.add(key); <ide> if ( <ide> err.code === "ENOENT" || <ide> err.code === "UNDECLARED_DEPENDENCY" <ide> ) { <ide> return callback(); <ide> } <del> err.message += `\nwhile resolving '${path}' in ${context} to a directory`; <add> err.message += `\nwhile resolving '${path}' in ${context} as file`; <ide> return callback(err); <ide> } <ide> resolveResults.set(key, result); <del> queue.push({ <del> type: RBDT_DIRECTORY, <del> path: result <add> push({ <add> type: RBDT_FILE, <add> context: undefined, <add> path: result, <add> expected: undefined <ide> }); <del> callback(); <ide> } <del> ); <del> }; <del> const resolveFile = path => { <del> const key = `f\n${context}\n${path}`; <del> if (resolveResults.has(key)) { <del> return callback(); <del> } <del> resolve( <del> context, <del> path, <del> { <del> fileDependencies: resolveFiles, <del> contextDependencies: resolveDirectories, <del> missingDependencies: resolveMissing <del> }, <del> (err, result) => { <del> if (expected) { <del> if (result === expected) { <del> resolveResults.set(key, result); <del> } <del> } else { <del> if (err) { <del> if ( <del> err.code === "ENOENT" || <del> err.code === "UNDECLARED_DEPENDENCY" <del> ) { <del> return callback(); <del> } <del> err.message += `\nwhile resolving '${path}' in ${context} as file`; <del> return callback(err); <del> } <del> resolveResults.set(key, result); <del> queue.push({ <del> type: RBDT_FILE, <del> path: result <del> }); <del> } <del> callback(); <del> } <del> ); <add> callback(); <add> }); <ide> }; <ide> switch (type) { <del> case RBDT_RESOLVE: { <add> case RBDT_RESOLVE_CJS: { <add> const isDirectory = /[\\/]$/.test(path); <add> if (isDirectory) { <add> resolveDirectory(path.slice(0, path.length - 1)); <add> } else { <add> resolveFile(path, "f", resolveCjs); <add> } <add> break; <add> } <add> case RBDT_RESOLVE_ESM: { <ide> const isDirectory = /[\\/]$/.test(path); <ide> if (isDirectory) { <ide> resolveDirectory(path.slice(0, path.length - 1)); <ide> class FileSystemInfo { <ide> resolveDirectory(path); <ide> break; <ide> } <del> case RBDT_RESOLVE_FILE: { <del> resolveFile(path); <add> case RBDT_RESOLVE_CJS_FILE: { <add> resolveFile(path, "f", resolveCjs); <add> break; <add> } <add> case RBDT_RESOLVE_ESM_FILE: { <add> resolveFile(path, "e", resolveEsm); <ide> break; <ide> } <ide> case RBDT_FILE: { <ide> if (files.has(path)) { <ide> callback(); <ide> break; <ide> } <add> files.add(path); <ide> this.fs.realpath(path, (err, _realPath) => { <ide> if (err) return callback(err); <ide> const realPath = /** @type {string} */ (_realPath); <ide> if (realPath !== path) { <add> fileSymlinks.add(path); <ide> resolveFiles.add(path); <del> } <del> if (!files.has(realPath)) { <add> if (files.has(realPath)) return callback(); <ide> files.add(realPath); <del> queue.push({ <del> type: RBDT_FILE_DEPENDENCIES, <del> path: realPath <del> }); <ide> } <add> push({ <add> type: RBDT_FILE_DEPENDENCIES, <add> context: undefined, <add> path: realPath, <add> expected: undefined <add> }); <ide> callback(); <ide> }); <ide> break; <ide> class FileSystemInfo { <ide> callback(); <ide> break; <ide> } <add> directories.add(path); <ide> this.fs.realpath(path, (err, _realPath) => { <ide> if (err) return callback(err); <ide> const realPath = /** @type {string} */ (_realPath); <ide> if (realPath !== path) { <add> directorySymlinks.add(path); <ide> resolveFiles.add(path); <del> } <del> if 
(!directories.has(realPath)) { <add> if (directories.has(realPath)) return callback(); <ide> directories.add(realPath); <del> queue.push({ <del> type: RBDT_DIRECTORY_DEPENDENCIES, <del> path: realPath <del> }); <ide> } <add> push({ <add> type: RBDT_DIRECTORY_DEPENDENCIES, <add> context: undefined, <add> path: realPath, <add> expected: undefined <add> }); <ide> callback(); <ide> }); <ide> break; <ide> class FileSystemInfo { <ide> children: for (const child of module.children) { <ide> let childPath = child.filename; <ide> if (childPath) { <del> queue.push({ <add> push({ <ide> type: RBDT_FILE, <del> path: childPath <add> context: undefined, <add> path: childPath, <add> expected: undefined <ide> }); <del> if (childPath.endsWith(".js")) <del> childPath = childPath.slice(0, -3); <ide> const context = dirname(this.fs, path); <ide> for (const modulePath of module.paths) { <ide> if (childPath.startsWith(modulePath)) { <del> const request = childPath.slice(modulePath.length + 1); <del> queue.push({ <del> type: RBDT_RESOLVE_FILE, <add> let request = childPath.slice(modulePath.length + 1); <add> if (request.endsWith(".js")) <add> request = request.slice(0, -3); <add> push({ <add> type: RBDT_RESOLVE_CJS_FILE, <ide> context, <ide> path: request, <del> expected: childPath <add> expected: child.filename <ide> }); <ide> continue children; <ide> } <ide> } <ide> let request = relative(this.fs, context, childPath); <add> if (request.endsWith(".js")) request = request.slice(0, -3); <ide> request = request.replace(/\\/g, "/"); <ide> if (!request.startsWith("../")) request = `./${request}`; <del> queue.push({ <del> type: RBDT_RESOLVE_FILE, <add> push({ <add> type: RBDT_RESOLVE_CJS_FILE, <ide> context, <ide> path: request, <ide> expected: child.filename <ide> class FileSystemInfo { <ide> // e.g. 
import.meta <ide> continue; <ide> } <del> queue.push({ <del> type: RBDT_RESOLVE_FILE, <add> push({ <add> type: RBDT_RESOLVE_ESM_FILE, <ide> context, <del> path: dependency <add> path: dependency, <add> expected: undefined <ide> }); <ide> } catch (e) { <ide> this.logger.warn( <ide> class FileSystemInfo { <ide> resolveMissing.add(packageJson); <ide> const parent = dirname(this.fs, packagePath); <ide> if (parent !== packagePath) { <del> queue.push({ <add> push({ <ide> type: RBDT_DIRECTORY_DEPENDENCIES, <del> path: parent <add> context: undefined, <add> path: parent, <add> expected: undefined <ide> }); <ide> } <ide> callback(); <ide> class FileSystemInfo { <ide> const depsObject = packageData.dependencies; <ide> if (typeof depsObject === "object" && depsObject) { <ide> for (const dep of Object.keys(depsObject)) { <del> queue.push({ <add> push({ <ide> type: RBDT_RESOLVE_DIRECTORY, <ide> context: packagePath, <del> path: dep <add> path: dep, <add> expected: undefined <ide> }); <ide> } <ide> } <ide> class FileSystemInfo { <ide> } <ide> } <ide> }, <del> 50 <add> err => { <add> if (err) return callback(err); <add> for (const l of fileSymlinks) files.delete(l); <add> for (const l of directorySymlinks) directories.delete(l); <add> for (const k of invalidResolveResults) resolveResults.delete(k); <add> callback(null, { <add> files, <add> directories, <add> missing, <add> resolveResults, <add> resolveDependencies: { <add> files: resolveFiles, <add> directories: resolveDirectories, <add> missing: resolveMissing <add> } <add> }); <add> } <ide> ); <del> queue.drain = () => { <del> callback(null, { <del> files, <del> directories, <del> missing, <del> resolveResults, <del> resolveDependencies: { <del> files: resolveFiles, <del> directories: resolveDirectories, <del> missing: resolveMissing <del> } <del> }); <del> }; <del> queue.error = err => { <del> callback(err); <del> callback = () => {}; <del> }; <del> let jobQueued = false; <del> for (const dep of deps) { <del> queue.push({ <del> type: RBDT_RESOLVE, <del> context, <del> path: dep <del> }); <del> jobQueued = true; <del> } <del> if (!jobQueued) { <del> // queue won't call drain when no jobs are queue <del> queue.drain(); <del> } <ide> } <ide> <ide> /** <ide> class FileSystemInfo { <ide> * @returns {void} <ide> */ <ide> checkResolveResultsValid(resolveResults, callback) { <add> const { <add> resolveCjs, <add> resolveEsm, <add> resolveContext <add> } = this._createBuildDependenciesResolvers(); <ide> asyncLib.eachLimit( <ide> resolveResults, <ide> 20, <ide> class FileSystemInfo { <ide> }); <ide> break; <ide> case "f": <del> resolve(context, path, {}, (err, result) => { <add> resolveCjs(context, path, {}, (err, result) => { <add> if (err) return callback(err); <add> if (result !== expectedResult) return callback(INVALID); <add> callback(); <add> }); <add> break; <add> case "e": <add> resolveEsm(context, path, {}, (err, result) => { <ide> if (err) return callback(err); <ide> if (result !== expectedResult) return callback(INVALID); <ide> callback(); <ide><path>lib/util/processAsyncTree.js <add>/* <add> MIT License http://www.opensource.org/licenses/mit-license.php <add> Author Tobias Koppers @sokra <add>*/ <add> <add>"use strict"; <add> <add>/** <add> * @template T <add> * @param {Iterable<T>} items initial items <add> * @param {number} concurrency number of items running in parallel <add> * @param {function(T, function(T): void, function(Error=): void): void} processor worker which pushes more items <add> * @param {function(Error=): void} callback all items 
processed <add> * @returns {void} <add> */ <add>const processAsyncTree = (items, concurrency, processor, callback) => { <add> const queue = Array.from(items); <add> if (queue.length === 0) return callback(); <add> let processing = 0; <add> let finished = false; <add> let processScheduled = true; <add> <add> const push = item => { <add> queue.push(item); <add> if (!processScheduled && processing < concurrency) { <add> processScheduled = true; <add> process.nextTick(processQueue); <add> } <add> }; <add> <add> const processorCallback = err => { <add> processing--; <add> if (err && !finished) { <add> finished = true; <add> callback(err); <add> return; <add> } <add> if (!processScheduled) { <add> processScheduled = true; <add> process.nextTick(processQueue); <add> } <add> }; <add> <add> const processQueue = () => { <add> if (finished) return; <add> while (processing < concurrency && queue.length > 0) { <add> processing++; <add> const item = queue.pop(); <add> processor(item, push, processorCallback); <add> } <add> processScheduled = false; <add> if (queue.length === 0 && processing === 0 && !finished) { <add> finished = true; <add> callback(); <add> } <add> }; <add> <add> processQueue(); <add>}; <add> <add>module.exports = processAsyncTree;
2
Ruby
Ruby
convert results of #to_param to strings #879
0919dbf6b63a4aaa49a2855c37e79a90937290e1
<ide><path>actionpack/lib/action_controller/routing.rb <ide> def draw <ide> end <ide> <ide> def self.extract_parameter_value(parameter) <del> value = parameter.respond_to?(:to_param) ? parameter.to_param : parameter.to_s <add> value = (parameter.respond_to?(:to_param) ? parameter.to_param : parameter).to_s <ide> CGI.escape(value) <ide> end <ide> <ide><path>actionpack/test/controller/routing_tests.rb <ide> def test_special_characters <ide> verify_generate('id%2Fwith%2Fslashes', {}, <ide> {:controller => 'content', :action => 'fish', :id => 'id/with/slashes'}, {}) <ide> end <add> <add> def test_generate_with_numeric_param <add> o = Object.new <add> def o.to_param() 10 end <add> verify_generate('content/action/10', {}, {:controller => 'content', :action => 'action', :id => o}, @defaults) <add> verify_generate('content/show/10', {}, {:controller => 'content', :action => 'show', :id => o}, @defaults) <add> end <ide> end <ide> <ide> class RouteSetTests < Test::Unit::TestCase
2
Ruby
Ruby
remove unused variable
3d71826e6dc794b0f3b6b9098ff0b49e3f997bfa
<ide><path>Library/Homebrew/cmd/bottle.rb <ide> def print_filename string, filename <ide> result = false <ide> <ide> keg.each_unique_file_matching(string) do |file| <del> put_filename = false <del> <ide> # Check dynamic library linkage. Importantly, do not run otool on static <ide> # libraries, which will falsely report "linkage" to themselves. <ide> if file.mach_o_executable? or file.dylib? or file.mach_o_bundle?
1
Python
Python
run bdist_wheel as part of the release process
ce06240851d18ab725d348bce1793aad67349131
<ide><path>scripts/make-release.py <ide> def set_setup_version(version): <ide> <ide> <ide> def build_and_upload(): <del> Popen([sys.executable, 'setup.py', 'release', 'sdist', 'upload']).wait() <add> Popen([sys.executable, 'setup.py', 'release', 'sdist', 'bdist_wheel', 'upload']).wait() <ide> <ide> <ide> def fail(message, *args):
1
Python
Python
fix theano.tensor.signal import issue
289804c67ca1329f7992a00a751d80394f85f93e
<ide><path>keras/layers/convolutional.py <ide> <ide> import theano <ide> import theano.tensor as T <add>from theano.tensor.signal import downsample <ide> <ide> from .. import activations, initializations, regularizers, constraints <ide> from ..utils.theano_utils import shared_zeros, on_gpu <ide> def __init__(self, pool_length=2, stride=None, ignore_border=True): <ide> def get_output(self, train): <ide> X = self.get_input(train) <ide> X = T.reshape(X, (X.shape[0], X.shape[1], X.shape[2], 1)).dimshuffle(0, 2, 1, 3) <del> output = T.signal.downsample.max_pool_2d(X, ds=self.poolsize, st=self.st, ignore_border=self.ignore_border) <add> output = downsample.max_pool_2d(X, ds=self.poolsize, st=self.st, ignore_border=self.ignore_border) <ide> output = output.dimshuffle(0, 2, 1, 3) <ide> return T.reshape(output, (output.shape[0], output.shape[1], output.shape[2])) <ide> <ide> def __init__(self, poolsize=(2, 2), stride=None, ignore_border=True): <ide> <ide> def get_output(self, train): <ide> X = self.get_input(train) <del> output = T.signal.downsample.max_pool_2d(X, ds=self.poolsize, st=self.stride, ignore_border=self.ignore_border) <add> output = downsample.max_pool_2d(X, ds=self.poolsize, st=self.stride, ignore_border=self.ignore_border) <ide> return output <ide> <ide> def get_config(self):
1
Text
Text
update queue description
28123415d067e8d019c9551c4878fd6d79b37501
<ide><path>README.md <ide> Two-dimensional recursive spatial subdivision. <ide> <ide> ## [Queues](https://github.com/d3/d3-queue) <ide> <del>A minimalist approach to escaping callback hell. <add>Evaluate asynchronous tasks with configurable concurrency. <ide> <ide> * [d3.queue](https://github.com/d3/d3-queue#queue) - manage the concurrent evaluation of asynchronous tasks. <ide> * [*queue*.defer](https://github.com/d3/d3-queue#queue_defer) - register a task for evaluation.
1
Python
Python
update example, no need to disable cert validation
ce4ee9558ed0c53e03cb31627c597d5388b56ffa
<ide><path>docs/examples/compute/cloudstack/start_interactive_shell_ikoula.py <ide> <ide> from libcloud.compute.types import Provider <ide> from libcloud.compute.providers import get_driver <del>import libcloud.security as sec <del> <del>sec.VERIFY_SSL_CERT = False <ide> <ide> apikey = os.getenv('IKOULA_API_KEY') <ide> secretkey = os.getenv('IKOULA_SECRET_KEY')
1
Javascript
Javascript
replace array with arrayisarray from primordials
1bbd679adc291a9ae9b7dc7bbca863883cee9638
<ide><path>lib/internal/source_map/source_map.js <ide> 'use strict'; <ide> <ide> const { <del> Array <add> ArrayIsArray <ide> } = primordials; <ide> <ide> const { <ide> function cloneSourceMapV3(payload) { <ide> } <ide> payload = { ...payload }; <ide> for (const key in payload) { <del> if (payload.hasOwnProperty(key) && Array.isArray(payload[key])) { <add> if (payload.hasOwnProperty(key) && ArrayIsArray(payload[key])) { <ide> payload[key] = payload[key].slice(0); <ide> } <ide> }
1
Python
Python
add std=c99 flag for gcc, cleanup error reporting
3356d994fcf3fdfcfedc70185fc013501a87a4b9
<ide><path>numpy/core/setup.py <ide> def get_mathlib_info(*args): <ide> # compiler does not work). <ide> st = config_cmd.try_link('int main(void) { return 0;}') <ide> if not st: <add> # rerun the failing command in verbose mode <ide> config_cmd.compiler.verbose = True <del> st = config_cmd.try_link('int main(void) { return 0;}') <del> raise RuntimeError("Broken toolchain: cannot link a simple C program " <del> "when using these flags:\n" <del> ) <add> config_cmd.try_link('int main(void) { return 0;}') <add> raise RuntimeError("Broken toolchain: cannot link a simple C program") <ide> mlibs = check_mathlib(config_cmd) <ide> <ide> posix_mlib = ' '.join(['-l%s' % l for l in mlibs]) <ide><path>numpy/distutils/ccompiler.py <ide> def CCompiler_customize(self, dist, need_cxx=0): <ide> 'g++' in self.compiler[0] or <ide> 'clang' in self.compiler[0]): <ide> self._auto_depends = True <add> if 'gcc' in self.compiler[0]: <add> # add std=c99 flag for gcc < 5 <add> # TODO: does this need to be more specific? <add> self.compiler.append('-std=c99') <add> self.compiler_so.append('-std=c99') <ide> elif os.name == 'posix': <ide> import tempfile <ide> import shutil
2
Text
Text
remove extraneous sentence in events.md
498415b4abd879a81333f48123cc31ca8f32b6f5
<ide><path>doc/api/events.md <ide> added: v0.3.5 <ide> <ide> By default `EventEmitter`s will print a warning if more than `10` listeners are <ide> added for a particular event. This is a useful default that helps finding <del>memory leaks. Obviously, not all events should be limited to just 10 listeners. <del>The `emitter.setMaxListeners()` method allows the limit to be modified for this <del>specific `EventEmitter` instance. The value can be set to `Infinity` (or `0`) <del>to indicate an unlimited number of listeners. <add>memory leaks. The `emitter.setMaxListeners()` method allows the limit to be <add>modified for this specific `EventEmitter` instance. The value can be set to <add>`Infinity` (or `0`) to indicate an unlimited number of listeners. <ide> <ide> Returns a reference to the `EventEmitter`, so that calls can be chained. <ide>
1
Javascript
Javascript
remove transaction from componentwillupdate
735b4f0b7cd4f67f85a5950f35e4adb46ee3d9b4
<ide><path>src/core/ReactCompositeComponent.js <ide> var ReactCompositeComponentMixin = { <ide> var prevState = this.state; <ide> <ide> if (this.componentWillUpdate) { <del> this.componentWillUpdate(nextProps, nextState, transaction); <add> this.componentWillUpdate(nextProps, nextState); <ide> } <ide> <ide> this.props = nextProps;
1
Javascript
Javascript
move exports to bottom for consistent code style
aa00968255cdb6471b9e7fec99337697a2ce882c
<ide><path>lib/child_process.js <ide> const { <ide> <ide> const MAX_BUFFER = 1024 * 1024; <ide> <del>exports.ChildProcess = ChildProcess; <del> <del>exports.fork = function fork(modulePath /* , args, options */) { <add>function fork(modulePath /* , args, options */) { <ide> validateString(modulePath, 'modulePath'); <ide> <ide> // Get options and args arguments. <ide> exports.fork = function fork(modulePath /* , args, options */) { <ide> options.shell = false; <ide> <ide> return spawn(options.execPath, args, options); <del>}; <del> <add>} <ide> <del>exports._forkChild = function _forkChild(fd) { <add>function _forkChild(fd) { <ide> // set process.send() <ide> const p = new Pipe(PipeConstants.IPC); <ide> p.open(fd); <ide> exports._forkChild = function _forkChild(fd) { <ide> process.on('removeListener', function onRemoveListener(name) { <ide> if (name === 'message' || name === 'disconnect') control.unref(); <ide> }); <del>}; <del> <add>} <ide> <ide> function normalizeExecArgs(command, options, callback) { <ide> if (typeof options === 'function') { <ide> function normalizeExecArgs(command, options, callback) { <ide> } <ide> <ide> <del>exports.exec = function exec(command, options, callback) { <add>function exec(command, options, callback) { <ide> const opts = normalizeExecArgs(command, options, callback); <del> return exports.execFile(opts.file, <del> opts.options, <del> opts.callback); <del>}; <add> return module.exports.execFile(opts.file, <add> opts.options, <add> opts.callback); <add>} <ide> <ide> const customPromiseExecFunction = (orig) => { <ide> return (...args) => { <ide> const customPromiseExecFunction = (orig) => { <ide> }; <ide> }; <ide> <del>Object.defineProperty(exports.exec, promisify.custom, { <add>Object.defineProperty(exec, promisify.custom, { <ide> enumerable: false, <del> value: customPromiseExecFunction(exports.exec) <add> value: customPromiseExecFunction(exec) <ide> }); <ide> <del>exports.execFile = function execFile(file /* , args, options, callback */) { <add>function execFile(file /* , args, options, callback */) { <ide> let args = []; <ide> let callback; <ide> let options; <ide> exports.execFile = function execFile(file /* , args, options, callback */) { <ide> child.addListener('error', errorhandler); <ide> <ide> return child; <del>}; <add>} <ide> <del>Object.defineProperty(exports.execFile, promisify.custom, { <add>Object.defineProperty(execFile, promisify.custom, { <ide> enumerable: false, <del> value: customPromiseExecFunction(exports.execFile) <add> value: customPromiseExecFunction(execFile) <ide> }); <ide> <ide> function normalizeSpawnArguments(file, args, options) { <ide> function normalizeSpawnArguments(file, args, options) { <ide> } <ide> <ide> <del>var spawn = exports.spawn = function spawn(file, args, options) { <add>function spawn(file, args, options) { <ide> const child = new ChildProcess(); <ide> <ide> options = normalizeSpawnArguments(file, args, options); <ide> debug('spawn', options); <ide> child.spawn(options); <ide> <ide> return child; <del>}; <add>} <ide> <ide> function spawnSync(file, args, options) { <ide> options = { <ide> function spawnSync(file, args, options) { <ide> <ide> return child_process.spawnSync(options); <ide> } <del>exports.spawnSync = spawnSync; <ide> <ide> <ide> function checkExecSyncError(ret, args, cmd) { <ide> function execFileSync(command, args, options) { <ide> <ide> return ret.stdout; <ide> } <del>exports.execFileSync = execFileSync; <ide> <ide> <ide> function execSync(command, options) { <ide> function execSync(command, 
options) { <ide> <ide> return ret.stdout; <ide> } <del>exports.execSync = execSync; <ide> <ide> <ide> function validateTimeout(timeout) { <ide> function sanitizeKillSignal(killSignal) { <ide> killSignal); <ide> } <ide> } <add> <add>module.exports = { <add> _forkChild, <add> ChildProcess, <add> exec, <add> execFile, <add> execFileSync, <add> execSync, <add> fork, <add> spawn, <add> spawnSync <add>};
1
Java
Java
fix typos in requestresultmatchers
bb51447860113f2c17d5e8d4df22c0ca1817b8f3
<ide><path>spring-test/src/main/java/org/springframework/test/web/servlet/result/RequestResultMatchers.java <ide> * {@link MockMvcResultMatchers#request}. <ide> * <ide> * @author Rossen Stoyanchev <add> * @author Sam Brannen <ide> * @since 3.2 <ide> */ <ide> public class RequestResultMatchers { <ide> <ide> /** <ide> * Protected constructor. <del> * Use {@link MockMvcResultMatchers#request()}. <add> * <p>Use {@link MockMvcResultMatchers#request()}. <ide> */ <ide> protected RequestResultMatchers() { <ide> } <ide> <ide> <ide> /** <del> * Assert a request attribute value with the given Hamcrest {@link Matcher}. <del> * Whether asynchronous processing started, usually as a result of a <add> * Assert whether asynchronous processing started, usually as a result of a <ide> * controller method returning {@link Callable} or {@link DeferredResult}. <del> * The test will await the completion of a {@code Callable} so that <add> * <p>The test will await the completion of a {@code Callable} so that <ide> * {@link #asyncResult(Matcher)} can be used to assert the resulting value. <ide> * Neither a {@code Callable} nor a {@code DeferredResult} will complete <ide> * processing all the way since a {@link MockHttpServletRequest} does not <ide> public ResultMatcher asyncStarted() { <ide> @Override <ide> public void match(MvcResult result) { <ide> HttpServletRequest request = result.getRequest(); <del> assertEquals("Async started", true, request.isAsyncStarted()); <add> assertAsyncStarted(request); <ide> } <ide> }; <ide> } <ide> <ide> /** <del> * Assert that asynchronous processing was not start. <add> * Assert that asynchronous processing was not started. <ide> * @see #asyncStarted() <ide> */ <ide> public ResultMatcher asyncNotStarted() { <ide> public void match(MvcResult result) { <ide> <ide> /** <ide> * Assert the result from asynchronous processing with the given matcher. <add> * <p>This method can be used when a controller method returns {@link Callable} <add> * or {@link WebAsyncTask}. <ide> */ <ide> public <T> ResultMatcher asyncResult(final Matcher<T> matcher) { <ide> return new ResultMatcher() { <ide> @Override <ide> @SuppressWarnings("unchecked") <ide> public void match(MvcResult result) { <ide> HttpServletRequest request = result.getRequest(); <del> assertEquals("Async started", true, request.isAsyncStarted()); <add> assertAsyncStarted(request); <ide> assertThat("Async result", (T) result.getAsyncResult(), matcher); <ide> } <ide> }; <ide> } <ide> <ide> /** <ide> * Assert the result from asynchronous processing. <del> * This method can be used when a controller method returns {@link Callable} <add> * <p>This method can be used when a controller method returns {@link Callable} <ide> * or {@link WebAsyncTask}. The value matched is the value returned from the <ide> * {@code Callable} or the exception raised. 
<ide> */ <ide> public <T> ResultMatcher asyncResult(final Object expectedResult) { <ide> @Override <ide> public void match(MvcResult result) { <ide> HttpServletRequest request = result.getRequest(); <del> assertEquals("Async started", true, request.isAsyncStarted()); <add> assertAsyncStarted(request); <ide> assertEquals("Async result", expectedResult, result.getAsyncResult()); <ide> } <ide> }; <ide> public <T> ResultMatcher sessionAttribute(final String name, final Matcher<T> ma <ide> @SuppressWarnings("unchecked") <ide> public void match(MvcResult result) { <ide> T value = (T) result.getRequest().getSession().getAttribute(name); <del> assertThat("Request attribute", value, matcher); <add> assertThat("Session attribute", value, matcher); <ide> } <ide> }; <ide> } <ide> <ide> /** <del> * Assert a session attribute value.. <add> * Assert a session attribute value. <ide> */ <ide> public <T> ResultMatcher sessionAttribute(final String name, final Object value) { <ide> return new ResultMatcher() { <ide> @Override <ide> public void match(MvcResult result) { <del> assertEquals("Request attribute", value, result.getRequest().getSession().getAttribute(name)); <add> assertEquals("Session attribute", value, result.getRequest().getSession().getAttribute(name)); <ide> } <ide> }; <ide> } <ide> <add> private static void assertAsyncStarted(HttpServletRequest request) { <add> assertEquals("Async started", true, request.isAsyncStarted()); <add> } <add> <ide> }
1
Javascript
Javascript
fix all anchor links in docs
5a53d90003139a2d32a67435af880980a142c50f
<ide><path>website/core/Header.js <ide> var React = require('React'); <ide> var slugify = require('slugify'); <ide> <ide> var Header = React.createClass({ <del> getDefaultProps: function() { <del> return {permalink: ''}; <add> contextTypes: { <add> permalink: React.PropTypes.string <ide> }, <ide> <ide> render: function() { <ide> var slug = slugify(this.props.toSlug || this.props.children); <ide> var H = 'h' + this.props.level; <add> var base = this.context.permalink || ''; <ide> return ( <ide> <H {...this.props}> <ide> <a className="anchor" name={slug}></a> <ide> {this.props.children} <del> {' '}<a className="hash-link" href={this.props.permalink + '#' + slug}>#</a> <add> {' '}<a className="hash-link" href={base + '#' + slug}>#</a> <ide> </H> <ide> ); <ide> } <ide><path>website/layout/AutodocsLayout.js <ide> var ComponentDoc = React.createClass({ <ide> renderProp: function(name, prop) { <ide> return ( <ide> <div className="prop" key={name}> <del> <Header level={4} className="propTitle" toSlug={name} permalink={this.props.permalink}> <add> <Header level={4} className="propTitle" toSlug={name}> <ide> {prop.platforms && prop.platforms.map(platform => <ide> <span className="platform">{platform}</span> <ide> )} <ide> var Modal = React.createClass({ <ide> }); <ide> <ide> var Autodocs = React.createClass({ <add> childContextTypes: { <add> permalink: React.PropTypes.string <add> }, <add> <add> getChildContext: function() { <add> return {permalink: this.props.metadata.permalink}; <add> }, <add> <ide> renderFullDescription: function(docs) { <ide> if (!docs.fullDescription) { <ide> return; <ide> var Autodocs = React.createClass({ <ide> var metadata = this.props.metadata; <ide> var docs = JSON.parse(this.props.children); <ide> var content = docs.type === 'component' || docs.type === 'style' ? <del> <ComponentDoc content={docs} permalink={metadata.permalink}/> : <add> <ComponentDoc content={docs} /> : <ide> <APIDoc content={docs} apiName={metadata.title} />; <ide> <ide> return ( <ide><path>website/layout/DocsLayout.js <ide> var React = require('React'); <ide> var Site = require('Site'); <ide> <ide> var DocsLayout = React.createClass({ <add> childContextTypes: { <add> permalink: React.PropTypes.string <add> }, <add> <add> getChildContext: function() { <add> return {permalink: this.props.metadata.permalink}; <add> }, <add> <ide> render: function() { <ide> var metadata = this.props.metadata; <ide> var content = this.props.children; <ide><path>website/layout/PageLayout.js <ide> var Site = require('Site'); <ide> var Marked = require('Marked'); <ide> <ide> var support = React.createClass({ <add> childContextTypes: { <add> permalink: React.PropTypes.string <add> }, <add> <add> getChildContext: function() { <add> return {permalink: this.props.metadata.permalink}; <add> }, <add> <ide> render: function() { <ide> var metadata = this.props.metadata; <ide> var content = this.props.children; <ide><path>website/src/react-native/support.js <ide> var center = require('center'); <ide> var H2 = require('H2'); <ide> <ide> var support = React.createClass({ <add> childContextTypes: { <add> permalink: React.PropTypes.string <add> }, <add> <add> getChildContext: function() { <add> return {permalink: 'support.html'}; <add> }, <ide> render: function() { <ide> return ( <ide> <Site section="support" title="Support">
5
PHP
PHP
fix multi-word models with irregular plurals
b70ec729f8459140505bae0f6a5699c2fc813ef7
<ide><path>src/Illuminate/Database/Eloquent/Model.php <ide> public static function unsetConnectionResolver() <ide> */ <ide> public function getTable() <ide> { <del> if (! isset($this->table)) { <del> return str_replace( <del> '\\', '', Str::snake(Str::plural(class_basename($this))) <del> ); <del> } <del> <del> return $this->table; <add> return isset($this->table) <add> ? $this->table <add> : Str::snake(Str::pluralStudly(class_basename($this))); <ide> } <ide> <ide> /** <ide><path>src/Illuminate/Database/Eloquent/Relations/BelongsToMany.php <ide> protected function touchingParent() <ide> */ <ide> protected function guessInverseRelation() <ide> { <del> return Str::camel(Str::plural(class_basename($this->getParent()))); <add> return Str::camel(Str::pluralStudly(class_basename($this->getParent()))); <ide> } <ide> <ide> /** <ide><path>src/Illuminate/Foundation/Console/ModelMakeCommand.php <ide> protected function createFactory() <ide> */ <ide> protected function createMigration() <ide> { <del> $table = Str::plural(Str::snake(class_basename($this->argument('name')))); <add> $table = Str::snake(Str::pluralStudly(class_basename($this->argument('name')))); <ide> <ide> if ($this->option('pivot')) { <ide> $table = Str::singular($table); <ide><path>src/Illuminate/Support/Str.php <ide> public static function plural($value, $count = 2) <ide> return Pluralizer::plural($value, $count); <ide> } <ide> <add> /** <add> * Pluralize the last word of an English, studly caps case string. <add> * <add> * @param string $value <add> * @param int $count <add> * @return string <add> */ <add> public static function pluralStudly($value, $count = 2) <add> { <add> $parts = preg_split('/(.)(?=[A-Z])/u', $value, -1, PREG_SPLIT_DELIM_CAPTURE); <add> <add> $lastWord = array_pop($parts); <add> <add> return implode('', $parts).self::plural($lastWord, $count); <add> } <add> <ide> /** <ide> * Generate a more truly "random" alpha-numeric string. 
<ide> * <ide><path>tests/Database/DatabaseEloquentIrregularPluralTest.php <add><?php <add> <add>namespace Illuminate\Tests\Database; <add> <add>use Carbon\Carbon; <add>use PHPUnit\Framework\TestCase; <add>use Illuminate\Database\Eloquent\Model; <add>use Illuminate\Database\Capsule\Manager as DB; <add> <add>class DatabaseEloquentIrregularPluralTest extends TestCase <add>{ <add> public function setUp() <add> { <add> $db = new DB; <add> <add> $db->addConnection([ <add> 'driver' => 'sqlite', <add> 'database' => ':memory:', <add> ]); <add> <add> $db->bootEloquent(); <add> $db->setAsGlobal(); <add> $this->createSchema(); <add> } <add> <add> public function createSchema() <add> { <add> $this->schema()->create('irregular_plural_humans', function ($table) { <add> $table->increments('id'); <add> $table->string('email')->unique(); <add> $table->timestamps(); <add> }); <add> <add> $this->schema()->create('irregular_plural_tokens', function ($table) { <add> $table->increments('id'); <add> $table->string('title'); <add> }); <add> <add> $this->schema()->create('irregular_plural_human_irregular_plural_token', function ($table) { <add> $table->integer('irregular_plural_human_id')->unsigned(); <add> $table->integer('irregular_plural_token_id')->unsigned(); <add> }); <add> } <add> <add> public function tearDown() <add> { <add> $this->schema()->drop('irregular_plural_tokens'); <add> $this->schema()->drop('irregular_plural_humans'); <add> $this->schema()->drop('irregular_plural_human_irregular_plural_token'); <add> } <add> <add> protected function schema() <add> { <add> $connection = Model::getConnectionResolver()->connection(); <add> <add> return $connection->getSchemaBuilder(); <add> } <add> <add> /** @test */ <add> function it_pluralizes_the_table_name() <add> { <add> $model = new IrregularPluralHuman(); <add> <add> $this->assertSame('irregular_plural_humans', $model->getTable()); <add> } <add> <add> /** @test */ <add> function it_touches_the_parent_with_an_irregular_plural() <add> { <add> Carbon::setTestNow('2018-05-01 12:13:14'); <add> <add> IrregularPluralHuman::create(['id' => 1, 'email' => '[email protected]']); <add> <add> IrregularPluralToken::insert([ <add> ['title' => 'The title'], <add> ]); <add> <add> $human = IrregularPluralHuman::query()->first(); <add> <add> $tokenIds = IrregularPluralToken::pluck('id'); <add> <add> Carbon::setTestNow('2018-05-01 15:16:17'); <add> <add> $human->irregularPluralTokens()->sync($tokenIds); <add> <add> $human->refresh(); <add> <add> $this->assertSame('2018-05-01 12:13:14', (string) $human->created_at); <add> $this->assertSame('2018-05-01 15:16:17', (string) $human->updated_at); <add> } <add>} <add> <add>class IrregularPluralHuman extends Model <add>{ <add> protected $guarded = []; <add> <add> public function irregularPluralTokens() <add> { <add> return $this->belongsToMany( <add> IrregularPluralToken::class, <add> 'irregular_plural_human_irregular_plural_token', <add> 'irregular_plural_token_id', <add> 'irregular_plural_human_id' <add> ); <add> } <add>} <add> <add>class IrregularPluralToken extends Model <add>{ <add> protected $guarded = []; <add> <add> public $timestamps = false; <add> <add> protected $touches = [ <add> 'irregularPluralHumans', <add> ]; <add>} <ide><path>tests/Support/SupportPluralizerTest.php <ide> public function testIfEndOfWordPlural() <ide> $this->assertEquals('MatrixFields', Str::plural('MatrixField')); <ide> $this->assertEquals('IndexFields', Str::plural('IndexField')); <ide> $this->assertEquals('VertexFields', Str::plural('VertexField')); 
<add> <add> // This is expected behavior, use "Str::pluralStudly" instead. <add> $this->assertSame('RealHumen', Str::plural('RealHuman')); <ide> } <ide> <ide> public function testPluralWithNegativeCount() <ide> public function testPluralWithNegativeCount() <ide> $this->assertEquals('test', Str::plural('test', -1)); <ide> $this->assertEquals('tests', Str::plural('test', -2)); <ide> } <add> <add> public function testPluralStudly() <add> { <add> $this->assertPluralStudly('RealHumans', 'RealHuman'); <add> $this->assertPluralStudly('Models', 'Model'); <add> $this->assertPluralStudly('VortexFields', 'VortexField'); <add> $this->assertPluralStudly('MultipleWordsInOneStrings', 'MultipleWordsInOneString'); <add> } <add> <add> public function testPluralStudlyWithCount() <add> { <add> $this->assertPluralStudly('RealHuman', 'RealHuman', 1); <add> $this->assertPluralStudly('RealHumans', 'RealHuman', 2); <add> $this->assertPluralStudly('RealHuman', 'RealHuman', -1); <add> $this->assertPluralStudly('RealHumans', 'RealHuman', -2); <add> } <add> <add> private function assertPluralStudly($expected, $value, $count = 2) <add> { <add> $this->assertSame($expected, Str::pluralStudly($value, $count)); <add> } <ide> }
6
PHP
PHP
use "use" statments
e1e9745f6f18562d99f4dc6bf73ed3a06a935ea0
<ide><path>src/Cache/Engine/RedisEngine.php <ide> namespace Cake\Cache\Engine; <ide> <ide> use Cake\Cache\CacheEngine; <add>use Redis; <add>use RedisException; <ide> <ide> /** <ide> * Redis storage engine for cache. <ide> public function init(array $config = []) <ide> protected function _connect() <ide> { <ide> try { <del> $this->_Redis = new \Redis(); <add> $this->_Redis = new Redis(); <ide> if (!empty($this->_config['unix_socket'])) { <ide> $return = $this->_Redis->connect($this->_config['unix_socket']); <ide> } elseif (empty($this->_config['persistent'])) { <ide> protected function _connect() <ide> $persistentId = $this->_config['port'] . $this->_config['timeout'] . $this->_config['database']; <ide> $return = $this->_Redis->pconnect($this->_config['server'], $this->_config['port'], $this->_config['timeout'], $persistentId); <ide> } <del> } catch (\RedisException $e) { <add> } catch (RedisException $e) { <ide> return false; <ide> } <ide> if ($return && $this->_config['password']) { <ide> public function clearGroup($group) <ide> */ <ide> public function __destruct() <ide> { <del> if (empty($this->_config['persistent']) && $this->_Redis instanceof \Redis) { <add> if (empty($this->_config['persistent']) && $this->_Redis instanceof Redis) { <ide> $this->_Redis->close(); <ide> } <ide> }
1
PHP
PHP
trim sql before executing
1dd6daf7a01f53fec3c7397edcbfbc601ed77ae1
<ide><path>laravel/db/connection.php <ide> public function query($sql, $bindings = array()) <ide> <ide> $this->queries[] = compact('sql', 'bindings'); <ide> <del> return $this->execute($this->pdo->prepare($sql), $bindings); <add> return $this->execute($this->pdo->prepare(trim($sql)), $bindings); <ide> } <ide> <ide> /**
1
Ruby
Ruby
make os x specific
08f68fc4dd6afc6c118ad631889c2a35e4e8d07e
<ide><path>Library/Homebrew/test/test_os_mac_x11_requirement.rb <add>require "testing_env" <add>require "requirements/x11_requirement" <add> <add>class OSMacX11RequirementTests < Homebrew::TestCase <add> def test_satisfied <add> MacOS::XQuartz.stubs(:version).returns("2.7.5") <add> MacOS::XQuartz.stubs(:installed?).returns(true) <add> assert_predicate X11Requirement.new, :satisfied? <add> <add> MacOS::XQuartz.stubs(:installed?).returns(false) <add> refute_predicate X11Requirement.new, :satisfied? <add> end <add>end <ide><path>Library/Homebrew/test/test_x11_requirement.rb <ide> def test_x_env <ide> ENV.expects(:x11) <ide> x.modify_build_environment <ide> end <del> <del> def test_satisfied <del> MacOS::XQuartz.stubs(:version).returns("2.7.5") <del> MacOS::XQuartz.stubs(:installed?).returns(true) <del> assert_predicate X11Requirement.new, :satisfied? <del> <del> MacOS::XQuartz.stubs(:installed?).returns(false) <del> refute_predicate X11Requirement.new, :satisfied? <del> end <ide> end
2
Javascript
Javascript
fix the typos
34970fd7853a26e72179e5c97ea28143d00297bc
<ide><path>src/core/ReactMount.js <ide> var ReactMount = { <ide> }, <ide> <ide> /** <del> * Ensures tht the top-level event delegation listener is set up. This will be <del> * invoked some time before the first time any React component is rendered. <add> * Ensures that the top-level event delegation listener is set up. This will <add> * be invoked some time before the first time any React component is rendered. <ide> * <ide> * @param {object} TopLevelCallbackCreator <ide> * @private
1
Javascript
Javascript
improve comments in a few tests
8897d255f2567aecdf780d94c3f45ee719c9ace8
<ide><path>test/moment/zone_switching.js <ide> exports.zoneSwitching = { <ide> m.zone(z * 60); <ide> <ide> test.equal(m.clone().local(true).format(fmt), m.format(fmt), <del> "zone(" + z + ":00) to local failed to keep the local time"); <add> "zone(" + z + ":00) to local failed to keep local time"); <ide> } <ide> <ide> test.done(); <ide> exports.zoneSwitching = { <ide> m.zone(z * 60); <ide> <ide> test.equal(m.clone().local(false).valueOf(), m.valueOf(), <del> "zone(" + z + ":00) to local failed to keep the local time (explicit)"); <add> "zone(" + z + ":00) to local failed to keep utc time (explicit)"); <ide> test.equal(m.clone().local().valueOf(), m.valueOf(), <del> "zone(" + z + ":00) to local failed to keep the local time (implicit)"); <add> "zone(" + z + ":00) to local failed to keep utc time (implicit)"); <ide> } <ide> <ide> test.done();
1
Javascript
Javascript
fix flakey preload test
b408d733735b9eb4d6b596bad0cbc7f7e9be696c
<ide><path>test/integration/preload-viewport/test/index.test.js <ide> describe('Prefetching Links in viewport', () => { <ide> })()`) <ide> <ide> console.log({ linkHrefs, scriptSrcs }) <del> expect(linkHrefs.some((href) => scriptSrcs.includes(href))).toBe(false) <add> expect(scriptSrcs.some((src) => src.includes('pages/index-'))).toBe(true) <add> expect(linkHrefs.some((href) => href.includes('pages/index-'))).toBe(false) <ide> }) <ide> <ide> it('should not duplicate prefetches', async () => {
1
Go
Go
remove check for map nilness
8f311f4d8c80fc33613a0f4ddf723ef1e11b1b17
<ide><path>daemon/info.go <ide> import ( <ide> <ide> // SystemInfo returns information about the host server the daemon is running on. <ide> func (daemon *Daemon) SystemInfo() (*types.Info, error) { <del> images := daemon.Graph().Map() <del> var imgcount int <del> if images == nil { <del> imgcount = 0 <del> } else { <del> imgcount = len(images) <del> } <ide> kernelVersion := "<unknown>" <ide> if kv, err := kernel.GetKernelVersion(); err == nil { <ide> kernelVersion = kv.String() <ide> func (daemon *Daemon) SystemInfo() (*types.Info, error) { <ide> v := &types.Info{ <ide> ID: daemon.ID, <ide> Containers: len(daemon.List()), <del> Images: imgcount, <add> Images: len(daemon.Graph().Map()), <ide> Driver: daemon.GraphDriver().String(), <ide> DriverStatus: daemon.GraphDriver().Status(), <ide> IPv4Forwarding: !sysInfo.IPv4ForwardingDisabled,
1
Text
Text
add a warning about totally custom login views
4ad8c17371e25acbdce4e2f449efccc4df072270
<ide><path>docs/api-guide/authentication.md <ide> Unauthenticated responses that are denied permission will result in an `HTTP 403 <ide> <ide> If you're using an AJAX style API with SessionAuthentication, you'll need to make sure you include a valid CSRF token for any "unsafe" HTTP method calls, such as `PUT`, `PATCH`, `POST` or `DELETE` requests. See the [Django CSRF documentation][csrf-ajax] for more details. <ide> <add>**Warning**: Always use Django's standard login view when creating login pages. This will ensure your login views are properly protected. <add> <add>CSRF validation in REST framework works slightly differently to standard Django due to the need to support both session and non-session based authentication to the same views. This means that only authenticated requests require CSRF tokens, and anonymous requests may be sent without CSRF tokens. This behaviour is not suitable for login views, which should always have CSRF validation applied. <add> <ide> # Custom authentication <ide> <ide> To implement a custom authentication scheme, subclass `BaseAuthentication` and override the `.authenticate(self, request)` method. The method should return a two-tuple of `(user, auth)` if authentication succeeds, or `None` otherwise.
1
Javascript
Javascript
verify arguments length in common.expectserror
29cddb40b238daf1dfad0cc42e0d3bd5068dff03
<ide><path>test/common/index.js <ide> exports.expectsError = function expectsError(fn, settings, exact) { <ide> } <ide> <ide> function innerFn(error) { <add> if (arguments.length !== 1) { <add> // Do not use `assert.strictEqual()` to prevent `util.inspect` from <add> // always being called. <add> assert.fail(`Expected one argument, got ${util.inspect(arguments)}`); <add> } <ide> const descriptor = Object.getOwnPropertyDescriptor(error, 'message'); <ide> assert.strictEqual(descriptor.enumerable, <ide> false, 'The error message should be non-enumerable');
1
Python
Python
add pushtohubcallback in main init
ad3e560bc775b888941cdf59c9252fa4e2d79817
<ide><path>src/transformers/__init__.py <ide> _import_structure["benchmark.benchmark_args_tf"] = ["TensorFlowBenchmarkArguments"] <ide> _import_structure["benchmark.benchmark_tf"] = ["TensorFlowBenchmark"] <ide> _import_structure["generation_tf_utils"] = ["tf_top_k_top_p_filtering"] <del> _import_structure["keras_callbacks"] = [] <add> _import_structure["keras_callbacks"] = ["PushToHubCallback"] <ide> _import_structure["modeling_tf_outputs"] = [] <ide> _import_structure["modeling_tf_utils"] = [ <ide> "TFPreTrainedModel", <ide> # Benchmarks <ide> from .benchmark.benchmark_tf import TensorFlowBenchmark <ide> from .generation_tf_utils import tf_top_k_top_p_filtering <add> from .keras_callbacks import PushToHubCallback <ide> from .modeling_tf_layoutlm import ( <ide> TF_LAYOUTLM_PRETRAINED_MODEL_ARCHIVE_LIST, <ide> TFLayoutLMForMaskedLM, <ide><path>src/transformers/utils/dummy_tf_objects.py <ide> def tf_top_k_top_p_filtering(*args, **kwargs): <ide> requires_backends(tf_top_k_top_p_filtering, ["tf"]) <ide> <ide> <add>class PushToHubCallback: <add> def __init__(self, *args, **kwargs): <add> requires_backends(self, ["tf"]) <add> <add> <ide> TF_LAYOUTLM_PRETRAINED_MODEL_ARCHIVE_LIST = None <ide> <ide>
2
Go
Go
create errvolumetargetisroot in the volume package
62143af5437a29d4b95f971d1905cfef763b0847
<ide><path>volume/lcow_parser.go <ide> package volume <ide> <ide> import ( <ide> "errors" <del> "fmt" <ide> "path" <ide> <ide> "github.com/docker/docker/api/types/mount" <ide> ) <ide> <ide> var lcowSpecificValidators mountValidator = func(m *mount.Mount) error { <ide> if path.Clean(m.Target) == "/" { <del> return fmt.Errorf("invalid specification: destination can't be '/'") <add> return ErrVolumeTargetIsRoot <ide> } <ide> if m.Type == mount.TypeNamedPipe { <ide> return errors.New("Linux containers on Windows do not support named pipe mounts") <ide><path>volume/linux_parser.go <ide> func linuxSplitRawSpec(raw string) ([]string, error) { <ide> func linuxValidateNotRoot(p string) error { <ide> p = path.Clean(strings.Replace(p, `\`, `/`, -1)) <ide> if p == "/" { <del> return fmt.Errorf("invalid specification: destination can't be '/'") <add> return ErrVolumeTargetIsRoot <ide> } <ide> return nil <ide> } <ide><path>volume/parser.go <ide> package volume <ide> <ide> import ( <add> "errors" <ide> "runtime" <ide> <ide> "github.com/docker/docker/api/types/mount" <ide> const ( <ide> OSWindows = "windows" <ide> ) <ide> <add>// ErrVolumeTargetIsRoot is returned when the target destination is root. <add>// It's used by both LCOW and Linux parsers. <add>var ErrVolumeTargetIsRoot = errors.New("invalid specification: destination can't be '/'") <add> <ide> // Parser represents a platform specific parser for mount expressions <ide> type Parser interface { <ide> ParseMountRaw(raw, volumeDriver string) (*MountPoint, error)
3
Text
Text
fix changelog url (angularjs.com -> angularjs.org)
5dbf0cc8a28fe5ab5503c45129444cbc59b6bd1f
<ide><path>CHANGELOG.md <ide> <ide> <ide> ### Documentation <del>- brand new template for <http://docs.angularjs.com/> <add>- brand new template for <http://docs.angularjs.org/> <ide> - brand new tutorial that describes how to build a typical angular app <del> <http://docs.angularjs.com/#!/tutorial> <add> <http://docs.angularjs.org/#!/tutorial> <ide> - lots of new content for the dev guide (still work in progress) <del> <http://docs.angularjs.com/#!/guide> <add> <http://docs.angularjs.org/#!/guide> <ide> <ide> <ide> ### Bug Fixes
1
Ruby
Ruby
remove dead code
c7cf7f476a47c9e4c60a369efa338a1fa9d81d6c
<ide><path>activerecord/test/cases/relation/where_chain_test.rb <ide> def test_rewhere_with_one_condition <ide> def test_rewhere_with_multiple_overwriting_conditions <ide> relation = Post.where(title: 'hello').where(body: 'world').rewhere(title: 'alone', body: 'again') <ide> <del> title_expected = Arel::Nodes::Equality.new(Post.arel_table['title'], 'alone') <del> body_expected = Arel::Nodes::Equality.new(Post.arel_table['body'], 'again') <del> <ide> assert_equal 2, relation.where_values.size <ide> <ide> value = relation.where_values.first
1
Python
Python
remove redundant operations in 1d masking
40938e4367b3f6f0f368d71ec3e4722f58592cd8
<ide><path>numpy/lib/histograms.py <ide> def histogramdd(sample, bins=10, range=None, normed=False, weights=None): <ide> on_edge = (np.around(sample[:, i], decimal) == <ide> np.around(edges[i][-1], decimal)) <ide> # Shift these points one bin to the left. <del> Ncount[i][np.nonzero(on_edge & not_smaller_than_edge)[0]] -= 1 <add> Ncount[i][on_edge & not_smaller_than_edge] -= 1 <ide> <ide> # Flattened histogram matrix (1D) <ide> # Reshape is used so that overlarge arrays
1
PHP
PHP
add note to remember method's purpose
2d4dd33f87a604716beb4e03e9526c1a9d4e5a54
<ide><path>src/Illuminate/Foundation/Testing/CrawlerTrait.php <ide> public function delete($uri, array $data = [], array $headers = []) <ide> /** <ide> * Send the given request through the application. <ide> * <add> * This method allows you to fully customize the entire Request object. <add> * <ide> * @param \Illuminate\Http\Request $request <ide> * @return $this <ide> */
1
Text
Text
move fedor to tsc emeritus
7dffabbb8417bb23d3789f10463c441b1e02a535
<ide><path>README.md <ide> For more information about the governance of the Node.js project, see <ide> **Jeremiah Senkpiel** &lt;[email protected]&gt; <ide> * [gibfahn](https://github.com/gibfahn) - <ide> **Gibson Fahnestock** &lt;[email protected]&gt; (he/him) <del>* [indutny](https://github.com/indutny) - <del>**Fedor Indutny** &lt;[email protected]&gt; <ide> * [jasnell](https://github.com/jasnell) - <ide> **James M Snell** &lt;[email protected]&gt; (he/him) <ide> * [joyeecheung](https://github.com/joyeecheung) - <ide> For more information about the governance of the Node.js project, see <ide> **Ben Noordhuis** &lt;[email protected]&gt; <ide> * [chrisdickinson](https://github.com/chrisdickinson) - <ide> **Chris Dickinson** &lt;[email protected]&gt; <add>* [indutny](https://github.com/indutny) - <add>**Fedor Indutny** &lt;[email protected]&gt; <ide> * [isaacs](https://github.com/isaacs) - <ide> **Isaac Z. Schlueter** &lt;[email protected]&gt; <ide> * [joshgav](https://github.com/joshgav) -
1
Ruby
Ruby
use inject rather than multiple assignments
d2405a0aab6b246842163cdb1a40d2d300b7b879
<ide><path>activesupport/lib/active_support/callbacks.rb <ide> def initialize_copy(other) <ide> def compile <ide> return @callbacks if @callbacks <ide> <del> @callbacks = Filters::ENDING <del> @chain.reverse_each do |callback| <del> @callbacks = callback.apply(@callbacks) <add> @callbacks = @chain.reverse.inject(Filters::ENDING) do |chain, callback| <add> callback.apply chain <ide> end <del> @callbacks <ide> end <ide> <ide> def append(*callbacks)
1
PHP
PHP
add mac_address validation message
f79296dcd548c0ced169f6453d75f231fee407fc
<ide><path>resources/lang/en/validation.php <ide> 'ip' => 'The :attribute must be a valid IP address.', <ide> 'ipv4' => 'The :attribute must be a valid IPv4 address.', <ide> 'ipv6' => 'The :attribute must be a valid IPv6 address.', <add> 'mac_address' => 'The :attribute must be a valid MAC address.', <ide> 'json' => 'The :attribute must be a valid JSON string.', <ide> 'lt' => [ <ide> 'numeric' => 'The :attribute must be less than :value.',
1
PHP
PHP
add missing typehints to core classes
5c3da82f0fdcbb12335f38f77714d20c8bf4a5aa
<ide><path>src/Cache/CacheRegistry.php <ide> protected function _resolveClassName($class) <ide> * @return void <ide> * @throws \BadMethodCallException <ide> */ <del> protected function _throwMissingClassError($class, $plugin) <add> protected function _throwMissingClassError(string $class, string $plugin): void <ide> { <ide> throw new BadMethodCallException(sprintf('Cache engine %s is not available.', $class)); <ide> } <ide> protected function _throwMissingClassError($class, $plugin) <ide> * @return \Cake\Cache\CacheEngine The constructed CacheEngine class. <ide> * @throws \RuntimeException when an object doesn't implement the correct interface. <ide> */ <del> protected function _create($class, $alias, $config) <add> protected function _create($class, string $alias, array $config) <ide> { <ide> if (is_object($class)) { <ide> $instance = $class; <ide> protected function _create($class, $alias, $config) <ide> * Remove a single adapter from the registry. <ide> * <ide> * @param string $name The adapter name. <del> * @return void <add> * @return $this <ide> */ <del> public function unload(string $name) <add> public function unload(string $name): ObjectRegistry <ide> { <ide> unset($this->_loaded[$name]); <add> <add> return $this; <ide> } <ide> } <ide><path>src/Core/App.php <ide> public static function shortName(string $class, string $type, string $suffix = ' <ide> * @param string $namespace Namespace. <ide> * @return bool <ide> */ <del> protected static function _classExistsInBase($name, $namespace) <add> protected static function _classExistsInBase(string $name, string $namespace): bool <ide> { <ide> return class_exists($namespace . $name); <ide> } <ide><path>src/Core/BasePlugin.php <ide> */ <ide> namespace Cake\Core; <ide> <add>use Cake\Console\CommandCollection; <add>use Cake\Http\MiddlewareQueue; <add>use Cake\Routing\RouteBuilder; <ide> use InvalidArgumentException; <ide> use ReflectionClass; <ide> <ide> public function __construct(array $options = []) <ide> } <ide> <ide> /** <del> * {@inheritdoc} <add> * Initialization hook called from constructor. <add> * <add> * @return void <ide> */ <del> public function initialize() <add> public function initialize(): void <ide> { <ide> } <ide> <ide> protected function checkHook(string $hook): void <ide> /** <ide> * {@inheritdoc} <ide> */ <del> public function routes($routes) <add> public function routes(RouteBuilder $routes): void <ide> { <ide> $path = $this->getConfigPath() . 'routes.php'; <ide> if (file_exists($path)) { <ide> public function routes($routes) <ide> /** <ide> * {@inheritdoc} <ide> */ <del> public function bootstrap(PluginApplicationInterface $app) <add> public function bootstrap(PluginApplicationInterface $app): void <ide> { <ide> $bootstrap = $this->getConfigPath() . 
'bootstrap.php'; <ide> if (file_exists($bootstrap)) { <ide> public function bootstrap(PluginApplicationInterface $app) <ide> /** <ide> * {@inheritdoc} <ide> */ <del> public function console($commands) <add> public function console(CommandCollection $commands): CommandCollection <ide> { <ide> return $commands->addMany($commands->discoverPlugin($this->getName())); <ide> } <ide> <ide> /** <ide> * {@inheritdoc} <ide> */ <del> public function middleware($middleware) <add> public function middleware(MiddlewareQueue $middleware): MiddlewareQueue <ide> { <ide> return $middleware; <ide> } <ide><path>src/Core/ClassLoader.php <ide> class ClassLoader <ide> * <ide> * @return void <ide> */ <del> public function register() <add> public function register(): void <ide> { <ide> spl_autoload_register([$this, 'loadClass']); <ide> } <ide> public function register() <ide> * than last. <ide> * @return void <ide> */ <del> public function addNamespace($prefix, $baseDir, $prepend = false) <add> public function addNamespace(string $prefix, string $baseDir, bool $prepend = false): void <ide> { <ide> $prefix = trim($prefix, '\\') . '\\'; <ide> <ide> public function addNamespace($prefix, $baseDir, $prepend = false) <ide> * @return string|false The mapped file name on success, or boolean false on <ide> * failure. <ide> */ <del> public function loadClass($class) <add> public function loadClass(string $class) <ide> { <ide> $prefix = $class; <ide> <ide> public function loadClass($class) <ide> * @return mixed Boolean false if no mapped file can be loaded, or the <ide> * name of the mapped file that was loaded. <ide> */ <del> protected function _loadMappedFile($prefix, $relativeClass) <add> protected function _loadMappedFile(string $prefix, string $relativeClass) <ide> { <ide> if (!isset($this->_prefixes[$prefix])) { <ide> return false; <ide> protected function _loadMappedFile($prefix, $relativeClass) <ide> * @param string $file The file to require. <ide> * @return bool True if the file exists, false if not. <ide> */ <del> protected function _requireFile($file) <add> protected function _requireFile(string $file): bool <ide> { <ide> if (file_exists($file)) { <ide> require $file; <ide><path>src/Core/Configure.php <ide> public static function dump(string $key, string $config = 'default', array $keys <ide> * Will create new PhpConfig for default if not configured yet. <ide> * <ide> * @param string $config The name of the configured adapter <del> * @return \Cake\Core\Configure\ConfigEngineInterface Engine instance or null <add> * @return \Cake\Core\Configure\ConfigEngineInterface|null Engine instance or null <ide> */ <del> protected static function _getEngine(string $config) <add> protected static function _getEngine(string $config): ?ConfigEngineInterface <ide> { <ide> if (!isset(static::$_engines[$config])) { <ide> if ($config !== 'default') { <ide><path>src/Core/Configure/Engine/IniConfig.php <ide> class IniConfig implements ConfigEngineInterface <ide> * @param string|null $section Only get one section, leave null to parse and fetch <ide> * all sections in the ini file. <ide> */ <del> public function __construct($path = null, $section = null) <add> public function __construct(?string $path = null, ?string $section = null) <ide> { <ide> if ($path === null) { <ide> $path = CONFIG; <ide><path>src/Core/Configure/Engine/JsonConfig.php <ide> class JsonConfig implements ConfigEngineInterface <ide> * <ide> * @param string|null $path The path to read config files from. Defaults to CONFIG. 
<ide> */ <del> public function __construct($path = null) <add> public function __construct(?string $path = null) <ide> { <ide> if ($path === null) { <ide> $path = CONFIG; <ide><path>src/Core/Configure/Engine/PhpConfig.php <ide> class PhpConfig implements ConfigEngineInterface <ide> * <ide> * @param string|null $path The path to read config files from. Defaults to CONFIG. <ide> */ <del> public function __construct($path = null) <add> public function __construct(?string $path = null) <ide> { <ide> if ($path === null) { <ide> $path = CONFIG; <ide><path>src/Core/Exception/Exception.php <ide> public function __construct($message = '', $code = null, $previous = null) <ide> * <ide> * @return array <ide> */ <del> public function getAttributes() <add> public function getAttributes(): array <ide> { <ide> return $this->_attributes; <ide> } <ide> public function getAttributes() <ide> * @param string|null $value The header value. <ide> * @return array <ide> */ <del> public function responseHeader($header = null, $value = null) <add> public function responseHeader($header = null, $value = null): array <ide> { <ide> if ($header === null) { <ide> return $this->_responseHeaders; <ide><path>src/Core/InstanceConfigTrait.php <ide> public function configShallow($key, $value = null) <ide> * @param string|null $key Key to read. <ide> * @return mixed <ide> */ <del> protected function _configRead($key) <add> protected function _configRead(?string $key) <ide> { <ide> if ($key === null) { <ide> return $this->_config; <ide> protected function _configRead($key) <ide> * @return void <ide> * @throws \Cake\Core\Exception\Exception if attempting to clobber existing config <ide> */ <del> protected function _configWrite($key, $value, $merge = false) <add> protected function _configWrite($key, $value, $merge = false): void <ide> { <ide> if (is_string($key) && $value === null) { <ide> $this->_configDelete($key); <ide><path>src/Core/ObjectRegistry.php <ide> abstract class ObjectRegistry implements Countable, IteratorAggregate <ide> * @return mixed <ide> * @throws \Exception If the class cannot be found. <ide> */ <del> public function load($objectName, array $config = []) <add> public function load(string $objectName, array $config = []) <ide> { <ide> if (is_array($config) && isset($config['className'])) { <ide> $name = $objectName; <ide> abstract protected function _resolveClassName($class); <ide> * @return void <ide> * @throws \Exception <ide> */ <del> abstract protected function _throwMissingClassError($class, $plugin); <add> abstract protected function _throwMissingClassError(string $class, string $plugin): void; <ide> <ide> /** <ide> * Create an instance of a given classname. <ide> abstract protected function _throwMissingClassError($class, $plugin); <ide> * @param array $config The Configuration settings for construction <ide> * @return object <ide> */ <del> abstract protected function _create($class, $alias, $config); <add> abstract protected function _create($class, string $alias, array $config); <ide> <ide> /** <ide> * Get the list of loaded objects. <ide> public function get(string $name) <ide> * @param string $name Name of property to read <ide> * @return mixed <ide> */ <del> public function __get(string $name) <add> public function __get($name) <ide> { <ide> return $this->get($name); <ide> } <ide> public function __get(string $name) <ide> * @param string $name Name of object being checked. 
<ide> * @return bool <ide> */ <del> public function __isset(string $name) <add> public function __isset($name) <ide> { <ide> return isset($this->_loaded[$name]); <ide> } <ide> public function __isset(string $name) <ide> * @param mixed $object Object to set. <ide> * @return void <ide> */ <del> public function __set(string $name, $object) <add> public function __set($name, $object) <ide> { <ide> $this->set($name, $object); <ide> } <ide> public function __set(string $name, $object) <ide> * @param string $name Name of a property to unset. <ide> * @return void <ide> */ <del> public function __unset(string $name) <add> public function __unset($name) <ide> { <ide> $this->unload($name); <ide> } <ide> public function normalizeArray(array $objects): array <ide> * <ide> * @return $this <ide> */ <del> public function reset() <add> public function reset(): self <ide> { <ide> foreach (array_keys($this->_loaded) as $name) { <ide> $this->unload($name); <ide> public function reset() <ide> * @param object $object instance to store in the registry <ide> * @return $this <ide> */ <del> public function set(string $objectName, $object) <add> public function set(string $objectName, $object): self <ide> { <ide> list(, $name) = pluginSplit($objectName); <ide> <ide> public function set(string $objectName, $object) <ide> * @param string $objectName The name of the object to remove from the registry. <ide> * @return $this <ide> */ <del> public function unload(string $objectName) <add> public function unload(string $objectName): self <ide> { <ide> if (empty($this->_loaded[$objectName])) { <ide> list($plugin, $objectName) = pluginSplit($objectName); <ide> public function unload(string $objectName) <ide> * <ide> * @return \ArrayIterator <ide> */ <del> public function getIterator() <add> public function getIterator(): ArrayIterator <ide> { <ide> return new ArrayIterator($this->_loaded); <ide> } <ide> public function count(): int <ide> * <ide> * @return array <ide> */ <del> public function __debugInfo(): array <add> public function __debugInfo() <ide> { <ide> $properties = get_object_vars($this); <ide> if (isset($properties['_loaded'])) { <ide><path>src/Core/Plugin.php <ide> public static function unload(?string $plugin = null): void <ide> * @internal <ide> * @return \Cake\Core\PluginCollection <ide> */ <del> public static function getCollection() <add> public static function getCollection(): PluginCollection <ide> { <ide> if (!isset(static::$plugins)) { <ide> static::$plugins = new PluginCollection(); <ide><path>src/Core/PluginCollection.php <ide> <ide> use Cake\Core\Exception\MissingPluginException; <ide> use Countable; <add>use Generator; <ide> use InvalidArgumentException; <ide> use Iterator; <ide> <ide> public function get(string $name): PluginInterface <ide> * <ide> * @return void <ide> */ <del> public function next() <add> public function next(): void <ide> { <ide> $this->position++; <ide> } <ide> public function key(): string <ide> * <ide> * @return \Cake\Core\PluginInterface <ide> */ <del> public function current() <add> public function current(): PluginInterface <ide> { <ide> $name = $this->names[$this->position]; <ide> <ide> public function count(): int <ide> * @return \Generator A generator containing matching plugins. 
<ide> * @throws \InvalidArgumentException on invalid hooks <ide> */ <del> public function with(string $hook) <add> public function with(string $hook): Generator <ide> { <ide> if (!in_array($hook, PluginInterface::VALID_HOOKS)) { <ide> throw new InvalidArgumentException("The `{$hook}` hook is not a known plugin hook."); <ide><path>src/Core/PluginInterface.php <ide> */ <ide> namespace Cake\Core; <ide> <add>use Cake\Console\CommandCollection; <add>use Cake\Http\MiddlewareQueue; <add>use Cake\Routing\RouteBuilder; <add> <ide> /** <ide> * Plugin Interface <ide> */ <ide> public function getClassPath(): string; <ide> * @param \Cake\Core\PluginApplicationInterface $app The host application <ide> * @return void <ide> */ <del> public function bootstrap(PluginApplicationInterface $app); <add> public function bootstrap(PluginApplicationInterface $app): void; <ide> <ide> /** <ide> * Add console commands for the plugin. <ide> * <ide> * @param \Cake\Console\CommandCollection $commands The command collection to update <ide> * @return \Cake\Console\CommandCollection <ide> */ <del> public function console($commands); <add> public function console(CommandCollection $commands): CommandCollection; <ide> <ide> /** <ide> * Add middleware for the plugin. <ide> * <ide> * @param \Cake\Http\MiddlewareQueue $middleware The middleware queue to update. <ide> * @return \Cake\Http\MiddlewareQueue <ide> */ <del> public function middleware($middleware); <add> public function middleware(MiddlewareQueue $middleware): MiddlewareQueue; <ide> <ide> /** <ide> * Add routes for the plugin. <ide> public function middleware($middleware); <ide> * @param \Cake\Routing\RouteBuilder $routes The route builder to update. <ide> * @return void <ide> */ <del> public function routes($routes); <add> public function routes(RouteBuilder $routes): void; <ide> <ide> /** <ide> * Disables the named hook <ide> * <ide> * @param string $hook The hook to disable <ide> * @return $this <ide> */ <del> public function disable(string $hook): self; <add> public function disable(string $hook): PluginInterface; <ide> <ide> /** <ide> * Enables the named hook <ide> * <ide> * @param string $hook The hook to disable <ide> * @return $this <ide> */ <del> public function enable(string $hook): self; <add> public function enable(string $hook): PluginInterface; <ide> <ide> /** <ide> * Check if the named hook is enabled <ide><path>src/Core/Retry/CommandRetry.php <ide> class CommandRetry <ide> * @param \Cake\Core\Retry\RetryStrategyInterface $strategy The strategy to follow should the action fail <ide> * @param int $retries The number of times the action has been already called <ide> */ <del> public function __construct(RetryStrategyInterface $strategy, $retries = 1) <add> public function __construct(RetryStrategyInterface $strategy, int $retries = 1) <ide> { <ide> $this->strategy = $strategy; <ide> $this->retries = $retries; <ide><path>tests/TestCase/Core/Retry/CommandRetryTest.php <ide> <?php <add>declare(strict_types=1); <ide> /** <ide> * CakePHP(tm) : Rapid Development Framework (https://cakephp.org) <ide> * Copyright (c) Cake Software Foundation, Inc. 
(https://cakefoundation.org) <ide><path>tests/test_app/Plugin/TestPlugin/src/Plugin.php <ide> <ide> use Cake\Core\BasePlugin; <ide> use Cake\Event\EventManagerInterface; <add>use Cake\Http\MiddlewareQueue; <ide> <ide> class Plugin extends BasePlugin <ide> { <ide> public function events(EventManagerInterface $events) <ide> return $events; <ide> } <ide> <del> public function middleware($middleware) <add> public function middleware(MiddlewareQueue $middleware): MiddlewareQueue <ide> { <ide> $middleware->add(function ($req, $res, $next) { <ide> return $next($req, $res); <ide><path>tests/test_app/TestApp/Core/TestApp.php <ide> class TestApp extends App <ide> { <ide> public static $existsInBaseCallback; <ide> <del> protected static function _classExistsInBase($name, $namespace) <add> protected static function _classExistsInBase(string $name, string $namespace): bool <ide> { <ide> $callback = static::$existsInBaseCallback; <ide>
18
Ruby
Ruby
add uniq_by and uniq_by! to array
0361414ae328c10de8ed778e826d8244ba0aa63a
<ide><path>activesupport/lib/active_support/core_ext/array.rb <ide> require 'active_support/core_ext/array/wrap' <ide> require 'active_support/core_ext/array/access' <add>require 'active_support/core_ext/array/uniq_by' <ide> require 'active_support/core_ext/array/conversions' <ide> require 'active_support/core_ext/array/extract_options' <ide> require 'active_support/core_ext/array/grouping' <ide><path>activesupport/lib/active_support/core_ext/array/uniq_by.rb <add>class Array <add> # Return an unique array based on the criteria given as a proc. <add> # <add> # [1, 2, 3, 4].uniq_by { |i| i.odd? } <add> # #=> [1, 2] <add> # <add> def uniq_by <add> hash, array = {}, [] <add> each { |i| hash[yield(i)] ||= (array << i) } <add> array <add> end <add> <add> # Same as uniq_by, but modifies self. <add> def uniq_by! <add> replace(uniq_by{ |i| yield(i) }) <add> end <add>end <ide><path>activesupport/test/core_ext/array_ext_test.rb <ide> def test_to_param_array <ide> end <ide> end <ide> <del>class ArrayExtToSentenceTests < Test::Unit::TestCase <del> include ActiveSupport::Testing::Deprecation <del> <add>class ArrayExtToSentenceTests < ActiveSupport::TestCase <ide> def test_plain_array_to_sentence <ide> assert_equal "", [].to_sentence <ide> assert_equal "one", ['one'].to_sentence <ide> def test_extract_options <ide> end <ide> end <ide> <add>class ArrayUniqByTests < Test::Unit::TestCase <add> def test_uniq_by <add> assert_equal [1,2], [1,2,3,4].uniq_by { |i| i.odd? } <add> assert_equal [1,2], [1,2,3,4].uniq_by(&:even?) <add> assert_equal (-5..0).to_a, (-5..5).to_a.uniq_by{ |i| i**2 } <add> end <add> <add> def test_uniq_by! <add> a = [1,2,3,4] <add> a.uniq_by! { |i| i.odd? } <add> assert_equal [1,2], a <add> <add> a = [1,2,3,4] <add> a.uniq_by! { |i| i.even? } <add> assert_equal [1,2], a <add> <add> a = (-5..5).to_a <add> a.uniq_by! { |i| i**2 } <add> assert_equal (-5..0).to_a, a <add> end <add>end <add> <ide> class ArrayExtRandomTests < Test::Unit::TestCase <ide> def test_random_element_from_array <ide> assert_nil [].rand
3
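The `uniq_by` patch above keeps the first element seen for each distinct value returned by the block: `hash[yield(i)] ||= (array << i)` only evaluates the append when the key is new. As a rough, hypothetical analogue for readers more familiar with Python — not part of the Rails change itself — the same first-occurrence-per-key idea can be sketched like this:

```python
# Hypothetical Python sketch of the uniq_by idea above; the helper name and
# signature are illustrative, not Rails API.
def uniq_by(items, key):
    seen = {}
    for item in items:
        seen.setdefault(key(item), item)  # only the first item per key is stored
    return list(seen.values())            # dict preserves insertion order (3.7+)

print(uniq_by([1, 2, 3, 4], key=lambda i: i % 2 == 1))  # [1, 2]
```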
Javascript
Javascript
improve test coverage of internal/worker/io
10b043287c3fe96f677e95a0229c520a57105316
<ide><path>test/parallel/test-broadcastchannel-custom-inspect.js <add>'use strict'; <add> <add>require('../common'); <add>const { BroadcastChannel } = require('worker_threads'); <add>const { inspect } = require('util'); <add>const assert = require('assert'); <add> <add>// This test checks BroadcastChannel custom inspect outputs <add> <add>{ <add> const bc = new BroadcastChannel('name'); <add> assert.throws(() => bc[inspect.custom].call(), { <add> code: 'ERR_INVALID_THIS', <add> }); <add> bc.close(); <add>} <add> <add>{ <add> const bc = new BroadcastChannel('name'); <add> assert.strictEqual(inspect(bc, { depth: -1 }), 'BroadcastChannel'); <add> bc.close(); <add>} <add> <add>{ <add> const bc = new BroadcastChannel('name'); <add> assert.strictEqual( <add> inspect(bc), <add> "BroadcastChannel { name: 'name', active: true }" <add> ); <add> bc.close(); <add>} <add> <add>{ <add> const bc = new BroadcastChannel('name'); <add> assert.strictEqual( <add> inspect(bc, { depth: null }), <add> "BroadcastChannel { name: 'name', active: true }" <add> ); <add> bc.close(); <add>}
1
PHP
PHP
apply fixes from styleci
fdeb01e367ba299e7f5e5accc733c2472b951f7e
<ide><path>src/Illuminate/Support/MessageBag.php <ide> <ide> use Countable; <ide> use JsonSerializable; <del>use Illuminate\Support\Arr; <ide> use Illuminate\Contracts\Support\Jsonable; <ide> use Illuminate\Contracts\Support\Arrayable; <ide> use Illuminate\Contracts\Support\MessageProvider;
1
Ruby
Ruby
fix regex placement
e4ef1f062e7af00db859e205043b127afe33272f
<ide><path>Library/Homebrew/dev-cmd/test-bot.rb <ide> def diff_formulae(start_revision, end_revision, path, filter) <ide> @short_url = @url.gsub("https://github.com/", "") <ide> if @short_url.include? "/commit/" <ide> # 7 characters should be enough for a commit (not 40). <del> @short_url.gsub!(%r{(commit/\w{7}).*/, '\1'}) <add> @short_url.gsub!(%r{(commit/\w{7}).*/}, '\1') <ide> @name = @short_url <ide> else <ide> @name = "#{@short_url}-#{diff_end_sha1}" <ide> def formula(formula_name) <ide> bottle_step = steps.last <ide> if bottle_step.passed? && bottle_step.output? <ide> bottle_filename = <del> bottle_step.output.gsub(%r{.*(\./\S+#{Utils::Bottles.native_regex}).*/m, '\1'}) <add> bottle_step.output.gsub(%r{.*(\./\S+#{Utils::Bottles.native_regex}).*/m}, '\1') <ide> bottle_json_filename = bottle_filename.gsub(/\.(\d+\.)?tar\.gz$/, ".json") <ide> bottle_merge_args = ["--merge", "--write", "--no-commit", bottle_json_filename] <ide> bottle_merge_args << "--keep-old" if ARGV.include? "--keep-old"
1
Python
Python
remove unused argument in private function
0dbc9ad1454aab5044ab0a14b9094db1a3c7c027
<ide><path>numpy/lib/function_base.py <ide> def _median(a, axis=None, out=None, overwrite_input=False): <ide> indexer[axis] = slice(index-1, index+1) <ide> indexer = tuple(indexer) <ide> <add> # Use mean in both odd and even case to coerce data type, <add> # using out array if needed. <add> rout = mean(part[indexer], axis=axis, out=out) <ide> # Check if the array contains any nan's <ide> if np.issubdtype(a.dtype, np.inexact) and sz > 0: <del> # warn and return nans like mean would <del> rout = mean(part[indexer], axis=axis, out=out) <del> return np.lib.utils._median_nancheck(part, rout, axis, out) <del> else: <del> # if there are no nans <del> # Use mean in odd and even case to coerce data type <del> # and check, use out array. <del> return mean(part[indexer], axis=axis, out=out) <add> # If nans are possible, warn and replace by nans like mean would. <add> rout = np.lib.utils._median_nancheck(part, rout, axis) <add> <add> return rout <ide> <ide> <ide> def _percentile_dispatcher(a, q, axis=None, out=None, overwrite_input=None, <ide><path>numpy/lib/utils.py <ide> def safe_eval(source): <ide> return ast.literal_eval(source) <ide> <ide> <del>def _median_nancheck(data, result, axis, out): <add>def _median_nancheck(data, result, axis): <ide> """ <ide> Utility function to check median result from data for NaN values at the end <ide> and return NaN in that case. Input result can also be a MaskedArray. <ide> def _median_nancheck(data, result, axis, out): <ide> data : array <ide> Input data to median function <ide> result : Array or MaskedArray <del> Result of median function <add> Result of median function. <ide> axis : int <ide> Axis along which the median was computed. <del> out : ndarray, optional <del> Output array in which to place the result. <ide> <ide> Returns <ide> ------- <del> median : scalar or ndarray <del> Median or NaN in axes which contained NaN in the input. <add> result : scalar or ndarray <add> Median or NaN in axes which contained NaN in the input. If the input <add> was an array, NaN will be inserted in-place. If a scalar, either the <add> input itself or a scalar NaN. <ide> """ <ide> if data.size == 0: <ide> return result <ide><path>numpy/ma/extras.py <ide> def _median(a, axis=None, out=None, overwrite_input=False): <ide> s = mid.sum(out=out) <ide> if not odd: <ide> s = np.true_divide(s, 2., casting='safe', out=out) <del> s = np.lib.utils._median_nancheck(asorted, s, axis, out) <add> s = np.lib.utils._median_nancheck(asorted, s, axis) <ide> else: <ide> s = mid.mean(out=out) <ide> <ide> def replace_masked(s): <ide> s = np.ma.sum(low_high, axis=axis, out=out) <ide> np.true_divide(s.data, 2., casting='unsafe', out=s.data) <ide> <del> s = np.lib.utils._median_nancheck(asorted, s, axis, out) <add> s = np.lib.utils._median_nancheck(asorted, s, axis) <ide> else: <ide> s = np.ma.mean(low_high, axis=axis, out=out) <ide>
3
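The refactor above always computes the mean-based result first and only routes it through `_median_nancheck` when the dtype is inexact (i.e. can hold NaN), which is what lets the now-unused `out` parameter be dropped. A minimal sketch of the user-visible behaviour this helper supports, assuming a recent NumPy (the exact warning, if any, may differ by version):

```python
import numpy as np

a = np.array([1.0, 2.0, np.nan, 4.0])

# median propagates NaN when the input contains NaN...
print(np.median(a))      # nan (a RuntimeWarning may also be emitted)

# ...while nanmedian ignores the NaN entries.
print(np.nanmedian(a))   # 2.0
```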
Ruby
Ruby
remove unused code in actionview
befec8a0d83bc61238680b584688470069efb23b
<ide><path>actionpack/lib/action_view.rb <ide> def self.load_all! <ide> autoload :Base, 'action_view/base' <ide> autoload :Context, 'action_view/context' <ide> autoload :Helpers, 'action_view/helpers' <del> autoload :InlineTemplate, 'action_view/template/inline' <ide> autoload :MissingTemplate, 'action_view/base' <ide> autoload :Partials, 'action_view/render/partials' <ide> autoload :Resolver, 'action_view/template/resolver' <ide> autoload :PathResolver, 'action_view/template/resolver' <ide> autoload :PathSet, 'action_view/paths' <ide> autoload :Rendering, 'action_view/render/rendering' <del> autoload :Renderable, 'action_view/template/renderable' <del> autoload :RenderablePartial, 'action_view/template/partial' <ide> autoload :Template, 'action_view/template/template' <ide> autoload :TemplateError, 'action_view/template/error' <ide> autoload :TemplateHandler, 'action_view/template/handler' <ide><path>actionpack/lib/action_view/template/inline.rb <del>module ActionView #:nodoc: <del> class InlineTemplate #:nodoc: <del> include Renderable <del> <del> attr_reader :source, :extension, :method_segment <del> <del> def initialize(source, type = nil) <del> @source = source <del> @extension = type <del> @method_segment = "inline_#{@source.hash.abs}" <del> end <del> <del> private <del> # Always recompile inline templates <del> def recompile? <del> true <del> end <del> end <del>end <ide><path>actionpack/lib/action_view/template/partial.rb <del>module ActionView <del> # NOTE: The template that this mixin is being included into is frozen <del> # so you cannot set or modify any instance variables <del> module RenderablePartial #:nodoc: <del> extend ActiveSupport::Memoizable <del> <del> def variable_name <del> name.sub(/\A_/, '').to_sym <del> end <del> memoize :variable_name <del> <del> def counter_name <del> "#{variable_name}_counter".to_sym <del> end <del> memoize :counter_name <del> <del> end <del>end <ide><path>actionpack/lib/action_view/template/renderable.rb <del># encoding: utf-8 <del> <del>module ActionView <del> # NOTE: The template that this mixin is being included into is frozen <del> # so you cannot set or modify any instance variables <del> module Renderable #:nodoc: <del> extend ActiveSupport::Memoizable <del> <del> def render(view, locals) <del> compile(locals) <del> view.send(method_name(locals), locals) {|*args| yield(*args) } <del> end <del> <del> def load! <del> names = CompiledTemplates.instance_methods.grep(/#{method_name_without_locals}/) <del> names.each do |name| <del> CompiledTemplates.class_eval do <del> remove_method(name) <del> end <del> end <del> super <del> end <del> <del> private <del> <del> def filename <del> 'compiled-template' <del> end <del> <del> def handler <del> Template.handler_class_for_extension(extension) <del> end <del> memoize :handler <del> <del> def compiled_source <del> handler.call(self) <del> end <del> memoize :compiled_source <del> <del> def method_name_without_locals <del> ['_run', extension, method_segment].compact.join('_') <del> end <del> memoize :method_name_without_locals <del> <del> def method_name(local_assigns) <del> if local_assigns && local_assigns.any? 
<del> method_name = method_name_without_locals.dup <del> method_name << "_locals_#{local_assigns.keys.map { |k| k.to_s }.sort.join('_')}" <del> else <del> method_name = method_name_without_locals <del> end <del> method_name.to_sym <del> end <del> <del> # Compile and evaluate the template's code (if necessary) <del> def compile(local_assigns) <del> render_symbol = method_name(local_assigns) <del> <del> if !CompiledTemplates.method_defined?(render_symbol) || recompile? <del> compile!(render_symbol, local_assigns) <del> end <del> end <del> <del> private <del> def compile!(render_symbol, local_assigns) <del> locals_code = local_assigns.keys.map { |key| "#{key} = local_assigns[:#{key}];" }.join <del> <del> source = <<-end_src <del> def #{render_symbol}(local_assigns) <del> old_output_buffer = output_buffer;#{locals_code};#{compiled_source} <del> ensure <del> self.output_buffer = old_output_buffer <del> end <del> end_src <del> <del> begin <del> ActionView::CompiledTemplates.module_eval(source, filename.to_s, 0) <del> rescue Exception => e # errors from template code <del> if logger = defined?(ActionController) && Base.logger <del> logger.debug "ERROR: compiling #{render_symbol} RAISED #{e}" <del> logger.debug "Function body: #{source}" <del> logger.debug "Backtrace: #{e.backtrace.join("\n")}" <del> end <del> <del> raise ActionView::TemplateError.new(self, {}, e) <del> end <del> end <del> <del> def recompile? <del> false <del> end <del> end <del>end
4
PHP
PHP
avoid code duplication
4407cdb010ce497d89c5d87019dbf00f20291d66
<ide><path>lib/Cake/Model/Datasource/CakeSession.php <ide> protected static function _cookieName() { <ide> } <ide> <ide> self::init(); <add> self::_configureSession(); <ide> <del> $sessionConfig = Configure::read('Session'); <del> if (isset($sessionConfig['ini']['session.name'])) { <del> return self::$_cookieName = $sessionConfig['ini']['session.name']; <del> } <del> <del> $defaults = self::_defaultConfig($sessionConfig['defaults']); <del> if ($defaults) { <del> return self::$_cookieName = $defaults['cookie']; <del> } <del> <del> return self::$_cookieName = ini_get('session.name'); <add> return self::$_cookieName = session_name(); <ide> } <ide> <ide> /**
1
Text
Text
remove the word "very"
cbf4407aa8fab03bbe09076edf0ad0bbc9b610c7
<ide><path>README.md <ide> NumPy requires `pytest` and `hypothesis`. Tests can then be run after installat <ide> Code of Conduct <ide> ---------------------- <ide> <del>NumPy is a community-driven open source project developed by a very diverse group of <add>NumPy is a community-driven open source project developed by a diverse group of <ide> [contributors](https://numpy.org/gallery/team.html). The NumPy leadership has made a strong <ide> commitment to creating an open, inclusive, and positive community. Please read the <ide> [NumPy Code of Conduct](https://numpy.org/code-of-conduct/) for guidance on how to interact
1
Ruby
Ruby
simplify strings for search
6fcc5d14de042e5328a0d37972af35aa98f5a9eb
<ide><path>Library/Homebrew/cmd/desc.rb <ide> #: first search, making that search slower than subsequent ones. <ide> <ide> require "descriptions" <del>require "cmd/search" <add>require "search" <ide> <ide> module Homebrew <ide> module_function <ide> <add> extend Search <add> <ide> def desc <ide> search_type = [] <ide> search_type << :either if ARGV.flag? "--search" <ide> def desc <ide> results.print <ide> elsif search_type.size > 1 <ide> odie "Pick one, and only one, of -s/--search, -n/--name, or -d/--description." <del> elsif arg = ARGV.named.first <del> regex = Homebrew.query_regexp(arg) <add> elsif arg = ARGV.named.join(" ") <add> regex = query_regexp(arg) <ide> results = Descriptions.search(regex, search_type.first) <ide> results.print <ide> else <ide><path>Library/Homebrew/descriptions.rb <ide> require "formula" <ide> require "formula_versions" <add>require "search" <ide> <ide> class Descriptions <add> extend Homebrew::Search <add> <ide> CACHE_FILE = HOMEBREW_CACHE + "desc_cache.json" <ide> <ide> def self.cache <ide> def self.search(regex, field = :either) <ide> <ide> results = case field <ide> when :name <del> @cache.select { |name, _| name =~ regex } <add> @cache.select { |name, _| simplify_string(name).match?(regex) } <ide> when :desc <del> @cache.select { |_, desc| desc =~ regex } <add> @cache.select { |_, desc| simplify_string(desc).match?(regex) } <ide> when :either <del> @cache.select { |name, desc| (name =~ regex) || (desc =~ regex) } <add> @cache.select { |name, desc| simplify_string(name).match?(regex) || simplify_string(desc).match?(regex) } <ide> end <ide> <ide> new(results) <ide><path>Library/Homebrew/search.rb <ide> module Homebrew <ide> module Search <add> def simplify_string(string) <add> string.downcase.gsub(/[^a-z\d]/i, "") <add> end <add> <ide> def query_regexp(query) <ide> if m = query.match(%r{^/(.*)/$}) <ide> Regexp.new(m[1]) <ide> else <del> Regexp.new(query.chars.join('[^a-z\d]*'), Regexp::IGNORECASE) <add> Regexp.new(simplify_string(query), Regexp::IGNORECASE) <ide> end <ide> rescue RegexpError <ide> raise "#{query} is not a valid regex." 
<ide> def search_formulae(regex) <ide> $stderr.puts Formatter.headline("Searching local taps...", color: :blue) <ide> <ide> aliases = Formula.alias_full_names <del> results = (Formula.full_names + aliases).grep(regex).sort <add> results = (Formula.full_names + aliases) <add> .select { |name| simplify_string(name).match?(regex) } <add> .sort <ide> <ide> results.map do |name| <ide> begin <ide><path>Library/Homebrew/test/search_spec.rb <ide> end <ide> end <ide> <del> describe "#query_regexp" do <del> it "correctly parses a regex query" do <del> expect(mod.query_regexp("/^query$/")).to eq(/^query$/) <add> describe "#simplify_string" do <add> it "simplifies a query with dashes" do <add> expect(mod.query_regexp("que-ry")).to eq(/query/i) <ide> end <ide> <del> it "correctly converts a query string to a regex" do <del> expect(mod.query_regexp("query")).to eq(/q[^a-z\d]*u[^a-z\d]*e[^a-z\d]*r[^a-z\d]*y/i) <add> it "simplifies a query with @ symbols" do <add> expect(mod.query_regexp("query@1")).to eq(/query1/i) <ide> end <add> end <ide> <del> it "raises an error if the query is an invalid regex" do <del> expect { mod.query_regexp("/+/") }.to raise_error(/not a valid regex/) <add> describe "#query_regexp" do <add> it "correctly parses a regex query" do <add> expect(mod.query_regexp("/^query$/")).to eq(/^query$/) <ide> end <ide> <del> it "correctly matches with special symbols" do <del> regex = mod.query_regexp("oo-ba") <del> expect(regex).to match("foo-bar") <add> it "correctly converts a query string to a regex" do <add> expect(mod.query_regexp("query")).to eq(/query/i) <ide> end <ide> <del> it "correctly matches without special symbols" do <del> regex = mod.query_regexp("ooba") <del> expect(regex).to match("foo-bar") <add> it "simplifies a query with special symbols" do <add> expect(mod.query_regexp("que-ry")).to eq(/query/i) <ide> end <ide> <del> it "keeps special symbols" do <del> regex = mod.query_regexp("foo-bar") <del> expect(regex).not_to match("foobar") <add> it "raises an error if the query is an invalid regex" do <add> expect { mod.query_regexp("/+/") }.to raise_error(/not a valid regex/) <ide> end <ide> end <ide> end
4
Go
Go
remove some uses of testutil.helpert
0d4ffa3588031ba544a5a6b1ac175dfa0005f147
<ide><path>testutil/daemon/daemon.go <ide> func NewDaemon(workingDir string, ops ...Option) (*Daemon, error) { <ide> // $DOCKER_INTEGRATION_DAEMON_DEST or $DEST. <ide> // The daemon will not automatically start. <ide> func New(t testing.TB, ops ...Option) *Daemon { <del> if ht, ok := t.(testutil.HelperT); ok { <del> ht.Helper() <del> } <add> t.Helper() <ide> dest := os.Getenv("DOCKER_INTEGRATION_DAEMON_DEST") <ide> if dest == "" { <ide> dest = os.Getenv("DEST") <ide> func (d *Daemon) NewClient(extraOpts ...client.Opt) (*client.Client, error) { <ide> <ide> // Cleanup cleans the daemon files : exec root (network namespaces, ...), swarmkit files <ide> func (d *Daemon) Cleanup(t testing.TB) { <del> if ht, ok := t.(testutil.HelperT); ok { <del> ht.Helper() <del> } <add> t.Helper() <ide> // Cleanup swarmkit wal files if present <ide> cleanupRaftDir(t, d.Root) <ide> cleanupNetworkNamespace(t, d.execRoot) <ide> } <ide> <ide> // Start starts the daemon and return once it is ready to receive requests. <ide> func (d *Daemon) Start(t testing.TB, args ...string) { <del> if ht, ok := t.(testutil.HelperT); ok { <del> ht.Helper() <del> } <add> t.Helper() <ide> if err := d.StartWithError(args...); err != nil { <ide> t.Fatalf("failed to start daemon with arguments %v : %v", args, err) <ide> } <ide> func (d *Daemon) StartWithLogFile(out *os.File, providedArgs ...string) error { <ide> // StartWithBusybox will first start the daemon with Daemon.Start() <ide> // then save the busybox image from the main daemon and load it into this Daemon instance. <ide> func (d *Daemon) StartWithBusybox(t testing.TB, arg ...string) { <del> if ht, ok := t.(testutil.HelperT); ok { <del> ht.Helper() <del> } <add> t.Helper() <ide> d.Start(t, arg...) <ide> d.LoadBusybox(t) <ide> } <ide> func (d *Daemon) DumpStackAndQuit() { <ide> // instantiate a new one with NewDaemon. <ide> // If an error occurs while starting the daemon, the test will fail. <ide> func (d *Daemon) Stop(t testing.TB) { <del> if ht, ok := t.(testutil.HelperT); ok { <del> ht.Helper() <del> } <add> t.Helper() <ide> err := d.StopWithError() <ide> if err != nil { <ide> if err != errDaemonNotStarted { <ide> out2: <ide> // Restart will restart the daemon by first stopping it and the starting it. <ide> // If an error occurs while starting the daemon, the test will fail. <ide> func (d *Daemon) Restart(t testing.TB, args ...string) { <del> if ht, ok := t.(testutil.HelperT); ok { <del> ht.Helper() <del> } <add> t.Helper() <ide> d.Stop(t) <ide> d.Start(t, args...) 
<ide> } <ide> func (d *Daemon) Info(t assert.TestingT) types.Info { <ide> } <ide> <ide> func cleanupRaftDir(t testing.TB, rootPath string) { <del> if ht, ok := t.(testutil.HelperT); ok { <del> ht.Helper() <del> } <add> t.Helper() <ide> for _, p := range []string{"wal", "wal-v3-encrypted", "snap-v3-encrypted"} { <ide> dir := filepath.Join(rootPath, "swarm/raft", p) <ide> if err := os.RemoveAll(dir); err != nil { <ide><path>testutil/daemon/daemon_unix.go <ide> import ( <ide> "strings" <ide> "testing" <ide> <del> "github.com/docker/docker/testutil" <ide> "golang.org/x/sys/unix" <ide> "gotest.tools/assert" <ide> ) <ide> <ide> func cleanupNetworkNamespace(t testing.TB, execRoot string) { <del> if ht, ok := t.(testutil.HelperT); ok { <del> ht.Helper() <del> } <add> t.Helper() <ide> // Cleanup network namespaces in the exec root of this <ide> // daemon because this exec root is specific to this <ide> // daemon instance and has no chance of getting <ide><path>testutil/daemon/swarm.go <ide> var ( <ide> <ide> // StartNode (re)starts the daemon <ide> func (d *Daemon) StartNode(t testing.TB) { <del> if ht, ok := t.(testutil.HelperT); ok { <del> ht.Helper() <del> } <add> t.Helper() <ide> d.Start(t, startArgs...) <ide> } <ide> <ide> // StartNodeWithBusybox starts daemon to be used as a swarm node, and loads the busybox image <ide> func (d *Daemon) StartNodeWithBusybox(t testing.TB) { <del> if ht, ok := t.(testutil.HelperT); ok { <del> ht.Helper() <del> } <add> t.Helper() <ide> d.StartWithBusybox(t, startArgs...) <ide> } <ide> <ide> // RestartNode restarts a daemon to be used as a swarm node <ide> func (d *Daemon) RestartNode(t testing.TB) { <del> if ht, ok := t.(testutil.HelperT); ok { <del> ht.Helper() <del> } <add> t.Helper() <ide> // avoid networking conflicts <ide> d.Stop(t) <ide> d.Start(t, startArgs...) <ide> func (d *Daemon) StartAndSwarmInit(t testing.TB) { <ide> <ide> // StartAndSwarmJoin starts the daemon (with busybox) and join the specified swarm as worker or manager <ide> func (d *Daemon) StartAndSwarmJoin(t testing.TB, leader *Daemon, manager bool) { <del> if th, ok := t.(testutil.HelperT); ok { <del> th.Helper() <del> } <add> t.Helper() <ide> d.StartNodeWithBusybox(t) <ide> <ide> tokens := leader.JoinTokens(t) <ide><path>testutil/fakecontext/context.go <ide> import ( <ide> "testing" <ide> <ide> "github.com/docker/docker/pkg/archive" <del> "github.com/docker/docker/testutil" <ide> ) <ide> <ide> // New creates a fake build context <ide> func New(t testing.TB, dir string, modifiers ...func(*Fake) error) *Fake { <del> if ht, ok := t.(testutil.HelperT); ok { <del> ht.Helper() <del> } <add> t.Helper() <ide> fakeContext := &Fake{Dir: dir} <ide> if dir == "" { <ide> if err := newDir(fakeContext); err != nil { <ide> func (f *Fake) Close() error { <ide> <ide> // AsTarReader returns a ReadCloser with the contents of Dir as a tar archive. 
<ide> func (f *Fake) AsTarReader(t testing.TB) io.ReadCloser { <del> if ht, ok := t.(testutil.HelperT); ok { <del> ht.Helper() <del> } <add> t.Helper() <ide> reader, err := archive.TarWithOptions(f.Dir, &archive.TarOptions{}) <ide> if err != nil { <ide> t.Fatalf("Failed to create tar from %s: %s", f.Dir, err) <ide><path>testutil/fakegit/fakegit.go <ide> import ( <ide> "path/filepath" <ide> "testing" <ide> <del> "github.com/docker/docker/testutil" <ide> "github.com/docker/docker/testutil/fakecontext" <ide> "github.com/docker/docker/testutil/fakestorage" <ide> ) <ide> func (g *FakeGit) Close() { <ide> <ide> // New create a fake git server that can be used for git related tests <ide> func New(c testing.TB, name string, files map[string]string, enforceLocalServer bool) *FakeGit { <del> if ht, ok := c.(testutil.HelperT); ok { <del> ht.Helper() <del> } <add> c.Helper() <ide> ctx := fakecontext.New(c, "", fakecontext.WithFiles(files)) <ide> defer ctx.Close() <ide> curdir, err := os.Getwd() <ide><path>testutil/fakestorage/fixtures.go <ide> import ( <ide> <ide> "github.com/docker/docker/api/types" <ide> "github.com/docker/docker/pkg/archive" <del> "github.com/docker/docker/testutil" <ide> "gotest.tools/assert" <ide> ) <ide> <ide> var ensureHTTPServerOnce sync.Once <ide> <ide> func ensureHTTPServerImage(t testing.TB) { <del> if ht, ok := t.(testutil.HelperT); ok { <del> ht.Helper() <del> } <add> t.Helper() <ide> var doIt bool <ide> ensureHTTPServerOnce.Do(func() { <ide> doIt = true <ide><path>testutil/fakestorage/storage.go <ide> func SetTestEnvironment(env *environment.Execution) { <ide> <ide> // New returns a static file server that will be use as build context. <ide> func New(t testing.TB, dir string, modifiers ...func(*fakecontext.Fake) error) Fake { <del> if ht, ok := t.(testutil.HelperT); ok { <del> ht.Helper() <del> } <add> t.Helper() <ide> if testEnv == nil { <ide> t.Fatal("fakstorage package requires SetTestEnvironment() to be called before use.") <ide> } <ide><path>testutil/registry/registry.go <ide> type Config struct { <ide> <ide> // NewV2 creates a v2 registry server <ide> func NewV2(t testing.TB, ops ...func(*Config)) *V2 { <del> if ht, ok := t.(testutil.HelperT); ok { <del> ht.Helper() <del> } <add> t.Helper() <ide> c := &Config{ <ide> registryURL: DefaultURL, <ide> } <ide> http: <ide> <ide> // WaitReady waits for the registry to be ready to serve requests (or fail after a while) <ide> func (r *V2) WaitReady(t testing.TB) { <del> if ht, ok := t.(testutil.HelperT); ok { <del> ht.Helper() <del> } <add> t.Helper() <ide> var err error <ide> for i := 0; i != 50; i++ { <ide> if err = r.Ping(); err == nil { <ide> func (r *V2) WriteBlobContents(t assert.TestingT, blobDigest digest.Digest, data <ide> // TempMoveBlobData moves the existing data file aside, so that we can replace it with a <ide> // malicious blob of data for example. 
<ide> func (r *V2) TempMoveBlobData(t testing.TB, blobDigest digest.Digest) (undo func()) { <del> if ht, ok := t.(testutil.HelperT); ok { <del> ht.Helper() <del> } <add> t.Helper() <ide> tempFile, err := ioutil.TempFile("", "registry-temp-blob-") <ide> assert.NilError(t, err, "unable to get temporary blob file") <ide> tempFile.Close() <ide><path>testutil/registry/registry_mock.go <ide> import ( <ide> "strings" <ide> "sync" <ide> "testing" <del> <del> "github.com/docker/docker/testutil" <ide> ) <ide> <ide> type handlerFunc func(w http.ResponseWriter, r *http.Request) <ide> func (tr *Mock) RegisterHandler(path string, h handlerFunc) { <ide> <ide> // NewMock creates a registry mock <ide> func NewMock(t testing.TB) (*Mock, error) { <del> if ht, ok := t.(testutil.HelperT); ok { <del> ht.Helper() <del> } <add> t.Helper() <ide> testReg := &Mock{handlers: make(map[string]handlerFunc)} <ide> <ide> ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
9
Javascript
Javascript
fix bug in opacity handling
26536f8849335f17f3958d7eb544ad811732937e
<ide><path>src/plugins/plugin.tooltip.js <ide> class Tooltip extends Element { <ide> }; <ide> <ide> // IE11/Edge does not like very small opacities, so snap to 0 <del> opacity = Math.abs(opacity < 1e-3) ? 0 : opacity; <add> opacity = Math.abs(opacity) < 1e-3 ? 0 : opacity; <ide> <ide> // Truthy/falsey value for empty tooltip <ide> var hasTooltipContent = me.title.length || me.beforeBody.length || me.body.length || me.afterBody.length || me.footer.length;
1
Javascript
Javascript
fix progressplugin log
a9ada9f9198ffbd2af03223d5cd3029616f6e6dd
<ide><path>lib/ProgressPlugin.js <ide> const median3 = (a, b, c) => { <ide> }; <ide> <ide> const createDefaultHandler = (profile, logger) => { <add> let wasLogged = false; <ide> /** @type {{ value: string, time: number }[]} */ <ide> const lastStateInfo = []; <ide> <ide> const createDefaultHandler = (profile, logger) => { <ide> } <ide> } <ide> } <add> if (percentage === 0 && !wasLogged) { <add> wasLogged = true; <add> return; <add> } <ide> logger.status(`${Math.floor(percentage * 100)}%`, msg, ...args); <ide> if (percentage === 1 || (!msg && args.length === 0)) logger.status(); <ide> }; <ide> class ProgressPlugin { <ide> handler(0, ""); <ide> } <ide> }); <del> interceptHook(compiler.hooks.initialize, 0.01, "setup", "initialize"); <del> interceptHook(compiler.hooks.beforeRun, 0.02, "setup", "before run"); <del> interceptHook(compiler.hooks.run, 0.03, "setup", "run"); <add> interceptHook(compiler.hooks.beforeRun, 0.01, "setup", "before run"); <add> interceptHook(compiler.hooks.run, 0.02, "setup", "run"); <ide> interceptHook(compiler.hooks.watchRun, 0.03, "setup", "watch run"); <ide> interceptHook( <ide> compiler.hooks.normalModuleFactory, <ide><path>test/ProgressPlugin.test.js <ide> const createSimpleCompiler = progressOptions => { <ide> entry: "./a.js", <ide> infrastructureLogging: { <ide> debug: /Progress/ <del> } <add> }, <add> plugins: [ <add> new webpack.ProgressPlugin({ <add> activeModules: true, <add> ...progressOptions <add> }) <add> ] <ide> }); <ide> <ide> compiler.outputFileSystem = createFsFromVolume(new Volume()); <ide> <del> new webpack.ProgressPlugin({ <del> activeModules: true, <del> ...progressOptions <del> }).apply(compiler); <del> <ide> return compiler; <ide> }; <ide> <ide> describe("ProgressPlugin", function () { <ide> nanTest(() => createMultiCompiler(undefined, { parallelism: 1 })) <ide> ); <ide> <add> it("should start print only on call run/watch", done => { <add> const compiler = createSimpleCompiler(); <add> <add> const logs = getLogs(stderr.toString()); <add> expect(logs.join("")).toHaveLength(0); <add> <add> compiler.close(done); <add> }); <add> <ide> it("should print profile information", () => { <ide> const compiler = createSimpleCompiler({ <ide> profile: true
2
Go
Go
create the cidfile before creating the container
25be79208a1473a65be883989ae49b7c71081a83
<ide><path>commands.go <ide> func (cli *DockerCli) CmdRun(args ...string) error { <ide> return nil <ide> } <ide> <add> var containerIDFile *os.File <add> if len(hostConfig.ContainerIDFile) > 0 { <add> if _, err := ioutil.ReadFile(hostConfig.ContainerIDFile); err == nil { <add> return fmt.Errorf("cid file found, make sure the other container isn't running or delete %s", hostConfig.ContainerIDFile) <add> } <add> containerIDFile, err = os.Create(hostConfig.ContainerIDFile) <add> if err != nil { <add> return fmt.Errorf("failed to create the container ID file: %s", err) <add> } <add> defer containerIDFile.Close() <add> } <add> <ide> //create the container <ide> body, statusCode, err := cli.call("POST", "/containers/create", config) <ide> //if image not found try to pull it <ide> func (cli *DockerCli) CmdRun(args ...string) error { <ide> fmt.Fprintf(cli.err, "WARNING: %s\n", warning) <ide> } <ide> if len(hostConfig.ContainerIDFile) > 0 { <del> if _, err := ioutil.ReadFile(hostConfig.ContainerIDFile); err == nil { <del> return fmt.Errorf("cid file found, make sure the other container isn't running or delete %s", hostConfig.ContainerIDFile) <del> } <del> file, err := os.Create(hostConfig.ContainerIDFile) <del> if err != nil { <del> return fmt.Errorf("failed to create the container ID file: %s", err) <del> } <del> <del> defer file.Close() <del> if _, err = file.WriteString(runResult.ID); err != nil { <add> if _, err = containerIDFile.WriteString(runResult.ID); err != nil { <ide> return fmt.Errorf("failed to write the container ID to the file: %s", err) <ide> } <ide> }
1
Python
Python
add encoding to fix cyrillic output
c4d5ea27eb985024833e15898015ad26339c26d9
<ide><path>airflow/utils/log/file_task_handler.py <ide> def _read(self, ti, try_number, metadata=None): # pylint: disable=unused-argume <ide> pass <ide> <ide> response = requests.get(url, timeout=timeout) <add> response.encoding = "utf-8" <ide> <ide> # Check if the resource was properly fetched <ide> response.raise_for_status()
1
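The one-line change above forces the fetched log body to be decoded as UTF-8. The reason, sketched below under the assumption of a plain `requests` call (the URL is hypothetical, not taken from the patch): when the server's `Content-Type` header carries no charset, `requests` may fall back to ISO-8859-1, which garbles Cyrillic (and other non-Latin-1) UTF-8 text unless `response.encoding` is overridden before reading `response.text`.

```python
import requests

# Hypothetical log URL for illustration only.
response = requests.get("http://worker-host:8793/log/attempt-1.log", timeout=5)

# Without an explicit charset from the server, response.text may be decoded
# as ISO-8859-1; forcing UTF-8 keeps Cyrillic log lines readable.
response.encoding = "utf-8"
print(response.text)
```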
Text
Text
add an explicit version if necessary
9928754c2234ee146f49e9a3dac3af017490e7dd
<ide><path>share/doc/homebrew/Formula-Cookbook.md <ide> Add aliases by creating symlinks in `Library/Aliases`. <ide> <ide> You can run `brew audit` to test formulae for adherence to Homebrew house style. This includes warnings for trailing whitespace, preferred URLs for certain source hosts, and a lot of other style issues. Fixing these warnings before committing will make the process a lot smoother for us. <ide> <add>Use `brew info` and check if the version guessed by Homebrew from the URL is <add>correct. Add an explicit `version` if not. <ide> <ide> ## Commit <ide>
1
PHP
PHP
apply fixes from styleci
acec13a2b3ee734b15410982e84355030c26468f
<ide><path>tests/Auth/AuthAccessGateTest.php <ide> public function test_authorize_with_policy_that_returns_denied_response_object_t <ide> public function test_policy_that_throws_authorization_exception_is_caught_in_inspect() <ide> { <ide> $gate = $this->getBasicGate(); <del> <add> <ide> $gate->policy(AccessGateTestDummy::class, AccessGateTestPolicyThrowingAuthorizationException::class); <ide> <ide> $response = $gate->inspect('create', new AccessGateTestDummy); <ide> public function create() <ide> throw new AuthorizationException('Not allowed.', 'some_code'); <ide> } <ide> } <del>
1
Python
Python
remove unused imports
c8a75ed65008a39d99626c5a488893a04736e37d
<ide><path>numpy/core/_internal.py <ide> import re <ide> import sys <ide> <del>from numpy.compat import basestring, unicode <add>from numpy.compat import unicode <ide> from .multiarray import dtype, array, ndarray <ide> try: <ide> import ctypes <ide> except ImportError: <ide> ctypes = None <del>from .numerictypes import object_ <ide> <ide> if (sys.byteorder == 'little'): <ide> _nbo = b'<' <ide><path>numpy/core/einsumfunc.py <ide> <ide> from numpy.compat import basestring <ide> from numpy.core.multiarray import c_einsum <del>from numpy.core.numeric import asarray, asanyarray, result_type, tensordot, dot <add>from numpy.core.numeric import asanyarray, tensordot <ide> <ide> __all__ = ['einsum', 'einsum_path'] <ide> <ide><path>numpy/lib/function_base.py <ide> import re <ide> import sys <ide> import warnings <del>import operator <ide> <ide> import numpy as np <ide> import numpy.core.numeric as _nx <del>from numpy.core import linspace, atleast_1d, atleast_2d, transpose <add>from numpy.core import atleast_1d, transpose <ide> from numpy.core.numeric import ( <ide> ones, zeros, arange, concatenate, array, asarray, asanyarray, empty, <ide> empty_like, ndarray, around, floor, ceil, take, dot, where, intp, <del> integer, isscalar, absolute, AxisError <add> integer, isscalar, absolute <ide> ) <ide> from numpy.core.umath import ( <del> pi, multiply, add, arctan2, frompyfunc, cos, less_equal, sqrt, sin, <del> mod, exp, log10, not_equal, subtract <add> pi, add, arctan2, frompyfunc, cos, less_equal, sqrt, sin, <add> mod, exp, not_equal, subtract <ide> ) <ide> from numpy.core.fromnumeric import ( <del> ravel, nonzero, sort, partition, mean, any, sum <add> ravel, nonzero, partition, mean, any, sum <ide> ) <del>from numpy.core.numerictypes import typecodes, number <add>from numpy.core.numerictypes import typecodes <ide> from numpy.core.function_base import add_newdoc <ide> from numpy.lib.twodim_base import diag <ide> from .utils import deprecate <ide> ) <ide> from numpy.core.umath import _add_newdoc_ufunc as add_newdoc_ufunc <ide> from numpy.compat import long <del>from numpy.compat.py3k import basestring <ide> <ide> if sys.version_info[0] < 3: <ide> # Force range to be a generator, for np.delete's usage.
3
Ruby
Ruby
add args argument to write_env_script
c244e992afbab4e74fa5134c21c345e281b3f5aa
<ide><path>Library/Homebrew/extend/pathname.rb <ide> def write_exec_script(*targets) <ide> end <ide> <ide> # Writes an exec script that sets environment variables <del> def write_env_script(target, env) <add> def write_env_script(target, args, env = nil) <add> unless env <add> env = args <add> args = nil <add> end <ide> env_export = +"" <ide> env.each { |key, value| env_export << "#{key}=\"#{value}\" " } <ide> dirname.mkpath <ide> write <<~SH <ide> #!/bin/bash <del> #{env_export}exec "#{target}" "$@" <add> #{env_export}exec "#{target}" #{args} "$@" <ide> SH <ide> end <ide>
1
Python
Python
fix pickling failure when spawning processes
520aeedec82d91e15d5c43da4c85c948c5eb2ac3
<ide><path>airflow/configuration.py <ide> <ide> import copy <ide> import logging <add>import multiprocessing <ide> import os <ide> import pathlib <ide> import re <ide> def __init__(self, default_config=None, *args, **kwargs): <ide> self.is_validated = False <ide> <ide> def _validate(self): <del> if ( <del> self.get("core", "executor") not in ('DebugExecutor', 'SequentialExecutor') and <del> "sqlite" in self.get('core', 'sql_alchemy_conn')): <del> raise AirflowConfigException( <del> "error: cannot use sqlite with the {}".format( <del> self.get('core', 'executor'))) <add> <add> self._validate_config_dependencies() <ide> <ide> for section, replacement in self.deprecated_values.items(): <ide> for name, info in replacement.items(): <ide> def _validate(self): <ide> <ide> self.is_validated = True <ide> <add> def _validate_config_dependencies(self): <add> """ <add> Validate that config values aren't invalid given other config values <add> or system-level limitations and requirements. <add> """ <add> <add> if ( <add> self.get("core", "executor") not in ('DebugExecutor', 'SequentialExecutor') and <add> "sqlite" in self.get('core', 'sql_alchemy_conn')): <add> raise AirflowConfigException( <add> "error: cannot use sqlite with the {}".format( <add> self.get('core', 'executor'))) <add> <add> if self.has_option('core', 'mp_start_method'): <add> mp_start_method = self.get('core', 'mp_start_method') <add> start_method_options = multiprocessing.get_all_start_methods() <add> <add> if mp_start_method not in start_method_options: <add> raise AirflowConfigException( <add> "mp_start_method should not be " + mp_start_method + <add> ". Possible values are " + ", ".join(start_method_options)) <add> <ide> def _using_old_value(self, old, current_value): <ide> return old.search(current_value) is not None <ide> <ide><path>airflow/jobs/scheduler_job.py <ide> ) <ide> from airflow.utils.email import get_email_address_list, send_email <ide> from airflow.utils.log.logging_mixin import LoggingMixin, StreamLogWriter, set_context <add>from airflow.utils.mixins import MultiprocessingStartMethodMixin <ide> from airflow.utils.session import provide_session <ide> from airflow.utils.state import State <ide> from airflow.utils.types import DagRunType <ide> <ide> <del>class DagFileProcessorProcess(AbstractDagFileProcessorProcess, LoggingMixin): <add>class DagFileProcessorProcess(AbstractDagFileProcessorProcess, LoggingMixin, MultiprocessingStartMethodMixin): <ide> """Runs DAG processing in a separate process using DagFileProcessor <ide> <ide> :param file_path: a Python file containing Airflow DAG definitions <ide> def start(self): <ide> """ <ide> Launch the process and start processing the DAG. 
<ide> """ <del> self._parent_channel, _child_channel = multiprocessing.Pipe() <del> self._process = multiprocessing.Process( <add> start_method = self._get_multiprocessing_start_method() <add> context = multiprocessing.get_context(start_method) <add> <add> self._parent_channel, _child_channel = context.Pipe() <add> self._process = context.Process( <ide> target=type(self)._run_file_processor, <ide> args=( <ide> _child_channel, <ide> def _execute(self): <ide> <ide> self.log.info("Processing each file at most %s times", self.num_runs) <ide> <del> def processor_factory(file_path, failure_callback_requests): <del> return DagFileProcessorProcess( <del> file_path=file_path, <del> pickle_dags=pickle_dags, <del> dag_id_white_list=self.dag_ids, <del> failure_callback_requests=failure_callback_requests <del> ) <del> <ide> # When using sqlite, we do not use async_mode <ide> # so the scheduler job and DAG parser don't access the DB at the same time. <ide> async_mode = not self.using_sqlite <ide> def processor_factory(file_path, failure_callback_requests): <ide> processor_timeout = timedelta(seconds=processor_timeout_seconds) <ide> self.processor_agent = DagFileProcessorAgent(self.subdir, <ide> self.num_runs, <del> processor_factory, <add> type(self)._create_dag_file_processor, <ide> processor_timeout, <add> self.dag_ids, <add> pickle_dags, <ide> async_mode) <ide> <ide> try: <ide> def processor_factory(file_path, failure_callback_requests): <ide> self.processor_agent.end() <ide> self.log.info("Exited execute loop") <ide> <add> @staticmethod <add> def _create_dag_file_processor(file_path, failure_callback_requests, dag_ids, pickle_dags): <add> """ <add> Creates DagFileProcessorProcess instance. <add> """ <add> return DagFileProcessorProcess( <add> file_path=file_path, <add> pickle_dags=pickle_dags, <add> dag_id_white_list=dag_ids, <add> failure_callback_requests=failure_callback_requests <add> ) <add> <ide> def _run_scheduler_loop(self): <ide> """ <ide> The actual scheduler loop. The main steps in the loop are: <ide><path>airflow/utils/dag_processing.py <ide> from airflow.configuration import conf <ide> from airflow.dag.base_dag import BaseDag, BaseDagBag <ide> from airflow.exceptions import AirflowException <del>from airflow.jobs.local_task_job import LocalTaskJob as LJ <ide> from airflow.models import errors <ide> from airflow.models.taskinstance import SimpleTaskInstance, TaskInstance <ide> from airflow.settings import STORE_SERIALIZED_DAGS <ide> from airflow.stats import Stats <ide> from airflow.utils import timezone <ide> from airflow.utils.file import list_py_file_paths <ide> from airflow.utils.log.logging_mixin import LoggingMixin <add>from airflow.utils.mixins import MultiprocessingStartMethodMixin <ide> from airflow.utils.process_utils import kill_child_processes_by_pids, reap_process_group <ide> from airflow.utils.session import provide_session <ide> from airflow.utils.state import State <ide> class FailureCallbackRequest(NamedTuple): <ide> msg: str <ide> <ide> <del>class DagFileProcessorAgent(LoggingMixin): <add>class DagFileProcessorAgent(LoggingMixin, MultiprocessingStartMethodMixin): <ide> """ <ide> Agent for DAG file processing. It is responsible for all DAG parsing <ide> related jobs in scheduler process. 
Mainly it can spin up DagFileProcessorManager <ide> class DagFileProcessorAgent(LoggingMixin): <ide> :type processor_factory: (str, str, list) -> (AbstractDagFileProcessorProcess) <ide> :param processor_timeout: How long to wait before timing out a DAG file processor <ide> :type processor_timeout: timedelta <add> :param dag_ids: if specified, only schedule tasks with these DAG IDs <add> :type dag_ids: list[str] <add> :param pickle_dags: whether to pickle DAGs. <add> :type: pickle_dags: bool <ide> :param async_mode: Whether to start agent in async mode <ide> :type async_mode: bool <ide> """ <ide> def __init__(self, <ide> max_runs, <ide> processor_factory, <ide> processor_timeout, <add> dag_ids, <add> pickle_dags, <ide> async_mode): <ide> super().__init__() <ide> self._file_path_queue = [] <ide> self._dag_directory = dag_directory <ide> self._max_runs = max_runs <ide> self._processor_factory = processor_factory <ide> self._processor_timeout = processor_timeout <add> self._dag_ids = dag_ids <add> self._pickle_dags = pickle_dags <ide> self._async_mode = async_mode <ide> # Map from file path to the processor <ide> self._processors = {} <ide> def start(self): <ide> """ <ide> Launch DagFileProcessorManager processor and start DAG parsing loop in manager. <ide> """ <del> self._parent_signal_conn, child_signal_conn = multiprocessing.Pipe() <del> self._process = multiprocessing.Process( <add> mp_start_method = self._get_multiprocessing_start_method() <add> context = multiprocessing.get_context(mp_start_method) <add> <add> self._parent_signal_conn, child_signal_conn = context.Pipe() <add> self._process = context.Process( <ide> target=type(self)._run_processor_manager, <ide> args=( <ide> self._dag_directory, <ide> self._max_runs, <ide> self._processor_factory, <ide> self._processor_timeout, <ide> child_signal_conn, <del> self._async_mode, <add> self._dag_ids, <add> self._pickle_dags, <add> self._async_mode <ide> ) <ide> ) <ide> self._process.start() <ide> def _run_processor_manager(dag_directory, <ide> processor_factory, <ide> processor_timeout, <ide> signal_conn, <add> dag_ids, <add> pickle_dags, <ide> async_mode): <ide> <ide> # Make this process start as a new process group - that makes it easy <ide> def _run_processor_manager(dag_directory, <ide> processor_factory, <ide> processor_timeout, <ide> signal_conn, <add> dag_ids, <add> pickle_dags, <ide> async_mode) <ide> <ide> processor_manager.start() <ide> class DagFileProcessorManager(LoggingMixin): # pylint: disable=too-many-instanc <ide> :type processor_timeout: timedelta <ide> :param signal_conn: connection to communicate signal with processor agent. <ide> :type signal_conn: MultiprocessingConnection <add> :param dag_ids: if specified, only schedule tasks with these DAG IDs <add> :type dag_ids: list[str] <add> :param pickle_dags: whether to pickle DAGs. 
<add> :type pickle_dags: bool <ide> :param async_mode: whether to start the manager in async mode <ide> :type async_mode: bool <ide> """ <ide> def __init__(self, <ide> ], <ide> processor_timeout: timedelta, <ide> signal_conn: MultiprocessingConnection, <add> dag_ids: List[str], <add> pickle_dags: bool, <ide> async_mode: bool = True): <ide> super().__init__() <ide> self._file_paths: List[str] = [] <ide> def __init__(self, <ide> self._max_runs = max_runs <ide> self._processor_factory = processor_factory <ide> self._signal_conn = signal_conn <add> self._pickle_dags = pickle_dags <add> self._dag_ids = dag_ids <ide> self._async_mode = async_mode <ide> self._parsing_start_time: Optional[datetime] = None <ide> <ide> def start_new_processes(self): <ide> while self._parallelism - len(self._processors) > 0 and self._file_path_queue: <ide> file_path = self._file_path_queue.pop(0) <ide> callback_to_execute_for_file = self._callback_to_execute[file_path] <del> processor = self._processor_factory(file_path, callback_to_execute_for_file) <add> processor = self._processor_factory( <add> file_path, <add> callback_to_execute_for_file, <add> self._dag_ids, <add> self._pickle_dags) <add> <ide> del self._callback_to_execute[file_path] <ide> Stats.incr('dag_processing.processes') <ide> <ide> def _find_zombies(self, session): <ide> if not self._last_zombie_query_time or \ <ide> (now - self._last_zombie_query_time).total_seconds() > self._zombie_query_interval: <ide> # to avoid circular imports <add> from airflow.jobs.local_task_job import LocalTaskJob as LJ <ide> self.log.info("Finding 'running' jobs without a recent heartbeat") <ide> TI = airflow.models.TaskInstance <ide> DM = airflow.models.DagModel <ide><path>airflow/utils/mixins.py <add># <add># Licensed to the Apache Software Foundation (ASF) under one <add># or more contributor license agreements. See the NOTICE file <add># distributed with this work for additional information <add># regarding copyright ownership. The ASF licenses this file <add># to you under the Apache License, Version 2.0 (the <add># "License"); you may not use this file except in compliance <add># with the License. You may obtain a copy of the License at <add># <add># http://www.apache.org/licenses/LICENSE-2.0 <add># <add># Unless required by applicable law or agreed to in writing, <add># software distributed under the License is distributed on an <add># "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY <add># KIND, either express or implied. See the License for the <add># specific language governing permissions and limitations <add># under the License. <add> <add>import multiprocessing <add> <add>from airflow.configuration import conf <add> <add> <add>class MultiprocessingStartMethodMixin: <add> """ <add> Convenience class to add support for different types of multiprocessing. <add> """ <add> def _get_multiprocessing_start_method(self): <add> """ <add> Determine method of creating new processes by checking if the <add> mp_start_method is set in configs, else, it uses the OS default. 
<add> """ <add> if conf.has_option('core', 'mp_start_method'): <add> return conf.get('core', 'mp_start_method') <add> <add> return multiprocessing.get_start_method() <ide><path>tests/jobs/test_scheduler_job.py <ide> def test_scheduler_multiprocessing(self): <ide> self.assertEqual( <ide> len(session.query(TaskInstance).filter(TaskInstance.dag_id == dag_id).all()), 0) <ide> <add> @conf_vars({("core", "mp_start_method"): "spawn"}) <add> def test_scheduler_multiprocessing_with_spawn_method(self): <add> """ <add> Test that the scheduler can successfully queue multiple dags in parallel <add> when using "spawn" mode of multiprocessing. (Fork is default on Linux and older OSX) <add> """ <add> dag_ids = ['test_start_date_scheduling', 'test_dagrun_states_success'] <add> for dag_id in dag_ids: <add> dag = self.dagbag.get_dag(dag_id) <add> dag.clear() <add> <add> scheduler = SchedulerJob(dag_ids=dag_ids, <add> executor=self.null_exec, <add> subdir=os.path.join( <add> TEST_DAG_FOLDER, 'test_scheduler_dags.py'), <add> num_runs=1) <add> <add> scheduler.run() <add> <add> # zero tasks ran <add> dag_id = 'test_start_date_scheduling' <add> with create_session() as session: <add> self.assertEqual( <add> session.query(TaskInstance).filter(TaskInstance.dag_id == dag_id).count(), 0) <add> <ide> def test_scheduler_verify_pool_full(self): <ide> """ <ide> Test task instances not queued when pool is full <ide><path>tests/test_utils/mock_executor.py <ide> def __init__(self, do_update=True, *args, **kwargs): <ide> self.history = [] <ide> # All the tasks, in a stable sort order <ide> self.sorted_tasks = [] <del> self.mock_task_results = defaultdict(lambda: State.SUCCESS) <add> <add> # If multiprocessing runs in spawn mode, <add> # arguments are to be pickled but lambda is not picclable. <add> # So we should pass self.success instead of lambda. <add> self.mock_task_results = defaultdict(self.success) <ide> <ide> super().__init__(*args, **kwargs) <ide> <add> def success(self): <add> return State.SUCCESS <add> <ide> def heartbeat(self): <ide> if not self.do_update: <ide> return <ide><path>tests/utils/test_dag_processing.py <ide> DEFAULT_DATE = timezone.datetime(2016, 1, 1) <ide> <ide> <add>class FakeDagFileProcessorRunner(DagFileProcessorProcess): <add> # This fake processor will return the zombies it received in constructor <add> # as its processing result w/o actually parsing anything. 
<add> def __init__(self, file_path, pickle_dags, dag_id_white_list, zombies): <add> super().__init__(file_path, pickle_dags, dag_id_white_list, zombies) <add> self._result = zombies, 0 <add> <add> def start(self): <add> pass <add> <add> @property <add> def start_time(self): <add> return DEFAULT_DATE <add> <add> @property <add> def pid(self): <add> return 1234 <add> <add> @property <add> def done(self): <add> return True <add> <add> @property <add> def result(self): <add> return self._result <add> <add> @staticmethod <add> def _fake_dag_processor_factory(file_path, zombies, dag_ids, pickle_dags): <add> return FakeDagFileProcessorRunner( <add> file_path, <add> pickle_dags, <add> dag_ids, <add> zombies <add> ) <add> <add> <ide> class TestDagFileProcessorManager(unittest.TestCase): <ide> def setUp(self): <ide> clear_db_runs() <ide> def test_set_file_paths_when_processor_file_path_not_in_new_file_paths(self): <ide> processor_factory=MagicMock().return_value, <ide> processor_timeout=timedelta.max, <ide> signal_conn=MagicMock(), <add> dag_ids=[], <add> pickle_dags=False, <ide> async_mode=True) <ide> <ide> mock_processor = MagicMock() <ide> def test_set_file_paths_when_processor_file_path_is_in_new_file_paths(self): <ide> processor_factory=MagicMock().return_value, <ide> processor_timeout=timedelta.max, <ide> signal_conn=MagicMock(), <add> dag_ids=[], <add> pickle_dags=False, <ide> async_mode=True) <ide> <ide> mock_processor = MagicMock() <ide> def test_find_zombies(self): <ide> processor_factory=MagicMock().return_value, <ide> processor_timeout=timedelta.max, <ide> signal_conn=MagicMock(), <add> dag_ids=[], <add> pickle_dags=False, <ide> async_mode=True) <ide> <ide> dagbag = DagBag(TEST_DAG_FOLDER) <ide> def test_handle_failure_callback_with_zobmies_are_correctly_passed_to_dag_file_p <ide> ) <ide> ] <ide> <del> class FakeDagFileProcessorRunner(DagFileProcessorProcess): <del> # This fake processor will return the zombies it received in constructor <del> # as its processing result w/o actually parsing anything. 
<del> def __init__(self, file_path, pickle_dags, dag_id_white_list, failure_callback_requests): <del> super().__init__(file_path, pickle_dags, dag_id_white_list, failure_callback_requests) <del> self._result = failure_callback_requests, 0 <del> <del> def start(self): <del> pass <del> <del> @property <del> def start_time(self): <del> return DEFAULT_DATE <del> <del> @property <del> def pid(self): <del> return 1234 <del> <del> @property <del> def done(self): <del> return True <del> <del> @property <del> def result(self): <del> return self._result <del> <del> def processor_factory(file_path, failure_callback_requests): <del> return FakeDagFileProcessorRunner( <del> file_path, <del> False, <del> [], <del> failure_callback_requests <del> ) <add> test_dag_path = os.path.join(TEST_DAG_FOLDER, 'test_example_bash_operator.py') <ide> <ide> async_mode = 'sqlite' not in conf.get('core', 'sql_alchemy_conn') <ide> processor_agent = DagFileProcessorAgent(test_dag_path, <ide> 1, <del> processor_factory, <add> FakeDagFileProcessorRunner._fake_dag_processor_factory, <ide> timedelta.max, <add> [], <add> False, <ide> async_mode) <ide> processor_agent.start() <ide> parsing_result = [] <ide> def test_kill_timed_out_processors_kill(self, mock_kill, mock_pid): <ide> processor_factory=MagicMock().return_value, <ide> processor_timeout=timedelta(seconds=5), <ide> signal_conn=MagicMock(), <add> dag_ids=[], <add> pickle_dags=False, <ide> async_mode=True) <ide> <ide> processor = DagFileProcessorProcess('abc.txt', False, [], []) <ide> def test_kill_timed_out_processors_no_kill(self, mock_dag_file_processor, mock_p <ide> processor_factory=MagicMock().return_value, <ide> processor_timeout=timedelta(seconds=5), <ide> signal_conn=MagicMock(), <add> dag_ids=[], <add> pickle_dags=False, <ide> async_mode=True) <ide> <ide> processor = DagFileProcessorProcess('abc.txt', False, [], []) <ide> def test_cleanup_stale_dags_no_serialization(self, sdm_mock, dag_mock): <ide> max_runs=1, <ide> processor_factory=MagicMock().return_value, <ide> processor_timeout=timedelta(seconds=50), <add> dag_ids=[], <add> pickle_dags=False, <ide> signal_conn=MagicMock(), <ide> async_mode=True) <ide> <ide> def setUp(self): <ide> def tearDown(self): <ide> # Remove any new modules imported during the test run. This lets us <ide> # import the same source files for more than one test. <add> remove_list = [] <ide> for mod in sys.modules: <ide> if mod not in self.old_modules: <del> del sys.modules[mod] <add> remove_list.append(mod) <add> <add> for mod in remove_list: <add> del sys.modules[mod] <add> <add> @staticmethod <add> def _processor_factory(file_path, zombies, dag_ids, pickle_dags): <add> return DagFileProcessorProcess(file_path, <add> pickle_dags, <add> dag_ids, <add> zombies) <ide> <ide> def test_reload_module(self): <ide> """ <ide> class path, thus when reloading logging module the airflow.processor_manager <ide> with settings_context(SETTINGS_FILE_VALID): <ide> # Launch a process through DagFileProcessorAgent, which will try <ide> # reload the logging module. 
<del> def processor_factory(file_path, zombies): <del> return DagFileProcessorProcess(file_path, <del> False, <del> [], <del> zombies) <del> <ide> test_dag_path = os.path.join(TEST_DAG_FOLDER, 'test_scheduler_dags.py') <ide> async_mode = 'sqlite' not in conf.get('core', 'sql_alchemy_conn') <ide> <ide> log_file_loc = conf.get('logging', 'DAG_PROCESSOR_MANAGER_LOG_LOCATION') <add> <ide> try: <ide> os.remove(log_file_loc) <ide> except OSError: <ide> def processor_factory(file_path, zombies): <ide> # Starting dag processing with 0 max_runs to avoid redundant operations. <ide> processor_agent = DagFileProcessorAgent(test_dag_path, <ide> 0, <del> processor_factory, <add> type(self)._processor_factory, <ide> timedelta.max, <add> [], <add> False, <ide> async_mode) <ide> processor_agent.start() <ide> if not async_mode: <ide> processor_agent.run_single_parsing_loop() <ide> <ide> processor_agent._process.join() <del> <ide> # Since we are reloading logging config not creating this file, <ide> # we should expect it to be nonexistent. <add> <ide> self.assertFalse(os.path.isfile(log_file_loc)) <ide> <ide> def test_parse_once(self): <del> def processor_factory(file_path, zombies): <del> return DagFileProcessorProcess(file_path, <del> False, <del> [], <del> zombies) <del> <ide> test_dag_path = os.path.join(TEST_DAG_FOLDER, 'test_scheduler_dags.py') <ide> async_mode = 'sqlite' not in conf.get('core', 'sql_alchemy_conn') <ide> processor_agent = DagFileProcessorAgent(test_dag_path, <ide> 1, <del> processor_factory, <add> type(self)._processor_factory, <ide> timedelta.max, <add> [], <add> False, <ide> async_mode) <ide> processor_agent.start() <ide> parsing_result = [] <ide> def processor_factory(file_path, zombies): <ide> self.assertEqual(dag_ids.count('test_start_date_scheduling'), 1) <ide> <ide> def test_launch_process(self): <del> def processor_factory(file_path, zombies): <del> return DagFileProcessorProcess(file_path, <del> False, <del> [], <del> zombies) <del> <ide> test_dag_path = os.path.join(TEST_DAG_FOLDER, 'test_scheduler_dags.py') <ide> async_mode = 'sqlite' not in conf.get('core', 'sql_alchemy_conn') <ide> <ide> def processor_factory(file_path, zombies): <ide> # Starting dag processing with 0 max_runs to avoid redundant operations. <ide> processor_agent = DagFileProcessorAgent(test_dag_path, <ide> 0, <del> processor_factory, <add> type(self)._processor_factory, <ide> timedelta.max, <add> [], <add> False, <ide> async_mode) <ide> processor_agent.start() <ide> if not async_mode:
7
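One detail in the test refactor above is worth pulling out: the `tearDown` hunk stops deleting entries from `sys.modules` while iterating over it and instead collects the names into `remove_list` first. The snippet below is a minimal, framework-free sketch of that same pattern; the helper name `purge_new_modules` and the fake module name are illustrative, not part of the Airflow patch.

```python
import sys
import types


def purge_new_modules(old_modules):
    """Remove every module imported after `old_modules` was snapshotted."""
    # Collect the names first: deleting entries while iterating sys.modules
    # directly raises "dictionary changed size during iteration" on Python 3.
    to_remove = [name for name in sys.modules if name not in old_modules]
    for name in to_remove:
        del sys.modules[name]


snapshot = set(sys.modules)
# Simulate a module that the test run pulled in.
sys.modules["fake_test_module"] = types.ModuleType("fake_test_module")
purge_new_modules(snapshot)
assert "fake_test_module" not in sys.modules
```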
PHP
PHP
make getconnectionname overridable
741f29d4693156192d7dee6f30670e989bdf8a9d
<ide><path>src/Illuminate/Database/Eloquent/Model.php <ide> public function newFromBuilder($attributes = [], $connection = null) <ide> <ide> $model->setRawAttributes((array) $attributes, true); <ide> <del> $model->setConnection($connection ?: $this->connection); <add> $model->setConnection($connection ?: $this->getConnectionName()); <ide> <ide> return $model; <ide> }
1
Text
Text
add cellular_automata directory
b560b76002006e934533d47b2ea69c8360106d1d
<ide><path>cellular_automata/README.md <add># Cellular Automata <add> <add>* https://en.wikipedia.org/wiki/Cellular_automaton <add>* https://mathworld.wolfram.com/ElementaryCellularAutomaton.html
1
PHP
PHP
apply fixes from styleci
561d38887fd0848d1672d554dd1cd30f5044bb84
<ide><path>src/Illuminate/Console/Scheduling/ManagesFrequencies.php <ide> public function twiceMonthly($first = 1, $second = 16, $time = '0:0') <ide> $days = $first.','.$second; <ide> <ide> $this->dailyAt($time); <del> <add> <ide> return $this->spliceIntoPosition(1, 0) <ide> ->spliceIntoPosition(2, 0) <ide> ->spliceIntoPosition(3, $days);
1
Text
Text
add a note about security
5f6f3c7fc3ea96a362101c3d7fa08f31df0358b3
<ide><path>README.md <ide> tests for cakephp by doing the following: <ide> <ide> See [CONTRIBUTING.md](CONTRIBUTING.md) for more information. <ide> <del> <ide> ## Some Handy Links <ide> <ide> [CakePHP](http://www.cakephp.org) - The rapid development PHP framework. <ide> See [CONTRIBUTING.md](CONTRIBUTING.md) for more information. <ide> <ide> [Cake Software Foundation](http://cakefoundation.org) - Promoting development related to CakePHP. <ide> <del> <ide> ## Get Support! <ide> <ide> [#cakephp](http://webchat.freenode.net/?channels=#cakephp) on irc.freenode.net - Come chat with us, we have cake. <ide> See [CONTRIBUTING.md](CONTRIBUTING.md) for more information. <ide> [CONTRIBUTING.md](CONTRIBUTING.md) - Quick pointers for contributing to the CakePHP project. <ide> <ide> [CookBook "Contributing" Section](http://book.cakephp.org/3.0/en/contributing.html) - Details about contributing to the project. <add> <add># Security <add> <add>If you’ve found a security issue in CakePHP, please use the following procedure instead of the normal bug reporting system. Instead of using the bug tracker, mailing list or IRC please send an email to security [at] cakephp.org. Emails sent to this address go to the CakePHP core team on a private mailing list. <add> <add>For each report, we try to first confirm the vulnerability. Once confirmed, the CakePHP team will take the following actions: <add> <add>- Acknowledge to the reporter that we’ve received the issue, and are working on a fix. We ask that the reporter keep the issue confidential until we announce it. <add>- Get a fix/patch prepared. <add>- Prepare a post describing the vulnerability, and the possible exploits. <add>- Release new versions of all affected versions. <add>- Prominently feature the problem in the release announcement.
1
Python
Python
fix a path so that test can run on windows
c4e9615691a19128f446563718355aedf03cf01b
<ide><path>pytorch_transformers/tests/modeling_common_test.py <ide> import shutil <ide> import json <ide> import random <add>import uuid <ide> <ide> import unittest <ide> import logging <ide> def create_and_test_config_to_json_string(self): <ide> <ide> def create_and_test_config_to_json_file(self): <ide> config_first = self.config_class(**self.inputs_dict) <del> json_file_path = "/tmp/config.json" <add> json_file_path = os.path.join(os.getcwd(), "config_" + str(uuid.uuid4()) + ".json") <ide> config_first.to_json_file(json_file_path) <ide> config_second = self.config_class.from_json_file(json_file_path) <ide> os.remove(json_file_path)
1
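The fix above swaps a hard-coded `/tmp/config.json` for a uniquely named file in the current working directory, which is what makes the test portable to Windows. Here is a small standalone sketch of that approach; the config payload is invented for illustration, and the stdlib `tempfile` module would be a reasonable alternative when writing into the working directory is undesirable.

```python
import json
import os
import uuid

# A uniquely named file in the current directory instead of a hard-coded
# "/tmp/..." path, which does not exist on Windows.
json_file_path = os.path.join(os.getcwd(), "config_" + str(uuid.uuid4()) + ".json")

config = {"hidden_size": 32}  # placeholder payload, not a real model config
with open(json_file_path, "w") as handle:
    json.dump(config, handle)

try:
    with open(json_file_path) as handle:
        assert json.load(handle) == config
finally:
    os.remove(json_file_path)
```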
Text
Text
fix created_at [ci skip]
70bb0cc2ec9ab13f362a2cc9d414c8622c74e796
<ide><path>guides/source/security.md <ide> class Session < ApplicationRecord <ide> end <ide> ``` <ide> <del>The section about session fixation introduced the problem of maintained sessions. An attacker maintaining a session every five minutes can keep the session alive forever, although you are expiring sessions. A simple solution for this would be to add a created_at column to the sessions table. Now you can delete sessions that were created a long time ago. Use this line in the sweep method above: <add>The section about session fixation introduced the problem of maintained sessions. An attacker maintaining a session every five minutes can keep the session alive forever, although you are expiring sessions. A simple solution for this would be to add a `created_at` column to the sessions table. Now you can delete sessions that were created a long time ago. Use this line in the sweep method above: <ide> <ide> ```ruby <ide> delete_all "updated_at < '#{time.ago.to_s(:db)}' OR
1
PHP
PHP
apply fixes from styleci
6232c084adf5ccc0629bd56042a89fe588b8dc93
<ide><path>tests/Database/DatabaseQueryBuilderTest.php <ide> public function testMySqlWrappingJsonWithBooleanAndIntegerThatLooksLikeOne() <ide> <ide> public function testJsonPathEscaping() <ide> { <del> $expectedWithJsonEscaped = <<<SQL <add> $expectedWithJsonEscaped = <<<'SQL' <ide> select json_unquote(json_extract(`json`, '$."''))#"')) <ide> SQL; <ide>
1
Ruby
Ruby
fix external command test and code style (#281)
78f8c60343b514ee7129cf3c86f216460a4ed3ab
<ide><path>Library/Homebrew/test/test_commands.rb <ide> def setup <ide> end <ide> <ide> def teardown <del> @cmds.each { |f| f.unlink } <add> @cmds.each(&:unlink) <ide> end <ide> <ide> def test_internal_commands <ide> def test_external_commands <ide> %w[brew-t1 brew-t2.rb brew-t3.py].each do |file| <ide> path = "#{dir}/#{file}" <ide> FileUtils.touch path <del> FileUtils.chmod 0744, path <add> FileUtils.chmod 0755, path <ide> end <ide> <del> FileUtils.touch "#{dir}/t4" <add> FileUtils.touch "#{dir}/brew-t4" <ide> <del> ENV["PATH"] = "#{ENV["PATH"]}#{File::PATH_SEPARATOR}#{dir}" <add> ENV["PATH"] += "#{File::PATH_SEPARATOR}#{dir}" <ide> cmds = Homebrew.external_commands <ide> <ide> assert cmds.include?("t1"), "Executable files should be included"
1
Javascript
Javascript
fix trailing whitespace
f1663088c3d75801a37a8b8dc031779b15dc9ba9
<ide><path>src/ng/location.js <ide> var locationPrototype = { <ide> * This method is getter only. <ide> * <ide> * Return host of current url. <del> * <add> * <ide> * Note: compared to the non-angular version `location.host` which returns `hostname:port`, this returns the `hostname` portion only. <ide> * <ide> * <ide> * ```js <ide> * // given url http://example.com/#/some/path?foo=bar&baz=xoxo <ide> * var host = $location.host(); <ide> * // => "example.com" <del> * <add> * <ide> * // given url http://user:[email protected]:8080/#/some/path?foo=bar&baz=xoxo <ide> * host = $location.host(); <ide> * // => "example.com"
1
Javascript
Javascript
add prod urls to verify emails
942962fa0f95b2873c660b37e4a52d30964cc5ac
<ide><path>common/models/user.js <ide> import { blacklistedUsernames } from '../../server/utils/constants'; <ide> <ide> const debug = debugFactory('fcc:user:remote'); <ide> const BROWNIEPOINTS_TIMEOUT = [1, 'hour']; <add>const isDev = process.env.NODE_ENV !== 'production'; <ide> <ide> function getAboutProfile({ <ide> username, <ide> module.exports = function(User) { <ide> to: email, <ide> from: '[email protected]', <ide> subject: 'Welcome to Free Code Camp!', <add> protocol: isDev ? null : 'https', <add> host: isDev ? 'localhost' : 'freecodecamp.com', <add> port: isDev ? null : 443, <ide> template: path.join( <ide> __dirname, <ide> '..', <ide><path>server/boot/a-extendUser.js <ide> import { isEmail } from 'validator'; <ide> import path from 'path'; <ide> <ide> const debug = debugFactory('fcc:user:remote'); <add>const isDev = process.env.NODE_ENV !== 'production'; <ide> <ide> function destroyAllRelated(id, Model) { <ide> return Observable.fromNodeCallback( <ide> module.exports = function(app) { <ide> to: user.email, <ide> from: '[email protected]', <ide> subject: 'Welcome to Free Code Camp!', <add> protocol: isDev ? null : 'https', <add> host: isDev ? 'localhost' : 'freecodecamp.com', <add> port: isDev ? null : 443, <ide> template: path.join( <ide> __dirname, <ide> '..',
2
Go
Go
fix preferred ip allocation in ipam
b2ff78548a9b8b81c481df8c904c1ab3db8b3ad3
<ide><path>libnetwork/ipam/allocator.go <ide> const ( <ide> minNetSizeV6Eff = 96 <ide> // The size of the host subnet used internally, it's the most granular sequence addresses <ide> defaultInternalHostSize = 16 <del> // datastore keyes for ipam obkects <add> // datastore keyes for ipam objects <ide> dsConfigKey = "ipam-config" // ipam-config/<domain>/<map of subent configs> <ide> dsDataKey = "ipam-data" // ipam-data/<domain>/<subnet>/<child-sudbnet>/<bitmask> <ide> ) <ide> func NewAllocator(ds datastore.DataStore) (*Allocator, error) { <ide> if err != nil { <ide> return fmt.Errorf("failed to load address bitmask for configured subnet %s because of %s", v.Subnet.String(), err.Error()) <ide> } <del> a.insertAddressMasks(k, subnetList) <del> return nil <add> return a.insertAddressMasks(k, subnetList) <ide> }) <ide> } <ide> a.Unlock() <ide> func (a *Allocator) request(addrSpace AddressSpace, req *AddressRequest, version <ide> <ide> // Release allows releasing the address from the specified address space <ide> func (a *Allocator) Release(addrSpace AddressSpace, address net.IP) { <add> var ( <add> space *bitseq.Handle <add> sub *net.IPNet <add> ) <add> <ide> if address == nil { <add> log.Debugf("Requested to remove nil address from address space %s", addrSpace) <ide> return <ide> } <add> <ide> ver := getAddressVersion(address) <ide> if ver == v4 { <ide> address = address.To4() <ide> } <add> <add> // Find the subnet containing the address <ide> for _, subKey := range a.getSubnetList(addrSpace, ver) { <del> a.Lock() <del> space := a.addresses[subKey] <del> a.Unlock() <del> sub := subKey.canonicalChildSubnet() <add> sub = subKey.canonicalChildSubnet() <ide> if sub.Contains(address) { <del> // Retrieve correspondent ordinal in the subnet <del> ordinal := ipToUint32(getHostPortionIP(address, sub)) <del> // Release it <del> if err := space.Unset(ordinal); err != nil { <del> log.Warnf("Failed to release address %s because of internal error: %s", address.String(), err.Error()) <del> } <del> return <add> a.Lock() <add> space = a.addresses[subKey] <add> a.Unlock() <add> break <ide> } <ide> } <add> if space == nil { <add> log.Debugf("Could not find subnet on address space %s containing %s on release", addrSpace, address.String()) <add> return <add> } <add> <add> // Retrieve correspondent ordinal in the subnet <add> hostPart, err := types.GetHostPartIP(address, sub.Mask) <add> if err != nil { <add> log.Warnf("Failed to release address %s on address space %s because of internal error: %v", address.String(), addrSpace, err) <add> return <add> } <add> ordinal := ipToUint32(hostPart) <add> <add> // Release it <add> if err := space.Unset(ordinal); err != nil { <add> log.Warnf("Failed to release address %s on address space %s because of internal error: %v", address.String(), addrSpace, err) <add> } <ide> } <ide> <ide> func (a *Allocator) reserveAddress(addrSpace AddressSpace, subnet *net.IPNet, prefAddress net.IP, ver ipVersion) (net.IP, *net.IPNet, error) { <ide> func (a *Allocator) reserveAddress(addrSpace AddressSpace, subnet *net.IPNet, pr <ide> bitmask, ok := a.addresses[key] <ide> a.Unlock() <ide> if !ok { <del> fmt.Printf("\nDid not find a bitmask for subnet key: %s", key.String()) <add> log.Warnf("Did not find a bitmask for subnet key: %s", key.String()) <ide> continue <ide> } <ide> address, err := a.getAddress(key.canonicalChildSubnet(), bitmask, prefAddress, ver) <ide> func (a *Allocator) getAddress(subnet *net.IPNet, bitmask *bitseq.Handle, prefAd <ide> if prefAddress == nil { <ide> ordinal, err = 
bitmask.SetAny() <ide> } else { <del> err = bitmask.Set(ipToUint32(getHostPortionIP(prefAddress, subnet))) <add> hostPart, e := types.GetHostPartIP(prefAddress, subnet.Mask) <add> if e != nil { <add> return nil, fmt.Errorf("failed to allocate preferred address %s: %v", prefAddress.String(), e) <add> } <add> ordinal = ipToUint32(types.GetMinimalIP(hostPart)) <add> err = bitmask.Set(ordinal) <ide> } <ide> if err != nil { <ide> return nil, ErrNoAvailableIPs <ide> func generateAddress(ordinal uint32, network *net.IPNet) net.IP { <ide> var address [16]byte <ide> <ide> // Get network portion of IP <del> if network.IP.To4() != nil { <add> if getAddressVersion(network.IP) == v4 { <ide> copy(address[:], network.IP.To4()) <ide> } else { <ide> copy(address[:], network.IP) <ide> func ipToUint32(ip []byte) uint32 { <ide> } <ide> return value <ide> } <del> <del>// Given an address and subnet, returns the host portion address <del>func getHostPortionIP(address net.IP, subnet *net.IPNet) net.IP { <del> hostPortion := make([]byte, len(address)) <del> for i := 0; i < len(subnet.Mask); i++ { <del> hostPortion[i] = address[i] &^ subnet.Mask[i] <del> } <del> return hostPortion <del>} <ide><path>libnetwork/ipam/allocator_test.go <ide> package ipam <ide> import ( <ide> "fmt" <ide> "net" <add> "os" <ide> "testing" <ide> "time" <ide> <ide> "github.com/docker/libnetwork/bitseq" <add> "github.com/docker/libnetwork/config" <add> "github.com/docker/libnetwork/datastore" <ide> _ "github.com/docker/libnetwork/netutils" <ide> ) <ide> <add>var ds datastore.DataStore <add> <add>// enable w/ upper case <add>func testMain(m *testing.M) { <add> var err error <add> ds, err = datastore.NewDataStore(&config.DatastoreCfg{Embedded: false, Client: config.DatastoreClientCfg{Provider: "consul", Address: "127.0.0.1:8500"}}) <add> if err != nil { <add> fmt.Println(err) <add> } <add> <add> os.Exit(m.Run()) <add>} <add> <ide> func getAllocator(t *testing.T, subnet *net.IPNet) *Allocator { <del> a, err := NewAllocator(nil) <add> a, err := NewAllocator(ds) <ide> if err != nil { <ide> t.Fatal(err) <ide> } <ide> func TestGetInternalSubnets(t *testing.T) { <ide> for _, d := range input { <ide> assertInternalSubnet(t, d.internalHostSize, d.parentSubnet, d.firstIntSubnet, d.lastIntSubnet) <ide> } <add>} <add> <add>func TestGetSameAddress(t *testing.T) { <add> a, err := NewAllocator(nil) <add> if err != nil { <add> t.Fatal(err) <add> } <ide> <add> addSpace := AddressSpace("giallo") <add> _, subnet, _ := net.ParseCIDR("192.168.100.0/24") <add> if err := a.AddSubnet(addSpace, &SubnetInfo{Subnet: subnet}); err != nil { <add> t.Fatal(err) <add> } <add> <add> ip := net.ParseIP("192.168.100.250") <add> req := &AddressRequest{Subnet: *subnet, Address: ip} <add> <add> _, err = a.Request(addSpace, req) <add> if err != nil { <add> t.Fatal(err) <add> } <add> <add> _, err = a.Request(addSpace, req) <add> if err == nil { <add> t.Fatal(err) <add> } <ide> } <ide> <ide> func TestGetAddress(t *testing.T) { <ide><path>libnetwork/types/types.go <ide> func CompareIPNet(a, b *net.IPNet) bool { <ide> return a.IP.Equal(b.IP) && bytes.Equal(a.Mask, b.Mask) <ide> } <ide> <add>// GetMinimalIP returns the address in its shortest form <add>func GetMinimalIP(ip net.IP) net.IP { <add> if ip != nil && ip.To4() != nil { <add> return ip.To4() <add> } <add> return ip <add>} <add> <add>// GetMinimalIPNet returns a copy of the passed IP Network with congruent ip and mask notation <add>func GetMinimalIPNet(nw *net.IPNet) *net.IPNet { <add> if nw == nil { <add> return nil <add> } 
<add> if len(nw.IP) == 16 && nw.IP.To4() != nil { <add> m := nw.Mask <add> if len(m) == 16 { <add> m = m[12:16] <add> } <add> return &net.IPNet{IP: nw.IP.To4(), Mask: m} <add> } <add> return nw <add>} <add> <add>var v4inV6MaskPrefix = []byte{0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff} <add> <add>// GetHostPartIP returns the host portion of the ip address identified by the mask. <add>// IP address representation is not modified. If address and mask are not compatible <add>// an error is returned. <add>func GetHostPartIP(ip net.IP, mask net.IPMask) (net.IP, error) { <add> // Find the effective starting of address and mask <add> is := 0 <add> ms := 0 <add> if len(ip) == net.IPv6len && ip.To4() != nil { <add> is = 12 <add> } <add> if len(ip[is:]) == net.IPv4len && len(mask) == net.IPv6len && bytes.Equal(mask[:12], v4inV6MaskPrefix) { <add> ms = 12 <add> } <add> <add> // Check if address and mask are semantically compatible <add> if len(ip[is:]) != len(mask[ms:]) { <add> return nil, fmt.Errorf("cannot compute host portion ip address as ip and mask are not compatible: (%#v, %#v)", ip, mask) <add> } <add> <add> // Compute host portion <add> out := GetIPCopy(ip) <add> for i := 0; i < len(mask[ms:]); i++ { <add> out[is+i] &= ^mask[ms+i] <add> } <add> <add> return out, nil <add>} <add> <ide> const ( <ide> // NEXTHOP indicates a StaticRoute with an IP next hop. <ide> NEXTHOP = iota <ide><path>libnetwork/types/types_test.go <ide> package types <ide> <ide> import ( <ide> "flag" <add> "net" <ide> "testing" <ide> ) <ide> <ide> func TestErrorConstructors(t *testing.T) { <ide> t.Fatal(err) <ide> } <ide> } <add> <add>func TestUtilGetHostPortionIP(t *testing.T) { <add> input := []struct { <add> ip net.IP <add> mask net.IPMask <add> host net.IP <add> err error <add> }{ <add> { // ip in v4Inv6 representation, mask in v4 representation <add> ip: net.IPv4(172, 28, 30, 1), <add> mask: []byte{0xff, 0xff, 0xff, 0}, <add> host: net.IPv4(0, 0, 0, 1), <add> }, <add> { // ip and mask in v4Inv6 representation <add> ip: net.IPv4(172, 28, 30, 2), <add> mask: []byte{0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0}, <add> host: net.IPv4(0, 0, 0, 2), <add> }, <add> { // ip in v4 representation, mask in v4Inv6 representation <add> ip: net.IPv4(172, 28, 30, 3)[12:], <add> mask: []byte{0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0}, <add> host: net.IPv4(0, 0, 0, 3)[12:], <add> }, <add> { // ip and mask in v4 representation <add> ip: net.IPv4(172, 28, 30, 4)[12:], <add> mask: []byte{0xff, 0xff, 0xff, 0}, <add> host: net.IPv4(0, 0, 0, 4)[12:], <add> }, <add> { // ip and mask as v6 <add> ip: net.ParseIP("2005:2004:2002:2001:FFFF:ABCD:EEAB:00CD"), <add> mask: []byte{0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0, 0, 0}, <add> host: net.ParseIP("0::AB:00CD"), <add> }, <add> } <add> <add> for _, i := range input { <add> h, err := GetHostPartIP(i.ip, i.mask) <add> if err != nil { <add> t.Fatal(err) <add> } <add> if !i.host.Equal(h) { <add> t.Fatalf("Failed to return expected host ip. Expected: %s. 
Got: %s", i.host, h) <add> } <add> } <add> <add> // ip as v6 and mask as v4 are not compatible <add> if _, err := GetHostPartIP(net.ParseIP("2005:2004:2002:2001:FFFF:ABCD:EEAB:00CD"), []byte{0xff, 0xff, 0xff, 0}); err == nil { <add> t.Fatalf("Unexpected success") <add> } <add> // ip as v4 and non conventional mask <add> if _, err := GetHostPartIP(net.ParseIP("173.32.4.5"), []byte{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff, 0xff, 0xff, 0}); err == nil { <add> t.Fatalf("Unexpected success") <add> } <add> // ip as v4 and non conventional mask <add> if _, err := GetHostPartIP(net.ParseIP("173.32.4.5"), []byte{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff, 0xff, 0xff, 0xff, 0xff, 0}); err == nil { <add> t.Fatalf("Unexpected success") <add> } <add>}
4
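The core of `GetHostPartIP` in the patch above is a byte-wise "ip AND NOT mask", with the result later converted to an ordinal that the allocator marks in its bitmask. The following is a rough Python-only analogue of that arithmetic, not the Go implementation, and it omits the v4-in-v6 compatibility checks the patch adds; the function name is mine.

```python
import ipaddress


def host_part(ip, network):
    """Return the host portion of `ip` as an integer ordinal (ip AND NOT netmask)."""
    addr = ipaddress.ip_address(ip)
    net = ipaddress.ip_network(network, strict=False)
    # net.hostmask is the bitwise complement of the netmask.
    return int(addr) & int(net.hostmask)


assert host_part("172.28.30.1", "172.28.30.0/24") == 1
assert host_part("192.168.100.250", "192.168.100.0/24") == 250
assert str(ipaddress.ip_address(host_part("172.28.30.1", "172.28.30.0/24"))) == "0.0.0.1"
```

The returned integer plays the same role as the value produced by `ipToUint32` on the host part in the allocator.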
PHP
PHP
throw a 429 http exception
990326c988f3b9a844cd6cf47d067bc1dad2173f
<ide><path>src/Illuminate/Routing/Middleware/ThrottleRequests.php <ide> use Illuminate\Support\Carbon; <ide> use Illuminate\Cache\RateLimiter; <ide> use Symfony\Component\HttpFoundation\Response; <add>use Symfony\Component\HttpKernel\Exception\HttpException; <ide> <ide> class ThrottleRequests <ide> { <ide> public function handle($request, Closure $next, $maxAttempts = 60, $decayMinutes <ide> $maxAttempts = $this->resolveMaxAttempts($request, $maxAttempts); <ide> <ide> if ($this->limiter->tooManyAttempts($key, $maxAttempts, $decayMinutes)) { <del> return $this->buildResponse($key, $maxAttempts); <add> $this->buildException($key, $maxAttempts); <ide> } <ide> <ide> $this->limiter->hit($key, $decayMinutes); <ide> protected function resolveRequestSignature($request) <ide> } <ide> <ide> /** <del> * Create a 'too many attempts' response. <add> * Create a 'too many attempts' exception. <ide> * <ide> * @param string $key <ide> * @param int $maxAttempts <del> * @return \Symfony\Component\HttpFoundation\Response <add> * @return void <add> * @throws \Symfony\Component\HttpKernel\Exception\HttpException <ide> */ <del> protected function buildResponse($key, $maxAttempts) <add> protected function buildException($key, $maxAttempts) <ide> { <del> $response = new Response('Too Many Attempts.', 429); <del> <ide> $retryAfter = $this->limiter->availableIn($key); <ide> <del> return $this->addHeaders( <del> $response, $maxAttempts, <add> $headers = $this->getHeaders( <add> $maxAttempts, <ide> $this->calculateRemainingAttempts($key, $maxAttempts, $retryAfter), <ide> $retryAfter <ide> ); <add> <add> throw new HttpException(429, 'Too Many Attempts.', null, $headers); <ide> } <ide> <ide> /** <ide> protected function buildResponse($key, $maxAttempts) <ide> * @return \Symfony\Component\HttpFoundation\Response <ide> */ <ide> protected function addHeaders(Response $response, $maxAttempts, $remainingAttempts, $retryAfter = null) <add> { <add> $response->headers->add( <add> $this->getHeaders($maxAttempts, $remainingAttempts, $retryAfter) <add> ); <add> <add> return $response; <add> } <add> <add> /** <add> * Get the limit headers information. <add> * <add> * @param int $maxAttempts <add> * @param int $remainingAttempts <add> * @param int|null $retryAfter <add> * @return array <add> */ <add> protected function getHeaders($maxAttempts, $remainingAttempts, $retryAfter = null) <ide> { <ide> $headers = [ <ide> 'X-RateLimit-Limit' => $maxAttempts, <ide> protected function addHeaders(Response $response, $maxAttempts, $remainingAttemp <ide> $headers['X-RateLimit-Reset'] = Carbon::now()->getTimestamp() + $retryAfter; <ide> } <ide> <del> $response->headers->add($headers); <del> <del> return $response; <add> return $headers; <ide> } <ide> <ide> /**
1
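For reference, the header set assembled by the new `getHeaders()` method is small enough to sketch outside the framework. The snippet below is a hedged Python analogue of that logic, not the Laravel implementation; the header names come from the diff, the function name is mine.

```python
import time


def rate_limit_headers(max_attempts, remaining_attempts, retry_after=None):
    """Assemble the rate-limit headers attached to throttled and normal responses."""
    headers = {
        "X-RateLimit-Limit": max_attempts,
        "X-RateLimit-Remaining": remaining_attempts,
    }
    if retry_after is not None:
        # Only a throttled (429) response carries the retry information.
        headers["Retry-After"] = retry_after
        headers["X-RateLimit-Reset"] = int(time.time()) + retry_after
    return headers


# A throttled request: no attempts left, the client may retry in 30 seconds.
print(rate_limit_headers(60, 0, retry_after=30))
# A normal request: 59 attempts remaining, no retry headers.
print(rate_limit_headers(60, 59))
```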
Java
Java
replace action1 with consumer in docs
0b0355e3bc09326c8005fd26d09e7c1eb4aeb6e3
<ide><path>src/main/java/io/reactivex/flowables/ConnectableFlowable.java <ide> public Flowable<T> autoConnect(int numberOfSubscribers) { <ide> * @param numberOfSubscribers the number of subscribers to await before calling connect <ide> * on the ConnectableObservable. A non-positive value indicates <ide> * an immediate connection. <del> * @param connection the callback Action1 that will receive the Subscription representing the <add> * @param connection the callback Consumer that will receive the Subscription representing the <ide> * established connection <ide> * @return an Observable that automatically connects to this ConnectableObservable <ide> * when the specified number of Subscribers subscribe to it and calls the <ide><path>src/main/java/io/reactivex/internal/operators/flowable/FlowableRefCount.java <ide> public void subscribeActual(final Subscriber<? super T> subscriber) { <ide> source.connect(onSubscribe(subscriber, writeLocked)); <ide> } finally { <ide> // need to cover the case where the source is subscribed to <del> // outside of this class thus preventing the Action1 passed <add> // outside of this class thus preventing the Consumer passed <ide> // to source.connect above being called <ide> if (writeLocked.get()) { <del> // Action1 passed to source.connect was not called <add> // Consumer passed to source.connect was not called <ide> lock.unlock(); <ide> } <ide> } <ide><path>src/main/java/io/reactivex/internal/operators/observable/ObservableRefCount.java <ide> public void subscribeActual(final Observer<? super T> subscriber) { <ide> source.connect(onSubscribe(subscriber, writeLocked)); <ide> } finally { <ide> // need to cover the case where the source is subscribed to <del> // outside of this class thus preventing the Action1 passed <add> // outside of this class thus preventing the Consumer passed <ide> // to source.connect above being called <ide> if (writeLocked.get()) { <del> // Action1 passed to source.connect was not called <add> // Consumer passed to source.connect was not called <ide> lock.unlock(); <ide> } <ide> } <ide><path>src/main/java/io/reactivex/observables/ConnectableObservable.java <ide> public Observable<T> autoConnect(int numberOfSubscribers) { <ide> * @param numberOfSubscribers the number of subscribers to await before calling connect <ide> * on the ConnectableObservable. A non-positive value indicates <ide> * an immediate connection. <del> * @param connection the callback Action1 that will receive the Subscription representing the <add> * @param connection the callback Consumer that will receive the Subscription representing the <ide> * established connection <ide> * @return an Observable that automatically connects to this ConnectableObservable <ide> * when the specified number of Subscribers subscribe to it and calls the
4
Java
Java
fix crash on reactedittext with appcompat 1.4.0
e21f8ec34984551f87a306672160cc88e67e4793
<ide><path>ReactAndroid/src/main/java/com/facebook/react/views/textinput/ReactEditText.java <ide> public class ReactEditText extends AppCompatEditText <ide> <ide> private ReactViewBackgroundManager mReactBackgroundManager; <ide> <del> private final FabricViewStateManager mFabricViewStateManager = new FabricViewStateManager(); <add> private final @Nullable FabricViewStateManager mFabricViewStateManager = <add> new FabricViewStateManager(); <ide> protected boolean mDisableTextDiffing = false; <ide> <ide> protected boolean mIsSettingTextFromState = false; <ide> private void setIntrinsicContentSize() { <ide> // view, we don't need to construct one or apply it at all - it provides no use in Fabric. <ide> ReactContext reactContext = getReactContext(this); <ide> <del> if (!mFabricViewStateManager.hasStateWrapper() && !reactContext.isBridgeless()) { <add> if (mFabricViewStateManager != null <add> && !mFabricViewStateManager.hasStateWrapper() <add> && !reactContext.isBridgeless()) { <ide> final ReactTextInputLocalData localData = new ReactTextInputLocalData(this); <ide> UIManagerModule uiManager = reactContext.getNativeModule(UIManagerModule.class); <ide> if (uiManager != null) { <ide> public FabricViewStateManager getFabricViewStateManager() { <ide> */ <ide> private void updateCachedSpannable(boolean resetStyles) { <ide> // Noops in non-Fabric <del> if (!mFabricViewStateManager.hasStateWrapper()) { <add> if (mFabricViewStateManager != null && !mFabricViewStateManager.hasStateWrapper()) { <ide> return; <ide> } <ide> // If this view doesn't have an ID yet, we don't have a cache key, so bail here
1
Javascript
Javascript
remove obsolete webpack plugins
34cb05a86074182bc04accf43937d297ececc18e
<ide><path>build/webpack.js <ide> export default async function getBaseWebpackConfig (dir: string, {dev = false, i <ide> // required not to cache removed files <ide> useHashIndex: false <ide> }), <del> new webpack.DefinePlugin({ <del> 'process.env.NODE_ENV': JSON.stringify(dev ? 'development' : 'production') <del> }), <ide> // This is used in client/dev-error-overlay/hot-dev-client.js to replace the dist directory <ide> !isServer && dev && new webpack.DefinePlugin({ <ide> 'process.env.__NEXT_DIST_DIR': JSON.stringify(distDir) <ide> }), <del> !dev && new webpack.optimize.ModuleConcatenationPlugin(), <ide> isServer && new PagesManifestPlugin(), <ide> !isServer && new BuildManifestPlugin(), <ide> !isServer && new PagesPlugin(), <ide><path>test/integration/basic/pages/process-env.js <add>export default () => ( <add> <div id='node-env'>{process.env.NODE_ENV}</div> <add>) <ide><path>test/integration/basic/test/index.test.js <ide> import hmr from './hmr' <ide> import errorRecovery from './error-recovery' <ide> import dynamic from './dynamic' <ide> import asset from './asset' <add>import processEnv from './process-env' <ide> <ide> const context = {} <ide> jasmine.DEFAULT_TIMEOUT_INTERVAL = 1000 * 60 * 5 <ide> describe('Basic Features', () => { <ide> renderViaHTTP(context.appPort, '/with-cdm'), <ide> renderViaHTTP(context.appPort, '/url-prop'), <ide> renderViaHTTP(context.appPort, '/url-prop-override'), <add> renderViaHTTP(context.appPort, '/process-env'), <ide> <ide> renderViaHTTP(context.appPort, '/nav'), <ide> renderViaHTTP(context.appPort, '/nav/about'), <ide> describe('Basic Features', () => { <ide> hmr(context, (p, q) => renderViaHTTP(context.appPort, p, q)) <ide> errorRecovery(context, (p, q) => renderViaHTTP(context.appPort, p, q)) <ide> asset(context) <add> processEnv(context) <ide> }) <ide><path>test/integration/basic/test/process-env.js <add>/* global describe, it, expect */ <add>import webdriver from 'next-webdriver' <add> <add>export default (context, render) => { <add> describe('process.env', () => { <add> it('should set process.env.NODE_ENV in development', async () => { <add> const browser = await webdriver(context.appPort, '/process-env') <add> const nodeEnv = await browser.elementByCss('#node-env').text() <add> expect(nodeEnv).toBe('development') <add> browser.close() <add> }) <add> }) <add>} <ide><path>test/integration/production/pages/process-env.js <add>export default () => ( <add> <div id='node-env'>{process.env.NODE_ENV}</div> <add>) <ide><path>test/integration/production/test/index.test.js <ide> import { <ide> import webdriver from 'next-webdriver' <ide> import fetch from 'node-fetch' <ide> import dynamicImportTests from './dynamic' <add>import processEnv from './process-env' <ide> import security from './security' <ide> import {BUILD_MANIFEST, REACT_LOADABLE_MANIFEST} from 'next/constants' <ide> <ide> describe('Production Usage', () => { <ide> <ide> dynamicImportTests(context, (p, q) => renderViaHTTP(context.appPort, p, q)) <ide> <add> processEnv(context) <ide> security(context) <ide> }) <ide><path>test/integration/production/test/process-env.js <add>/* global describe, it, expect */ <add>import webdriver from 'next-webdriver' <add> <add>export default (context) => { <add> describe('process.env', () => { <add> it('should set process.env.NODE_ENV in production', async () => { <add> const browser = await webdriver(context.appPort, '/process-env') <add> const nodeEnv = await browser.elementByCss('#node-env').text() <add> expect(nodeEnv).toBe('production') <add> browser.close() 
<add> }) <add> }) <add>}
7
Javascript
Javascript
replace var with const in test-require-dot
fa4f1587d34c8d18ab93b7c3e9fc37d7e05697a9
<ide><path>test/parallel/test-require-dot.js <ide> 'use strict'; <del>var common = require('../common'); <del>var assert = require('assert'); <del>var module = require('module'); <add>const common = require('../common'); <add>const assert = require('assert'); <add>const m = require('module'); <ide> <del>var a = require(common.fixturesDir + '/module-require/relative/dot.js'); <del>var b = require(common.fixturesDir + '/module-require/relative/dot-slash.js'); <add>const a = require(common.fixturesDir + '/module-require/relative/dot.js'); <add>const b = require(common.fixturesDir + '/module-require/relative/dot-slash.js'); <ide> <del>assert.equal(a.value, 42); <del>assert.equal(a, b, 'require(".") should resolve like require("./")'); <add>assert.strictEqual(a.value, 42); <add>assert.strictEqual(a, b, 'require(".") should resolve like require("./")'); <ide> <ide> process.env.NODE_PATH = common.fixturesDir + '/module-require/relative'; <del>module._initPaths(); <add>m._initPaths(); <ide> <del>var c = require('.'); <add>const c = require('.'); <ide> <del>assert.equal(c.value, 42, 'require(".") should honor NODE_PATH'); <add>assert.strictEqual(c.value, 42, 'require(".") should honor NODE_PATH');
1
PHP
PHP
allow multiple manifest files for mix helper
404671a623e955f0eb593eb7436fa24dceef2bce
<ide><path>src/Illuminate/Foundation/helpers.php <ide> function method_field($method) <ide> /** <ide> * Get the path to a versioned Mix file. <ide> * <del> * @param string $path <add> * @param string $path <add> * @param string $manifestDir <ide> * @return \Illuminate\Support\HtmlString <ide> * <ide> * @throws \Exception <ide> */ <del> function mix($path) <add> function mix($path, $manifestDir = '') <ide> { <ide> static $manifest; <del> static $shouldHotReload; <ide> <del> if (! $manifest) { <del> if (! file_exists($manifestPath = public_path('mix-manifest.json'))) { <add> if ( $manifestDir && ! starts_with($manifestDir, '/')) { <add> $manifestDir = "/{$manifestDir}"; <add> } <add> <add> if ( ! $manifest) { <add> if ( ! file_exists($manifestPath = public_path($manifestDir . '/mix-manifest.json'))) { <ide> throw new Exception('The Mix manifest does not exist.'); <ide> } <ide> <ide> $manifest = json_decode(file_get_contents($manifestPath), true); <ide> } <ide> <del> if (! starts_with($path, '/')) { <add> if ( ! starts_with($path, '/')) { <ide> $path = "/{$path}"; <ide> } <ide> <del> if (! array_key_exists($path, $manifest)) { <add> if ( ! array_key_exists($path, $manifest)) { <ide> throw new Exception( <del> "Unable to locate Mix file: {$path}. Please check your ". <add> "Unable to locate Mix file: {$path}. Please check your " . <ide> 'webpack.mix.js output paths and try again.' <ide> ); <ide> } <ide> <del> return $shouldHotReload = file_exists(public_path('hot')) <del> ? new HtmlString("http://localhost:8080{$manifest[$path]}") <del> : new HtmlString($manifest[$path]); <add> return file_exists(public_path($manifestDir . '/hot')) <add> ? new HtmlString("http://localhost:8080{$manifest[$path]}") <add> : new HtmlString($manifestDir . $manifest[$path]); <ide> } <ide> } <ide>
1
PHP
PHP
use fewer empty calls
b4aed2425e9a06ae32a7194935b5d069315a1b00
<ide><path>src/Controller/Component/CsrfComponent.php <ide> public function startup(Event $event) <ide> if ($request->is('get') && $cookieData === null) { <ide> $this->_setCookie($request, $response); <ide> } <del> if ($request->is(['put', 'post', 'delete', 'patch']) || !empty($request->data())) { <add> if ($request->is(['put', 'post', 'delete', 'patch']) || $request->data()) { <ide> $this->_validateToken($request); <ide> unset($request->data[$this->_config['field']]); <ide> } <ide><path>src/Controller/Component/SecurityComponent.php <ide> public function startup(Event $event) <ide> $controller = $event->subject(); <ide> $this->session = $this->request->session(); <ide> $this->_action = $this->request->param('action'); <del> $hasData = !empty($this->request->data()); <add> $hasData = (bool)$this->request->data(); <ide> try { <ide> $this->_secureRequired($controller); <ide> $this->_authRequired($controller); <ide> protected function _authRequired(Controller $controller) <ide> { <ide> if (is_array($this->_config['requireAuth']) && <ide> !empty($this->_config['requireAuth']) && <del> !empty($this->request->data()) <add> $this->request->data() <ide> ) { <ide> $requireAuth = $this->_config['requireAuth']; <ide> <ide> protected function _authRequired(Controller $controller) <ide> */ <ide> protected function _validatePost(Controller $controller) <ide> { <del> if (empty($controller->request->data())) { <add> if (!$controller->request->data()) { <ide> return true; <ide> } <ide> $token = $this->_validToken($controller);
2
Python
Python
streamline test for
6c083b12a1162bf8e0f51e6c52ff13a1bd621cf2
<ide><path>tests/browsable_api/auth_urls.py <ide> from __future__ import unicode_literals <ide> from django.conf.urls import patterns, url, include <del>from rest_framework import routers <ide> <del>from .views import MockView, FooViewSet, BarViewSet <add>from .views import MockView <ide> <del>router = routers.SimpleRouter() <del>router.register(r'foo', FooViewSet) <del>router.register(r'bar', BarViewSet) <ide> <ide> urlpatterns = patterns( <ide> '', <ide> (r'^$', MockView.as_view()), <del> url(r'^', include(router.urls)), <del> url(r'^bar/(?P<pk>\d+)/$', BarViewSet, name='bar-list'), <ide> url(r'^auth/', include('rest_framework.urls', namespace='rest_framework')), <ide> ) <ide><path>tests/browsable_api/models.py <del>from django.db import models <del> <del> <del>class Foo(models.Model): <del> name = models.CharField(max_length=30) <del> <del> <del>class Bar(models.Model): <del> foo = models.ForeignKey("Foo", editable=False) <ide><path>tests/browsable_api/serializers.py <del>from .models import Foo, Bar <del>from rest_framework.serializers import HyperlinkedModelSerializer, HyperlinkedIdentityField <del> <del> <del>class FooSerializer(HyperlinkedModelSerializer): <del> bar = HyperlinkedIdentityField(view_name='bar-list') <del> <del> class Meta: <del> model = Foo <del> <del> <del>class BarSerializer(HyperlinkedModelSerializer): <del> class Meta: <del> model = Bar <ide><path>tests/browsable_api/test_browsable_api.py <ide> from django.test import TestCase <ide> <ide> from rest_framework.test import APIClient <del>from .models import Foo, Bar <ide> <ide> <ide> class DropdownWithAuthTests(TestCase): <ide> def setUp(self): <ide> self.email = '[email protected]' <ide> self.password = 'password' <ide> self.user = User.objects.create_user(self.username, self.email, self.password) <del> foo = Foo.objects.create(name='Foo') <del> Bar.objects.create(foo=foo) <ide> <ide> def tearDown(self): <ide> self.client.logout() <ide> def test_name_shown_when_logged_in(self): <ide> response = self.client.get('/') <ide> self.assertContains(response, 'john') <ide> <del> def test_bug_2455_clone_request(self): <del> self.client.login(username=self.username, password=self.password) <del> json_response = self.client.get('/foo/1/?format=json') <del> self.assertEqual(json_response.status_code, 200) <del> browsable_api_response = self.client.get('/foo/1/') <del> self.assertEqual(browsable_api_response.status_code, 200) <del> <ide> def test_logout_shown_when_logged_in(self): <ide> self.client.login(username=self.username, password=self.password) <ide> response = self.client.get('/') <ide><path>tests/browsable_api/views.py <ide> from __future__ import unicode_literals <ide> <ide> from rest_framework.views import APIView <del>from rest_framework.viewsets import ModelViewSet <ide> from rest_framework import authentication <ide> from rest_framework import renderers <ide> from rest_framework.response import Response <del>from rest_framework.renderers import BrowsableAPIRenderer, JSONRenderer <del>from rest_framework.versioning import NamespaceVersioning <del>from .models import Foo, Bar <del>from .serializers import FooSerializer, BarSerializer <ide> <ide> <ide> class MockView(APIView): <ide> class MockView(APIView): <ide> <ide> def get(self, request): <ide> return Response({'a': 1, 'b': 2, 'c': 3}) <del> <del> <del>class SerializerClassMixin(object): <del> def get_serializer_class(self): <del> # Get base name of serializer <del> self.request.version <del> return self.serializer_class <del> <del> <del>class 
FooViewSet(SerializerClassMixin, ModelViewSet): <del> versioning_class = NamespaceVersioning <del> model = Foo <del> queryset = Foo.objects.all() <del> serializer_class = FooSerializer <del> renderer_classes = (BrowsableAPIRenderer, JSONRenderer) <del> <del> <del>class BarViewSet(SerializerClassMixin, ModelViewSet): <del> model = Bar <del> queryset = Bar.objects.all() <del> serializer_class = BarSerializer <del> renderer_classes = (BrowsableAPIRenderer, ) <ide><path>tests/test_metadata.py <ide> from __future__ import unicode_literals <ide> from rest_framework import exceptions, serializers, status, views <ide> from rest_framework.request import Request <add>from rest_framework.renderers import BrowsableAPIRenderer <ide> from rest_framework.test import APIRequestFactory <ide> <ide> request = Request(APIRequestFactory().options('/')) <ide> def get_object(self): <ide> response = view(request=request) <ide> assert response.status_code == status.HTTP_200_OK <ide> assert list(response.data['actions'].keys()) == ['POST'] <add> <add> def test_bug_2455_clone_request(self): <add> class ExampleView(views.APIView): <add> renderer_classes = (BrowsableAPIRenderer,) <add> <add> def post(self, request): <add> pass <add> <add> def get_serializer(self): <add> assert hasattr(self.request, 'version') <add> return serializers.Serializer() <add> <add> view = ExampleView.as_view() <add> view(request=request)
6
PHP
PHP
deprecate several log methods
a8a65130c92d9e818dc09f3c538707153c5a3e84
<ide><path>lib/Cake/Log/Log.php <ide> public static function reset() { <ide> static::$_registry = null; <ide> } <ide> <del>/** <del> * @deprecated Use Configure::write() to configure logging. <del> * @see App/Config/logging.php <del> * @return void <del> */ <del> public static function config($key, $config) { <del> trigger_error( <del> __d('cake_dev', 'You must use Configure::write() to define logging configuration. Or use engine() to inject new adapter.'), <del> E_USER_WARNING <del> ); <del> } <del> <ide> /** <ide> * Returns the keynames of the currently active streams <ide> * <ide> public static function levels() { <ide> return static::$_levels; <ide> } <ide> <add>/** <add> * @deprecated Use Configure::write() to configure logging. <add> * @see App/Config/logging.php <add> * @return void <add> */ <add> public static function config($key, $config) { <add> trigger_error( <add> __d('cake_dev', 'You must use Configure::write() to define logging configuration. Or use engine() to inject new adapter.'), <add> E_USER_WARNING <add> ); <add> } <add> <ide> /** <ide> * Removes a stream from the active streams. Once a stream has been removed <ide> * it will no longer have messages sent to it. <ide> public static function drop($streamName) { <ide> } <ide> <ide> /** <del> * Checks wether $streamName is enabled <add> * Checks whether $streamName is enabled <ide> * <ide> * @param string $streamName to check <ide> * @return bool <ide> * @throws Cake\Error\Exception <add> * @deprecated This method will be removed in 3.0 stable. <ide> */ <ide> public static function enabled($streamName) { <del> static::_init(); <del> if (!isset(static::$_registry->{$streamName})) { <del> throw new Error\Exception(__d('cake_dev', 'Stream %s not found', $streamName)); <del> } <del> return static::$_registry->enabled($streamName); <add> throw new Error\Exception(__d('cake_dev', 'Log::enabled() is deprecated. Use Log::configured() instead.')); <ide> } <ide> <ide> /** <del> * Enable stream. Streams that were previously disabled <del> * can be re-enabled with this method. <add> * Enable stream. <ide> * <ide> * @param string $streamName to enable <ide> * @return void <ide> * @throws Cake\Error\Exception <add> * @deprecated This method will be removed in 3.0 stable. <ide> */ <ide> public static function enable($streamName) { <del> static::_init(); <del> if (!isset(static::$_registry->{$streamName})) { <del> throw new Error\Exception(__d('cake_dev', 'Stream %s not found', $streamName)); <del> } <del> static::$_registry->enable($streamName); <add> throw new Error\Exception(__d('cake_dev', 'Log::enable() is deprecated. Use Log::engine() instead.')); <ide> } <ide> <ide> /** <del> * Disable stream. Disabling a stream will <del> * prevent that log stream from receiving any messages until <del> * its re-enabled. <add> * Disable stream. <ide> * <ide> * @param string $streamName to disable <ide> * @return void <ide> * @throws Cake\Error\Exception <add> * @deprecated This method will be removed in 3.0 stable. <ide> */ <ide> public static function disable($streamName) { <del> static::_init(); <del> if (!isset(static::$_registry->{$streamName})) { <del> throw new Error\Exception(__d('cake_dev', 'Stream %s not found', $streamName)); <del> } <del> static::$_registry->disable($streamName); <add> throw new Error\Exception(__d('cake_dev', 'Log::disable() is deprecated. 
Use Log::drop() instead.')); <ide> } <ide> <ide> /** <ide><path>lib/Cake/Test/TestCase/Log/LogTest.php <ide> public function testSelectiveLoggingByLevel() { <ide> } <ide> <ide> /** <del> * test enable <add> * test enable() throws exceptions <ide> * <ide> * @expectedException Cake\Error\Exception <ide> */ <ide> public function testStreamEnable() { <del> Configure::write('Log.spam', array( <del> 'engine' => 'File', <del> 'file' => 'spam', <del> )); <del> $this->assertTrue(Log::enabled('spam')); <del> Log::drop('spam'); <del> Log::enable('bogus_stream'); <del> } <del> <del>/** <del> * test disable <del> * <del> * @expectedException Cake\Error\Exception <del> */ <del> public function testStreamDisable() { <del> Configure::write('Log.spam', array( <del> 'engine' => 'File', <del> 'file' => 'spam', <del> )); <del> $this->assertTrue(Log::enabled('spam')); <del> Log::disable('spam'); <del> $this->assertFalse(Log::enabled('spam')); <del> Log::drop('spam'); <del> Log::enable('bogus_stream'); <add> Log::enable('debug'); <ide> } <ide> <ide> /** <del> * test enabled() invalid stream <add> * test enabled() throws exceptions <ide> * <ide> * @expectedException Cake\Error\Exception <ide> */ <del> public function testStreamEnabledInvalid() { <del> Log::enabled('bogus_stream'); <add> public function testStreamEnabled() { <add> Log::enabled('debug'); <ide> } <ide> <ide> /** <del> * test disable invalid stream <add> * test disable() throws exceptions <ide> * <ide> * @expectedException Cake\Error\Exception <ide> */ <del> public function testStreamDisableInvalid() { <del> Log::disable('bogus_stream'); <add> public function testStreamDisable() { <add> Log::disable('debug'); <ide> } <ide> <ide> protected function _resetLogConfig() {
2
Python
Python
use select inputs for relationships. closes
fd97d9bff82b96b9362930686b9008ba78326115
<ide><path>rest_framework/relations.py <ide> def __init__(self, **kwargs): <ide> <ide> def __new__(cls, *args, **kwargs): <ide> # We override this method in order to automagically create <del> # `ManyRelation` classes instead when `many=True` is set. <add> # `ManyRelatedField` classes instead when `many=True` is set. <ide> if kwargs.pop('many', False): <ide> list_kwargs = {'child_relation': cls(*args, **kwargs)} <ide> for key in kwargs.keys(): <ide> if key in MANY_RELATION_KWARGS: <ide> list_kwargs[key] = kwargs[key] <del> return ManyRelation(**list_kwargs) <add> return ManyRelatedField(**list_kwargs) <ide> return super(RelatedField, cls).__new__(cls, *args, **kwargs) <ide> <ide> def run_validation(self, data=empty): <ide> def to_representation(self, obj): <ide> return getattr(obj, self.slug_field) <ide> <ide> <del>class ManyRelation(Field): <add>class ManyRelatedField(Field): <ide> """ <ide> Relationships with `many=True` transparently get coerced into instead being <del> a ManyRelation with a child relationship. <add> a ManyRelatedField with a child relationship. <ide> <del> The `ManyRelation` class is responsible for handling iterating through <add> The `ManyRelatedField` class is responsible for handling iterating through <ide> the values and passing each one to the child relationship. <ide> <ide> You shouldn't need to be using this class directly yourself. <ide> class ManyRelation(Field): <ide> def __init__(self, child_relation=None, *args, **kwargs): <ide> self.child_relation = child_relation <ide> assert child_relation is not None, '`child_relation` is a required argument.' <del> super(ManyRelation, self).__init__(*args, **kwargs) <add> super(ManyRelatedField, self).__init__(*args, **kwargs) <ide> self.child_relation.bind(field_name='', parent=self) <ide> <ide> def get_value(self, dictionary): <ide><path>rest_framework/renderers.py <ide> class HTMLFormRenderer(BaseRenderer): <ide> serializers.MultipleChoiceField: { <ide> 'base_template': 'select_multiple.html', # Also valid: 'checkbox_multiple.html' <ide> }, <del> serializers.ManyRelation: { <add> serializers.RelatedField: { <add> 'base_template': 'select.html', # Also valid: 'radio.html' <add> }, <add> serializers.ManyRelatedField: { <ide> 'base_template': 'select_multiple.html', # Also valid: 'checkbox_multiple.html' <ide> }, <ide> serializers.Serializer: {
2
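The rename above (`ManyRelation` to `ManyRelatedField`) sits on top of a `__new__`-based dispatch: constructing a relational field with `many=True` transparently returns a list-handling wrapper around a child relation. A stripped-down, framework-free sketch of that pattern follows; the class names mirror the patch, everything else is simplified for illustration.

```python
class ManyRelatedField:
    """Wrapper that applies a child relation to each item of a list."""

    def __init__(self, child_relation):
        self.child_relation = child_relation

    def to_representation(self, iterable):
        return [self.child_relation.to_representation(item) for item in iterable]


class RelatedField:
    def __init__(self, queryset=None, **kwargs):
        self.queryset = queryset

    def __new__(cls, *args, **kwargs):
        # `many=True` transparently swaps in the list-handling wrapper.
        if kwargs.pop("many", False):
            return ManyRelatedField(child_relation=cls(*args, **kwargs))
        return super().__new__(cls)

    def to_representation(self, value):
        return str(value)


field = RelatedField(queryset=None, many=True)
assert isinstance(field, ManyRelatedField)
assert field.to_representation([1, 2]) == ["1", "2"]
```

Because `__new__` returns an object that is not an instance of the requested class, Python skips the original `__init__`, which is what lets the wrapper be substituted without the caller noticing.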
Javascript
Javascript
remove rethrowcaughterror injection
0a41890eaa018a3509912c12df6c4f24fc0a3200
<ide><path>src/renderers/shared/utils/ReactErrorUtils.js <ide> const ReactErrorUtils = { <ide> typeof injectedErrorUtils.invokeGuardedCallback === 'function', <ide> 'Injected invokeGuardedCallback() must be a function.', <ide> ); <del> invariant( <del> typeof injectedErrorUtils.rethrowCaughtError === 'function', <del> 'Injected rethrowCaughtError() must be a function.', <del> ); <ide> invokeGuardedCallback = injectedErrorUtils.invokeGuardedCallback; <del> rethrowCaughtError = injectedErrorUtils.rethrowCaughtError; <ide> }, <ide> }, <ide>
1
Ruby
Ruby
fix renew feature on cookies
afc3ccf74cbb6c3d495558e934ced1c006dacda8
<ide><path>actionpack/lib/action_dispatch/middleware/session/cookie_store.rb <ide> def unpacked_cookie_data(env) <ide> end <ide> <ide> def set_session(env, sid, session_data, options) <del> persistent_session_id!(session_data, sid) <add> session_data.merge("session_id" => sid) <ide> end <ide> <ide> def set_cookie(env, session_id, cookie) <ide><path>actionpack/test/dispatch/session/cookie_store_test.rb <ide> def change_session_id <ide> get_session_id <ide> end <ide> <add> def renew_session_id <add> request.session_options[:renew] = true <add> head :ok <add> end <add> <ide> def rescue_action(e) raise end <ide> end <ide> <ide> def test_does_not_set_secure_cookies_over_http <ide> end <ide> end <ide> <add> def test_properly_renew_cookies <add> with_test_route_set do <add> get '/set_session_value' <add> get '/persistent_session_id' <add> session_id = response.body <add> get '/renew_session_id' <add> get '/persistent_session_id' <add> assert_not_equal response.body, session_id <add> end <add> end <add> <ide> def test_does_set_secure_cookies_over_https <ide> with_test_route_set(:secure => true) do <ide> get '/set_session_value', nil, 'HTTPS' => 'on'
2
PHP
PHP
fix tests that occasionally fail on postgres
e0586da808431811dc2d0f0738032b7a5838f8c0
<ide><path>lib/Cake/Test/Case/Model/BehaviorCollectionTest.php <ide> public function testBehaviorHasOneFindCallbacks() { <ide> public function testBehaviorBelongsToFindCallbacks() { <ide> $this->skipIf($this->db instanceof Sqlserver, 'This test is not compatible with SQL Server.'); <ide> <add> $conditions = array('order' => 'Apple.id ASC'); <ide> $Apple = new Apple(); <ide> $Apple->unbindModel(array('hasMany' => array('Child'), 'hasOne' => array('Sample')), false); <del> $expected = $Apple->find('all'); <add> $expected = $Apple->find('all', $conditions); <ide> <ide> $Apple->unbindModel(array('belongsTo' => array('Parent'))); <del> $wellBehaved = $Apple->find('all'); <add> $wellBehaved = $Apple->find('all', $conditions); <ide> $Apple->Parent->Behaviors->attach('Test'); <ide> $Apple->unbindModel(array('belongsTo' => array('Parent'))); <del> $this->assertSame($Apple->find('all'), $wellBehaved); <add> $this->assertSame($Apple->find('all', $conditions), $wellBehaved); <ide> <ide> $Apple->Parent->Behaviors->attach('Test', array('before' => 'off')); <del> $this->assertSame($expected, $Apple->find('all')); <add> $this->assertSame($expected, $Apple->find('all', $conditions)); <ide> <ide> $Apple->Parent->Behaviors->attach('Test', array('before' => 'test')); <del> $this->assertSame($expected, $Apple->find('all')); <add> $this->assertSame($expected, $Apple->find('all', $conditions)); <ide> <ide> $Apple->Parent->Behaviors->attach('Test', array('before' => 'modify')); <ide> $expected2 = array( <ide> public function testBehaviorBelongsToFindCallbacks() { <ide> ); <ide> $result2 = $Apple->find('all', array( <ide> 'fields' => array('Apple.id', 'Parent.id', 'Parent.name', 'Parent.mytime'), <del> 'conditions' => array('Apple.id <' => '4') <add> 'conditions' => array('Apple.id <' => '4'), <add> 'order' => 'Apple.id ASC', <ide> )); <ide> $this->assertEquals($expected2, $result2); <ide> <ide> $Apple->Parent->Behaviors->disable('Test'); <del> $result = $Apple->find('all'); <add> $result = $Apple->find('all', $conditions); <ide> $this->assertEquals($expected, $result); <ide> <ide> $Apple->Parent->Behaviors->attach('Test', array('after' => 'off')); <del> $this->assertEquals($expected, $Apple->find('all')); <add> $this->assertEquals($expected, $Apple->find('all', $conditions)); <ide> <ide> $Apple->Parent->Behaviors->attach('Test', array('after' => 'test')); <del> $this->assertEquals($expected, $Apple->find('all')); <add> $this->assertEquals($expected, $Apple->find('all', $conditions)); <ide> <ide> $Apple->Parent->Behaviors->attach('Test', array('after' => 'test2')); <del> $this->assertEquals($expected, $Apple->find('all')); <add> $this->assertEquals($expected, $Apple->find('all', $conditions)); <ide> } <ide> <ide> /**
1
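The fix above pins each compared `find('all')` call to `'order' => 'Apple.id ASC'`, since no SQL engine guarantees row order without an explicit ORDER BY, which is the usual reason such equality assertions fail intermittently on PostgreSQL. A tiny illustration of the same idea using the SQLite driver from the Python standard library (the table and data are invented):

```python
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE apples (id INTEGER PRIMARY KEY, name TEXT)")
conn.executemany(
    "INSERT INTO apples VALUES (?, ?)",
    [(3, "Gala"), (1, "Fuji"), (2, "Braeburn")],
)

# Without ORDER BY the engine may return rows in any order it likes; this
# result is kept only to show the query shape and cannot be asserted on.
unordered = conn.execute("SELECT id, name FROM apples").fetchall()

# Pinning the order makes the expected/actual comparison deterministic.
ordered = conn.execute("SELECT id, name FROM apples ORDER BY id ASC").fetchall()
assert ordered == [(1, "Fuji"), (2, "Braeburn"), (3, "Gala")]
```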
Text
Text
remove nat rule when removing bridge
d10d0e568e44a4e55293610a5cac84359719baff
<ide><path>docs/sources/articles/networking.md <ide> stopping the service and removing the interface: <ide> $ sudo service docker stop <ide> $ sudo ip link set dev docker0 down <ide> $ sudo brctl delbr docker0 <add> $ sudo iptables -t nat -F POSTROUTING <ide> <ide> Then, before starting the Docker service, create your own bridge and <ide> give it whatever configuration you want. Here we will create a simple <ide> illustrate the technique. <ide> inet 192.168.5.1/24 scope global bridge0 <ide> valid_lft forever preferred_lft forever <ide> <add> # Confirming outgoing NAT masquerade is setup <add> <add> $ sudo iptables -t nat -L -n <add> ... <add> Chain POSTROUTING (policy ACCEPT) <add> target prot opt source destination <add> MASQUERADE all -- 192.168.5.0/24 0.0.0.0/0 <add> <ide> # Tell Docker about it and restart (on Ubuntu) <ide> <ide> $ echo 'DOCKER_OPTS="-b=bridge0"' >> /etc/default/docker
1
Text
Text
add security contact to readme
f9ac0b384ac64b024dfcf277fd80daa52b941142
<ide><path>readme.md <ide> Documentation for the framework can be found on the [Laravel website](http://lar <ide> <ide> Thank you for considering contributing to the Laravel framework! The contribution guide can be found in the [Laravel documentation](http://laravel.com/docs/contributions). <ide> <add>## Security Vulnerabilities <add> <add>If you discover a security vulnerability within Laravel, please send an e-mail to Taylor Otwell at [email protected]. All security vulnerabilities will be promptly addressed. <add> <ide> ### License <ide> <ide> The Laravel framework is open-sourced software licensed under the [MIT license](http://opensource.org/licenses/MIT)
1