content_type (stringclasses, 8 values)
main_lang (stringclasses, 7 values)
message (stringlengths, 1 to 50)
sha (stringlengths, 40 to 40)
patch (stringlengths, 52 to 962k)
file_count (int64, 1 to 300)

content_type: PHP
main_lang: PHP
message: add method to contract
sha: 53ac15248368ebda4a809b08ec0f25218ee9e6d5
patch:
<ide><path>src/Illuminate/Contracts/Encryption/Encrypter.php <ide> public function encrypt($value, $serialize = true); <ide> * @throws \Illuminate\Contracts\Encryption\DecryptException <ide> */ <ide> public function decrypt($payload, $unserialize = true); <add> <add> /** <add> * Get the encryption key that the encrypter is currently using. <add> * <add> * @return string <add> */ <add> public function getKey(); <ide> } <ide><path>src/Illuminate/Encryption/Encrypter.php <ide> protected function validMac(array $payload) <ide> } <ide> <ide> /** <del> * Get the encryption key. <add> * Get the encryption key that the encrypter is currently using. <ide> * <ide> * @return string <ide> */
file_count: 2

content_type: PHP
main_lang: PHP
message: add a sequence builder to the new http client
sha: 391a1ad63ac4577a5d7730c67ccad3bd9dcf50e0
patch:
<ide><path>src/Illuminate/Http/Client/Factory.php <ide> public static function response($body = null, $status = 200, $headers = []) <ide> * @param array $responses <ide> * @return \Illuminate\Http\Client\ResponseSequence <ide> */ <del> public static function sequence(array $responses) <add> public static function sequence(array $responses = []) <ide> { <ide> return new ResponseSequence($responses); <ide> } <ide><path>src/Illuminate/Http/Client/ResponseSequence.php <ide> public function __construct(array $responses) <ide> $this->responses = $responses; <ide> } <ide> <add> /** <add> * Push a response to the sequence. <add> * <add> * @param mixed $response <add> * @return $this <add> */ <add> public function pushResponse($response) <add> { <add> $this->responses[] = $response; <add> <add> return $this; <add> } <add> <add> /** <add> * Push an empty response to the sequence. <add> * <add> * @param int $status <add> * @param array $headers <add> * @return $this <add> */ <add> public function pushEmptyResponse($status = 200, $headers = []) <add> { <add> return $this->pushResponse( <add> Factory::response('', $status, $headers) <add> ); <add> } <add> <add> /** <add> * Push response with a string body to the sequence. <add> * <add> * @param string $string <add> * @param int $status <add> * @param array $headers <add> * @return $this <add> */ <add> public function pushString($string, $status = 200, $headers = []) <add> { <add> return $this->pushResponse( <add> Factory::response($string, $status, $headers) <add> ); <add> } <add> <add> /** <add> * Push response with a json body to the sequence. <add> * <add> * @param array $data <add> * @param int $status <add> * @param array $headers <add> * @return $this <add> */ <add> public function pushJson(array $data, $status = 200, $headers = []) <add> { <add> return $this->pushResponse( <add> Factory::response(json_encode($data), $status, $headers) <add> ); <add> } <add> <add> /** <add> * Push response with the contents of a file as the body to the sequence. <add> * <add> * @param string $filePath <add> * @param int $status <add> * @param array $headers <add> * @return $this <add> */ <add> public function pushFile($filePath, $status = 200, $headers = []) <add> { <add> $string = file_get_contents($filePath); <add> <add> return $this->pushResponse( <add> Factory::response($string, $status, $headers) <add> ); <add> } <add> <ide> /** <ide> * Get the next response in the sequence. 
<ide> * <ide><path>tests/Http/HttpClientTest.php <ide> namespace Illuminate\Tests\Http; <ide> <ide> use Illuminate\Http\Client\Factory; <add>use Illuminate\Http\Client\PendingRequest; <ide> use Illuminate\Support\Str; <ide> use PHPUnit\Framework\TestCase; <ide> <ide> public function testFilesCanBeAttached() <ide> $request->hasFile('foo', 'data', 'file.txt'); <ide> }); <ide> } <add> <add> public function testSequenceBuilder() <add> { <add> $factory = new Factory; <add> <add> $factory->fake([ <add> '*' => Factory::sequence() <add> ->pushString('Ok', 201) <add> ->pushJson(['fact' => 'Cats are great!']) <add> ->pushFile(__DIR__.'/fixtures/test.txt') <add> ->pushEmptyResponse(403), <add> ]); <add> <add> /** @var PendingRequest $factory */ <add> $response = $factory->get('https://example.com'); <add> $this->assertSame('Ok', $response->body()); <add> $this->assertSame(201, $response->status()); <add> <add> $response = $factory->get('https://example.com'); <add> $this->assertSame(['fact' => 'Cats are great!'], $response->json()); <add> $this->assertSame(200, $response->status()); <add> <add> $response = $factory->get('https://example.com'); <add> $this->assertSame("This is a story about something that happened long ago when your grandfather was a child.\n", $response->body()); <add> $this->assertSame(200, $response->status()); <add> <add> $response = $factory->get('https://example.com'); <add> $this->assertSame("", $response->body()); <add> $this->assertSame(403, $response->status()); <add> } <ide> }
file_count: 3

content_type: Text
main_lang: Text
message: release notes for 1.3.0-beta.19 rafter-ascension
sha: b93601e691e6f5e042b1d26e04bd330fab9e1120
patch:
<ide><path>CHANGELOG.md <add><a name="1.3.0-beta.19"></a> <add># 1.3.0-beta.19 rafter-ascension (2014-08-22) <add> <add> <add>## Bug Fixes <add> <add>- **$compile:** <add> - use the correct namespace for transcluded SVG elements <add> ([ffbd276d](https://github.com/angular/angular.js/commit/ffbd276d6def6ff35bfdb30553346e985f4a0de6), <add> [#8716](https://github.com/angular/angular.js/issues/8716)) <add> - update the jQuery `.context` when an element is replaced by `replace:true` directive <add> ([f02f7d9c](https://github.com/angular/angular.js/commit/f02f7d9c15deea9c5d83212301e2a5e18223bbe5), <add> [#8253](https://github.com/angular/angular.js/issues/8253), [#7900](https://github.com/angular/angular.js/issues/7900)) <add>- **$location:** <add> - rewrite relative URI correctly if `path==='/'` in legacy html5Mode <add> ([d18b2819](https://github.com/angular/angular.js/commit/d18b2819768e467897dee7bc223876ca23ea71b1), <add> [#8684](https://github.com/angular/angular.js/issues/8684)) <add> - don't call `indexOf()` of undefined `href` attribute <add> ([5b77e30c](https://github.com/angular/angular.js/commit/5b77e30c1ac49be7b079b82527a5631f68bac904), <add> [#7721](https://github.com/angular/angular.js/issues/7721), [#8681](https://github.com/angular/angular.js/issues/8681)) <add>- **$parse:** remove unused variable declaration in generated getters <add> ([6acea115](https://github.com/angular/angular.js/commit/6acea1152f72a4026583897c67bea2839bc9e89e)) <add>- **$sanitize:** sanitize javascript urls with comments <add> ([b7e82a33](https://github.com/angular/angular.js/commit/b7e82a33eee03fc683f982c6ee13d15d88b07f67), <add> [#8274](https://github.com/angular/angular.js/issues/8274)) <add>- **$watchGroup:** call listener once when the `watchExpressions` array is empty <add> ([bf0e8373](https://github.com/angular/angular.js/commit/bf0e83732aa02c7aa08d0ccdf122116235fcfa11)) <add>- **Angular:** make Date comparison in `equals()` `NaN`-aware <add> ([693e846a](https://github.com/angular/angular.js/commit/693e846add5089d0e516604ae4a109e445fd3664), <add> [#8650](https://github.com/angular/angular.js/issues/8650), [#8715](https://github.com/angular/angular.js/issues/8715)) <add>- **Scope:** don't clear the phase when an exception is thrown from asyncQueue or watch <add> ([bf1a57ad](https://github.com/angular/angular.js/commit/bf1a57ad4822bb152fdd4d2fb54c0689e466481b)) <add>- **copy:** clear array destinations correctly for non-array sources <add> ([a603e202](https://github.com/angular/angular.js/commit/a603e202cc7e048c2ab6f12dee1cc8f277cf6f4f), <add> [#8610](https://github.com/angular/angular.js/issues/8610), [#8702](https://github.com/angular/angular.js/issues/8702)) <add>- **forEach:** match behaviour of Array.prototype.forEach (ignore missing properties) <add> ([36230194](https://github.com/angular/angular.js/commit/36230194be8aa417b0af33d618060829a75c4c5f), <add> [#8510](https://github.com/angular/angular.js/issues/8510), [#8522](https://github.com/angular/angular.js/issues/8522), [#8525](https://github.com/angular/angular.js/issues/8525)) <add>- **input:** <add> - use lowercase method to account for undefined type <add> ([066c0499](https://github.com/angular/angular.js/commit/066c049957a8af2fe449040eca2f1cb499655e32)) <add> - by default, do not trim input[type=password] values <add> ([a7fb357f](https://github.com/angular/angular.js/commit/a7fb357fa122e0a056ce1de838a2dfaf1ebc2953), <add> [#8250](https://github.com/angular/angular.js/issues/8250), 
[#8230](https://github.com/angular/angular.js/issues/8230)) <add>- **jQuery:** cooperate with other libraries monkey-patching jQuery.cleanData <add> ([b9389b26](https://github.com/angular/angular.js/commit/b9389b26ba2cf6aa70372fa32a7b28c62d174bf5), <add> [#8471](https://github.com/angular/angular.js/issues/8471)) <add>- **jqLite:** <add> - clone wrapNode in jqlite/wrap <add> ([77d3e754](https://github.com/angular/angular.js/commit/77d3e7544642396d868aa49b85f0c027e8057bd7), <add> [#3860](https://github.com/angular/angular.js/issues/3860), [#4194](https://github.com/angular/angular.js/issues/4194)) <add> - revert the `ready()` optimization until jQuery does the same <add> ([1bdca93d](https://github.com/angular/angular.js/commit/1bdca93d708ce9441b26d00e564210755395edf7)) <add>- **linky:** handle quotes around email addresses <add> ([a9d22712](https://github.com/angular/angular.js/commit/a9d227120dc2d433372da415a450e56b783b57a0), <add> [#8520](https://github.com/angular/angular.js/issues/8520)) <add>- **minErr:** encode btstrpd error input to strip angle brackets <add> ([0872388a](https://github.com/angular/angular.js/commit/0872388a1b88b8637fdb0fb1ebbee269bead0508), <add> [#8683](https://github.com/angular/angular.js/issues/8683)) <add>- **ngRepeat:** <add> - allow aliasAs identifiers which contain but do not match reserved words <add> ([d713ad1b](https://github.com/angular/angular.js/commit/d713ad1b6607389649fbb8d12ac103565b02a1d4), <add> [#8729](https://github.com/angular/angular.js/issues/8729)) <add> - make allowed aliasAs expressions more strict <add> ([09b29870](https://github.com/angular/angular.js/commit/09b298705f74255aff55bb7e4ba200c4200d712d), <add> [#8438](https://github.com/angular/angular.js/issues/8438), [#8440](https://github.com/angular/angular.js/issues/8440)) <add> <add> <add>## Features <add> <add>- **$compile:** <add> - use allOrNothing interpolation for ngAttr* <add> ([09de7b5d](https://github.com/angular/angular.js/commit/09de7b5db466498becb295ecf5c1d0a698b1512c), <add> [#8376](https://github.com/angular/angular.js/issues/8376), [#8399](https://github.com/angular/angular.js/issues/8399)) <add>- **benchpress:** configure benchpress grunt task <add> ([6bdaa4bc](https://github.com/angular/angular.js/commit/6bdaa4bc213805a58f51e9f5285dfe03bb06ddc3)) <add>- **jqLite:** implement the `detach` method <add> ([1a05daf5](https://github.com/angular/angular.js/commit/1a05daf5dc67813528afdb88086766dc22b6c0df), <add> [#5461](https://github.com/angular/angular.js/issues/5461)) <add>- **ngRoute:** add method for changing url params <add> ([77a1acc7](https://github.com/angular/angular.js/commit/77a1acc7fcad7a8a7d0376b33d38a8977372cfe2)) <add> <add> <add>## Performance Improvements <add> <add>- **$compile:** <add> - don't register $destroy callbacks on element-transcluded nodes <add> ([b5f7970b](https://github.com/angular/angular.js/commit/b5f7970be5950580bde4de0002a578daf3ae3aac)) <add> - refactor publicLinkFn to simplify the code and use 'for in' loop <add> ([645625cf](https://github.com/angular/angular.js/commit/645625cf349a4be57691a7bf418b2386b4c1a53d)) <add> - clone the nodeList during linking only if necessary <add> ([3e0a2e1f](https://github.com/angular/angular.js/commit/3e0a2e1f3367a5b4ae7d8de6cff559f522aacfba)) <add> - delay object initialization in nodeLinkFn <add> ([31ed0af7](https://github.com/angular/angular.js/commit/31ed0af74b0081906415dcefe5610e1217cc0c48)) <add> - optimize nodeLinkFn <add> 
([35134a0e](https://github.com/angular/angular.js/commit/35134a0e237d193cd7d3995dacfdc6bf3e92635e)) <add> - optimize publicLinkFn <add> ([274e9c4d](https://github.com/angular/angular.js/commit/274e9c4ddfd64138d39fcf84047aabc3ccde2f0b)) <add>- **$interpolate:** do not keep empty separators <add> ([94b5c9f0](https://github.com/angular/angular.js/commit/94b5c9f00edff7fa631d09316ceb9c7fd4c6426a)) <add>- **$parse:** <add> - don't bind filters to a context <add> ([8863b9d0](https://github.com/angular/angular.js/commit/8863b9d04c722b278fa93c5d66ad1e578ad6eb1f)) <add> - optimize filter implementation <add> ([ece6ef47](https://github.com/angular/angular.js/commit/ece6ef479c741f17fc217d743cad64c516dbed27)) <add> - speed up fn invocation for no args case <add> ([a17578ad](https://github.com/angular/angular.js/commit/a17578ad3db5d1375aec1d601055ab718eeafd10)) <add> - speed up fn invocation by optimizing arg collection <add> ([fecfc5b0](https://github.com/angular/angular.js/commit/fecfc5b09feb7e4079364013b0beb6bf204ade2a)) <add> - use no-proto maps as caches and avoid hasOwnProperty checks <add> ([d302ea0c](https://github.com/angular/angular.js/commit/d302ea0cfade2787d7cc500398b7dcd3e4eff945)) <add> - trim expression only if string <add> ([a1341223](https://github.com/angular/angular.js/commit/a1341223c084c8188671bb8d6ea1608490b66f9f)) <add>- **$rootScope:** do not use `Function::call` when not needed <add> ([7eae29e5](https://github.com/angular/angular.js/commit/7eae29e5ab478ccb7e02fee8311f8b99ea1d165d)) <add>- **Scope:** <add> - optimize `$watchCollection` when used for watching objects <add> ([e822e906](https://github.com/angular/angular.js/commit/e822e9061c2a605649d91abbd641f757e2829275)) <add> - don't use forEach in <add> ([301463a2](https://github.com/angular/angular.js/commit/301463a2e249011d7cb696c6cf34254f8317a706)) <add> - watchCollection optimization <add> ([7d96ab0d](https://github.com/angular/angular.js/commit/7d96ab0d132d923ec3e3a212aaf9d79f1d4a02de)) <add> - exit $broadcast early if nobody is listening for the given event <add> ([a09fa356](https://github.com/angular/angular.js/commit/a09fa356416c033a52666f3becf00524ecff3a03)) <add> - use remove the need for the extra watch in $watchGroup <add> ([3f0e642e](https://github.com/angular/angular.js/commit/3f0e642eefcbbb315839c4456ba6ac029a7b8a20), <add> [#8396](https://github.com/angular/angular.js/issues/8396)) <add>- **benchpress:** add benchpress node module and port over large table test <add> ([1229334f](https://github.com/angular/angular.js/commit/1229334fbd8c778e95785d6a5e5589099ce655f7)) <add>- **isObject:** use strict comparison <add> ([d208ba25](https://github.com/angular/angular.js/commit/d208ba254442649d35f96c76bcd9e47326ec59f3)) <add>- **jqLite:** <add> - simplify jqLiteDealoc <add> ([f8f7a1df](https://github.com/angular/angular.js/commit/f8f7a1df34560222cb5d2e18d4be996f5553815a)) <add> - optimize event handler <add> ([d05f27e2](https://github.com/angular/angular.js/commit/d05f27e274c41c33eebf4fe8035715d3f6596069)) <add> - only take `str.split()` path when needed <add> ([187b1b8e](https://github.com/angular/angular.js/commit/187b1b8ef45babd86afa853dc9321cd23160096e), <add> [#8648](https://github.com/angular/angular.js/issues/8648)) <add> - optimize `off()` <add> ([abb17cce](https://github.com/angular/angular.js/commit/abb17cce8b459e4646d1c2a2428b691c3d95fb4c)) <add> - refactor jqLiteExpandoStore to minimize access to expensive element.ng339 expando property <add> 
([1e8698b3](https://github.com/angular/angular.js/commit/1e8698b33e61b1a196f05f42856a2da4590a10e1)) <add> - microoptimization in chaining fn <add> ([fafbd494](https://github.com/angular/angular.js/commit/fafbd494907a8c068d79415b7ba8f42f283be521)) <add> - don't use String#split in on() unless we need it <add> ([bda673f8](https://github.com/angular/angular.js/commit/bda673f8e785f299407c8c45887f37448a0f0192)) <add> - don't check isString many times in constructor <add> ([443b521e](https://github.com/angular/angular.js/commit/443b521e22f9ec7009b913a2fe78caee0a515e87)) <add> - optimize jqLiteAcceptsData method <add> ([b493c62f](https://github.com/angular/angular.js/commit/b493c62f6b3e4288f5dee7c8b5952e088c2e3329)) <add> - optimize `append()` and `after()` <add> ([8d933bf9](https://github.com/angular/angular.js/commit/8d933bf99520fe3936e33d3ee28fd37e574b99de)) <add> - don't register DOM listener for $destroy event <add> ([6251751a](https://github.com/angular/angular.js/commit/6251751ad7bc2f3621db538edb5a9d7313a4ce6d)) <add> - optimize event listener registration <add> ([566f1015](https://github.com/angular/angular.js/commit/566f1015d27118d259e0886910d6b73b3cb0eb10)) <add> - improve createEventHandler method by switching from forEach to for loop <add> ([e9cd6dc0](https://github.com/angular/angular.js/commit/e9cd6dc055cb7bd80ae9232d8985b2bc3999135e)) <add> - don't use `forEach` in `off()` <add> ([960a8410](https://github.com/angular/angular.js/commit/960a8410515b2d7d461d7c95e8a2ca3d75129087)) <add> - don't recreate the Node.contains polyfill <add> ([d1536e7c](https://github.com/angular/angular.js/commit/d1536e7c8bf60549096138d08953a43190c7b1a6)) <add> - speed up shallowCopy and special case Attributes cloning <add> ([54fa16e4](https://github.com/angular/angular.js/commit/54fa16e45d8769ce6708a28388326db0eea53c7e)) <add>- **ngBind:** bypass jquery/jqlite when setting text <add> ([0a738ce1](https://github.com/angular/angular.js/commit/0a738ce1760f38efe45e79aa133442be09b56803)) <add>- **ngRepeat:** <add> - simplify code and remove duplicate array.length access <add> ([08eb0558](https://github.com/angular/angular.js/commit/08eb05583bf39c63fef43b4faf29c61360699c81)) <add> - optimize marking of nodes that are being removed via an animation <add> ([36e35b2c](https://github.com/angular/angular.js/commit/36e35b2cb17c5ff7c43746d9ac0a259f77ff494e)) <add> - use no-proto objects for blockMaps <add> ([13d113c5](https://github.com/angular/angular.js/commit/13d113c522f124b91a1fd8606c22bbd399abf121)) <add> - move work to compile fn <add> ([bdd853cb](https://github.com/angular/angular.js/commit/bdd853cb83839eef9901af164293611eaa23ee2c)) <add> - move updateScope fn to factory and reuse it for all repeaters <add> ([e58d65a5](https://github.com/angular/angular.js/commit/e58d65a520cfbc630cbfbc248479416777ca16b2)) <add> - clone boundary comment nodes <add> ([fbd48845](https://github.com/angular/angular.js/commit/fbd48845e0e88e9935f82fe4c9f686ad78b5d924)) <add> <add> <add>## Breaking Changes <add> <add>- **$compile:** <add> - due to [09de7b5d](https://github.com/angular/angular.js/commit/09de7b5db466498becb295ecf5c1d0a698b1512c), <add> <add> <add>Now, `ng-attr-*` will never add the attribute to the DOM if any of the interpolated expressions <add>evaluate to `undefined`. 
<add> <add>To work around this, initialize values which are intended to be the empty string with the <add>empty string: <add> <add>For example, given the following markup: <add> <add>```html <add><div ng-attr-style="border-radius: {{value}}{{units}}"></div> <add>``` <add> <add>If `$scope.value` is `4`, and `$scope.units` is `undefined`, the resulting markup is unchanged: <add> <add>```html <add><div ng-attr-style="border-radius: {{value}}{{units}}"></div> <add>``` <add> <add>However, if $scope.units is `""`, then the resulting markup is updated: <add> <add>```html <add><div ng-attr-style="border-radius: {{value}}{{units}}" style="border-radius: 4"></div> <add>``` <add> <add>Closes #8376 <add>Closes #8399 <add> <add> - due to [0d608d04](https://github.com/angular/angular.js/commit/0d608d041f37a659d8d8ba7a9b688e132587035d), <add> element-transcluded directives now have an extra comment automatically appended to their cloned DOM <add> <add>This comment is usually needed to keep track the end boundary in the event child directives modify the root node(s). <add>If not used for this purpose it can be safely ignored. <add> <add> - due to [75c4cbf8](https://github.com/angular/angular.js/commit/75c4cbf81fcd6d49656d3cb044e59e5fd24e0479), <add> `directive.type` was renamed to `directive.templateNamespace` <add> <add>This change is breaking only within 1.3.0-beta releases: `directive.type` was renamed to `directive.templateNamespace` <add> <add>The property name `type` was too general. <add> <add>- **$parse:** due to [8863b9d0](https://github.com/angular/angular.js/commit/8863b9d04c722b278fa93c5d66ad1e578ad6eb1f), <add> `this` in filters is now undefined and no longer the scope <add> <add>It's a bad practice for filters to have hidden dependencies, so pulling stuff from scope directly <add>is not a good idea. Scope being the filter context was never documented as public API, so we don't <add>expect that any significant code depends on this behavior. <add> <add>If an existing filter has a dependency on the scope instance, the scope reference can <add>be passed into the filter as a filter argument (this is highly discouraged for new code): <add> <add>Before: `{{ user.name | customFilter }}` <add>After: `{{ user.name | customFilter:this }}` <add> <add>- **Scope:** due to [0554c1aa](https://github.com/angular/angular.js/commit/0554c1aae49a81691154a77e70b602b0f24dca81), <add> `deregisterNotifier` callback for `$watch` is no longer available <add> <add>This API was available only in the last few 1.3 beta versions and is not <add>very useful for applications, so we don't expect that anyone will be affected <add>by this change. <add> <add>- **input:** due to [a7fb357f](https://github.com/angular/angular.js/commit/a7fb357fa122e0a056ce1de838a2dfaf1ebc2953), <add> by default, do not trim `input[type=password]` values. <add> <add>Previously, `input[type=password]` would trim values by default, and would require an explicit `ng-trim="false"` <add>to disable the trimming behaviour. After this change, `ng-trim` no longer affects `input[type=password]`, and will <add>never trim the password value. <add> <add>Closes #8250 <add>Closes #8230 <add> <add> <add> <ide> <a name="1.3.0-beta.18"></a> <ide> # 1.3.0-beta.18 spontaneous-combustion (2014-08-12) <ide>
file_count: 1

content_type: Python
main_lang: Python
message: add failed testcase for fieldvalidation
sha: dc1c57d595c3917e3fed9076894d5fa88ec083c9
patch:
<ide><path>rest_framework/tests/serializer.py <ide> def validate_content(self, attrs, source): <ide> self.assertFalse(serializer.is_valid()) <ide> self.assertEquals(serializer.errors, {'content': [u'Test not in value']}) <ide> <add> incomplete_data = { <add> 'email': '[email protected]', <add> 'created': datetime.datetime(2012, 1, 1) <add> } <add> serializer = CommentSerializerWithFieldValidator(data=incomplete_data) <add> self.assertFalse(serializer.is_valid()) <add> self.assertEquals(serializer.errors, {'content': [u'This field is required.']}) <add> <ide> def test_bad_type_data_is_false(self): <ide> """ <ide> Data of the wrong type is not valid.
file_count: 1

content_type: Javascript
main_lang: Javascript
message: restore hyphen requirement for component lookup
sha: 5014201d48e3777b7578b2d9e44619f92cad0eae
patch:
<ide><path>packages/ember-application/tests/system/dependency_injection/custom_resolver_test.js <ide> QUnit.module("Ember.Application Dependency Injection – customResolver", { <ide> resolveTemplate(resolvable) { <ide> var resolvedTemplate = this._super(resolvable); <ide> if (resolvedTemplate) { return resolvedTemplate; } <del> return fallbackTemplate; <add> if (resolvable.fullNameWithoutType === 'application') { <add> return fallbackTemplate; <add> } else { <add> return; <add> } <ide> } <ide> }); <ide> <ide> QUnit.module("Ember.Application Dependency Injection – customResolver", { <ide> QUnit.test("a resolver can be supplied to application", function() { <ide> equal(jQuery("h1", application.rootElement).text(), "Fallback"); <ide> }); <del> <ide><path>packages/ember-htmlbars/tests/integration/component_lookup_test.js <add>import EmberView from "ember-views/views/view"; <add>import Registry from "container/registry"; <add>import compile from "ember-template-compiler/system/compile"; <add>import ComponentLookup from 'ember-views/component_lookup'; <add>//import Component from "ember-views/views/component"; <add>import { runAppend, runDestroy } from "ember-runtime/tests/utils"; <add> <add>var registry, container, view; <add> <add>QUnit.module('component - lookup', { <add> setup() { <add> registry = new Registry(); <add> container = registry.container(); <add> registry.optionsForType('component', { singleton: false }); <add> registry.optionsForType('view', { singleton: false }); <add> registry.optionsForType('template', { instantiate: false }); <add> registry.optionsForType('helper', { instantiate: false }); <add> registry.register('component-lookup:main', ComponentLookup); <add> }, <add> <add> teardown() { <add> runDestroy(container); <add> runDestroy(view); <add> registry = container = view = null; <add> } <add>}); <add> <add>QUnit.test('dashless components should not be found', function() { <add> expect(1); <add> <add> registry.register('template:components/dashless', compile('Do not render me!')); <add> <add> view = EmberView.extend({ <add> template: compile('{{dashless}}'), <add> container: container <add> }).create(); <add> <add> expectAssertion(function() { <add> runAppend(view); <add> }, /You canot use 'dashless' as a component name. Component names must contain a hyphen./); <add>}); <ide><path>packages/ember-views/lib/component_lookup.js <add>import Ember from 'ember-metal/core'; <ide> import EmberObject from "ember-runtime/system/object"; <add>import { ISNT_HELPER_CACHE } from "ember-htmlbars/system/lookup-helper"; <ide> <ide> export default EmberObject.extend({ <add> invalidName(name) { <add> var invalidName = ISNT_HELPER_CACHE.get(name); <add> <add> if (invalidName) { <add> Ember.assert(`You canot use '${name}' as a component name. Component names must contain a hyphen.`); <add> } <add> }, <add> <ide> lookupFactory(name, container) { <ide> <ide> container = container || this.container; <ide> export default EmberObject.extend({ <ide> }, <ide> <ide> componentFor(name, container) { <add> if (this.invalidName(name)) { <add> return; <add> } <add> <ide> var fullName = 'component:' + name; <ide> return container.lookupFactory(fullName); <ide> }, <ide> <ide> layoutFor(name, container) { <add> if (this.invalidName(name)) { <add> return; <add> } <add> <ide> var templateFullName = 'template:components/' + name; <ide> return container.lookup(templateFullName); <ide> }
file_count: 3

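A standalone sketch of the rule the patch above restores, using a hypothetical `isValidComponentName` helper rather than Ember's actual `ISNT_HELPER_CACHE` lookup: a component name must contain a hyphen so it can never shadow a plain HTML element name.

```js
// Hypothetical helper, for illustration only; Ember's real check goes through
// ISNT_HELPER_CACHE and Ember.assert as shown in the diff above.
function isValidComponentName(name) {
  return name.indexOf("-") > -1;
}

console.log(isValidComponentName("dashless"));  // false -> componentFor/layoutFor bail out, assertion fires
console.log(isValidComponentName("my-widget")); // true  -> normal component lookup proceeds
```
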
content_type: Javascript
main_lang: Javascript
message: remove unused requires in eslint config
sha: 5b30f6ebfc2392603cdf0b225d112b2f4f26cc98
patch:
<ide><path>.eslintrc.js <del>var fs = require('fs'); <del>var path = require('path'); <del> <del>var options = { <add>module.exports = { <ide> root: true, <ide> parserOptions: { <ide> ecmaVersion: 6, <ide> var options = { <ide> 'comma-dangle': 'off', <ide> }, <ide> }; <del> <del>module.exports = options;
file_count: 1

content_type: Javascript
main_lang: Javascript
message: fix an broken question
sha: 82cfdb88facd946926a40822b6939737e0ebddc4
patch:
<ide><path>lib/_debugger.js <ide> Interface.prototype.yesNoQuestion = function(prompt, cb) { <ide> cb(false); <ide> } else { <ide> console.log('Please answer y or n.'); <del> self.restartQuestion(cb); <add> self.yesNoQuestion(prompt, cb); <ide> } <ide> }); <ide> };
file_count: 1

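The bug in the patch above is that an unrecognised answer restarted a different prompt; the fix re-asks the same question. A self-contained sketch of that retry pattern with plain `readline` (the prompt text and names are made up; this is not the debugger's `Interface` code):

```js
const readline = require("readline");

const rl = readline.createInterface({ input: process.stdin, output: process.stdout });

// Keep re-asking the *same* prompt until the answer is recognisably yes or no,
// mirroring the corrected self.yesNoQuestion(prompt, cb) recursion.
function yesNoQuestion(prompt, cb) {
  rl.question(prompt, (answer) => {
    if (/^y(es)?$/i.test(answer)) return cb(true);
    if (/^n(o)?$/i.test(answer)) return cb(false);
    console.log("Please answer y or n.");
    yesNoQuestion(prompt, cb); // same prompt, same callback
  });
}

yesNoQuestion("Restart the script? (y/n) ", (yes) => {
  console.log(yes ? "restarting..." : "leaving it running");
  rl.close();
});
```
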
content_type: Javascript
main_lang: Javascript
message: update externalmodule.js for umd2
sha: e044598a98aca665726916fe90b6026f52401490
patch:
<ide><path>lib/ExternalModule.js <ide> ExternalModule.prototype.source = function(dependencyTemplates, outputOptions, r <ide> str = "module.exports = require(" + JSON.stringify(request[0]) + ")" + request.slice(1).map(function(r) { <ide> return "[" + JSON.stringify(r) + "]"; <ide> }).join("") + ";"; <del> } else <add> } else <ide> str = "module.exports = require(" + JSON.stringify(request) + ");"; <ide> break; <ide> case "amd": <ide> case "umd": <add> case "umd2": <ide> str = ""; <ide> if(this.optional) { <ide> str += "if(typeof __WEBPACK_EXTERNAL_MODULE_" + this.id + "__ === 'undefined') {" + WebpackMissingModule.moduleCode(request) + "}\n"; <ide> ExternalModule.prototype.source = function(dependencyTemplates, outputOptions, r <ide> <ide> ExternalModule.prototype.size = function() { <ide> return 42; <del>}; <ide>\ No newline at end of file <add>};
file_count: 1

content_type: Ruby
main_lang: Ruby
message: remove double scoping blocks and just use one
sha: ce5f27b04b1ff25eed520a3d06b3b9c150536e21
patch:
<ide><path>actionpack/lib/action_dispatch/routing/mapper.rb <ide> def resource(*resources, &block) <ide> return self <ide> end <ide> <del> controller(resource.controller) do <del> namespace(resource.name) do <del> with_scope_level(:resource, resource) do <del> yield if block_given? <del> <del> get "", :to => :show, :as => resource.member_name <del> post "", :to => :create <del> put "", :to => :update <del> delete "", :to => :destroy <del> get "new", :to => :new, :as => "new_#{resource.singular}" <del> get "edit", :to => :edit, :as => "edit_#{resource.singular}" <del> end <add> scope(:path => resource.name, :controller => resource.controller) do <add> with_scope_level(:resource, resource) do <add> yield if block_given? <add> <add> get "", :to => :show, :as => resource.member_name <add> post "", :to => :create <add> put "", :to => :update <add> delete "", :to => :destroy <add> get "new", :to => :new, :as => "new_#{resource.singular}" <add> get "edit", :to => :edit, :as => "edit_#{resource.singular}" <ide> end <ide> end <ide> <ide> def resources(*resources, &block) <ide> return self <ide> end <ide> <del> controller(resource.controller) do <del> namespace(resource.name) do <del> with_scope_level(:resources, resource) do <del> yield if block_given? <add> scope(:path => resource.name, :controller => resource.controller) do <add> with_scope_level(:resources, resource) do <add> yield if block_given? <ide> <del> with_scope_level(:collection) do <del> get "", :to => :index, :as => resource.collection_name <del> post "", :to => :create <del> get "new", :to => :new, :as => "new_#{resource.singular}" <del> end <add> with_scope_level(:collection) do <add> get "", :to => :index, :as => resource.collection_name <add> post "", :to => :create <add> get "new", :to => :new, :as => "new_#{resource.singular}" <add> end <ide> <del> with_scope_level(:member) do <del> scope(":id") do <del> get "", :to => :show, :as => resource.member_name <del> put "", :to => :update <del> delete "", :to => :destroy <del> get "edit", :to => :edit, :as => "edit_#{resource.singular}" <del> end <add> with_scope_level(:member) do <add> scope(":id") do <add> get "", :to => :show, :as => resource.member_name <add> put "", :to => :update <add> delete "", :to => :destroy <add> get "edit", :to => :edit, :as => "edit_#{resource.singular}" <ide> end <ide> end <ide> end
file_count: 1

content_type: Javascript
main_lang: Javascript
message: use old new buffer api for node 4
sha: 16a5831620e0f3c3fbba51c704952f827b040706
patch:
<ide><path>lib/optimize/ConcatenatedModule.js <ide> class ConcatenatedModule extends Module { <ide> exportName = true; <ide> } else { <ide> const exportData = match[2]; <del> exportName = Buffer.from(exportData, "hex").toString("utf-8"); <add> exportName = new Buffer(exportData, "hex").toString("utf-8"); // eslint-disable-line node/no-deprecated-api <ide> } <ide> const asCall = !!match[3]; <ide> const finalName = getFinalName(referencedModule, exportName, moduleToInfoMap, requestShortener, asCall); <ide> class HarmonyImportSpecifierDependencyConcatenatedTemplate { <ide> } else if(dep.namespaceObjectAsContext) { <ide> content = `__WEBPACK_MODULE_REFERENCE__${info.index}_ns__[${JSON.stringify(dep.id)}]`; <ide> } else { <del> const exportData = Buffer.from(dep.id, "utf-8").toString("hex"); <add> const exportData = new Buffer(dep.id, "utf-8").toString("hex"); // eslint-disable-line node/no-deprecated-api <ide> content = `__WEBPACK_MODULE_REFERENCE__${info.index}_${exportData}${dep.call ? "_call" : ""}__`; <ide> } <ide> if(dep.shorthand) { <ide> class HarmonyExportImportedSpecifierDependencyConcatenatedTemplate { <ide> if(def.id === true) { <ide> finalName = `__WEBPACK_MODULE_REFERENCE__${info.index}_ns__`; <ide> } else { <del> const exportData = Buffer.from(def.id, "utf-8").toString("hex"); <add> const exportData = new Buffer(def.id, "utf-8").toString("hex"); // eslint-disable-line node/no-deprecated-api <ide> finalName = `__WEBPACK_MODULE_REFERENCE__${info.index}_${exportData}__`; <ide> } <ide> const exportsName = this.rootModule.exportsArgument || "exports";
file_count: 1

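For context, the hex strings in the patch above exist because export names are embedded inside placeholder identifiers and decoded again when those placeholders are rewritten; `Buffer.from(string, encoding)` was not available on the earliest Node 4 releases, hence the deprecated constructor. A minimal sketch of the round trip (the placeholder layout is only what the diff itself shows):

```js
// Encode an export name as hex so it is safe inside an identifier,
// then recover it when the placeholder is replaced.
const exportName = "default";

const encoded = new Buffer(exportName, "utf-8").toString("hex"); // eslint-disable-line node/no-deprecated-api
const placeholder = "__WEBPACK_MODULE_REFERENCE__0_" + encoded + "__";

const match = /__WEBPACK_MODULE_REFERENCE__(\d+)_([0-9a-f]+)__/.exec(placeholder);
const decoded = new Buffer(match[2], "hex").toString("utf-8"); // eslint-disable-line node/no-deprecated-api

console.log(placeholder); // __WEBPACK_MODULE_REFERENCE__0_64656661756c74__
console.log(decoded);     // default
```
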
content_type: Java
main_lang: Java
message: introduce isnotempty() aliases in stringutils
sha: 47f8d435e621163f56727891bb2e874e76e1f9b8
patch:
<ide><path>spring-core/src/main/java/org/springframework/util/StringUtils.java <ide> * @author Rob Harrop <ide> * @author Rick Evans <ide> * @author Arjen Poutsma <add> * @author Sam Brannen <ide> * @since 16 April 2001 <ide> */ <ide> public abstract class StringUtils { <ide> public abstract class StringUtils { <ide> * Objects since attributes may e.g. be primitive value objects as well. <ide> * @param str the candidate String <ide> * @since 3.2.1 <add> * @see #isNotEmpty(CharSequence) <add> * @see #isNotEmpty(String) <ide> */ <ide> public static boolean isEmpty(Object str) { <ide> return (str == null || "".equals(str)); <ide> } <ide> <ide> /** <del> * Check that the given CharSequence is neither {@code null} nor of length 0. <del> * Note: Will return {@code true} for a CharSequence that purely consists of whitespace. <add> * Check that the given {@code CharSequence} is not empty (i.e., neither <add> * {@code null} nor of length 0). <add> * <p>This method is an alias for {@link #hasLength(CharSequence)}. <add> * <p>Note: this method returns {@code true} for a {@code CharSequence} <add> * that purely consists of whitespace. <add> * @param str the {@code CharSequence} to check (may be {@code null}) <add> * @return {@code true} if the {@code CharSequence} is not {@code null} and has length <add> * @see #hasText(CharSequence) <add> * @since 4.2 <add> */ <add> public static boolean isNotEmpty(CharSequence str) { <add> return hasLength(str); <add> } <add> <add> /** <add> * Check that the given {@code String} is not empty (i.e., neither <add> * {@code null} nor of length 0). <add> * <p>This method is an alias for {@link #hasLength(String)}. <add> * <p>Note: this method returns {@code true} for a {@code String} that <add> * purely consists of whitespace. <add> * @param str the {@code String} to check (may be {@code null}) <add> * @return {@code true} if the {@code String} is not {@code null} and has length <add> * @see #hasText(String) <add> * @since 4.2 <add> */ <add> public static boolean isNotEmpty(String str) { <add> return hasLength(str); <add> } <add> <add> /** <add> * Check that the given {@code CharSequence} is neither {@code null} nor <add> * of length 0. <add> * <p>Note: this method returns {@code true} for a {@code CharSequence} <add> * that purely consists of whitespace. <ide> * <p><pre class="code"> <ide> * StringUtils.hasLength(null) = false <ide> * StringUtils.hasLength("") = false
file_count: 1

content_type: Python
main_lang: Python
message: fix more lint issues
sha: 4ad7558fd056c13f2df1c3213cf95e50fca5df13
patch:
<ide><path>libcloud/compute/drivers/kubevirt.py <ide> from libcloud.container.drivers.kubernetes import VALID_RESPONSE_CODES <ide> <ide> from libcloud.common.base import KeyCertificateConnection, ConnectionKey <del>from libcloud.common.types import InvalidCredsError, ProviderError <add>from libcloud.common.types import InvalidCredsError, LibcloudError <ide> <ide> from libcloud.compute.types import Provider, NodeState <ide> from libcloud.compute.base import NodeDriver, NodeSize, Node <ide> def create_node(self, name, image, location=None, ex_memory=128, ex_cpu=1, <ide> if disk_type == "persistentVolumeClaim": <ide> if 'claim_name' in disk: <ide> claimName = disk['claim_name'] <del> if claimName not in self.list_persistent_volume_claims( <add> if claimName not in self.ex_list_persistent_volume_claims( <ide> namespace=namespace <ide> ): <ide> if ('size' not in disk or "storage_class_name" <ide> def attach_volume(self, node, volume, device='disk', <ide> if not volume.extra['is_bound']: <ide> volume = self._bind_volume(volume, node.extra['namespace']) <ide> if volume is None: <del> raise ProviderError("Selected Volume (PV) could not be bound " <del> "(to a PVC), please select another volume") <add> raise LibcloudError("Selected Volume (PV) could not be bound " <add> "(to a PVC), please select another volume", <add> driver=self) <ide> <ide> claimName = volume.extra['pvc']['name'] <ide> if ex_name is None:
file_count: 1

content_type: Javascript
main_lang: Javascript
message: fix failing offset tests
sha: fad0e221818f0a78550584b3c0a16a6808ecd921
patch:
<ide><path>test/unit/offset.js <ide> testIframe("offset/static", "static", function( $ ) { <ide> }); <ide> <ide> testIframe("offset/fixed", "fixed", function( $ ) { <add> // IE is collapsing the top margin of 1px; detect and adjust accordingly <add> var ie = $("#fixed-1").position().top === 2; <add> <ide> expect(34); <ide> <ide> var tests = [ <del> { "id": "#fixed-1", "offsetTop": 1001, "offsetLeft": 1001, "positionTop": 0, "positionLeft": 0 }, <del> { "id": "#fixed-2", "offsetTop": 1021, "offsetLeft": 1021, "positionTop": 20, "positionLeft": 20 } <add> { <add> "id": "#fixed-1", <add> "offsetTop": 1001, <add> "offsetLeft": 1001, <add> "positionTop": ie ? 2 : 0, <add> "positionLeft": ie ? 2 : 0 <add> }, <add> { <add> "id": "#fixed-2", <add> "offsetTop": 1021, <add> "offsetLeft": 1021, <add> "positionTop": ie ? 22 : 20, <add> "positionLeft": ie ? 22 : 20 <add> } <ide> ]; <ide> <ide> jQuery.each( tests, function() { <ide> testIframe("offset/fixed", "fixed", function( $ ) { <ide> // need to have same number of assertions <ide> ok( true, "Fixed position is not supported" ); <ide> ok( true, "Fixed position is not supported" ); <add> ok( true, "Fixed position is not supported" ); <add> ok( true, "Fixed position is not supported" ); <ide> } <ide> }); <ide>
file_count: 1

content_type: Javascript
main_lang: Javascript
message: fix message update in modulerestoreerror
sha: 59250743bae013f2050aa2c4c3a98bad9426a9bc
patch:
<ide><path>lib/ModuleRestoreError.js <ide> class ModuleRestoreError extends WebpackError { <ide> message += err; <ide> } <ide> } else { <del> message = String(err); <add> message += String(err); <ide> } <ide> <ide> super(message);
file_count: 1

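The one-character change above only matters when the caught value is not an `Error`: with `=` the prefix built earlier in the constructor is discarded, with `+=` it is kept. A tiny illustration (the prefix string here is made up, not webpack's actual wording):

```js
let message = "Module restore failed: "; // hypothetical prefix built earlier
const err = "cache entry is corrupted";  // a thrown non-Error value

// message = String(err)  would yield just "cache entry is corrupted"
message += String(err);
console.log(message); // "Module restore failed: cache entry is corrupted"
```
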
content_type: Javascript
main_lang: Javascript
message: reset player ui on player#reset
sha: 175f77325346af9ec5fa7f090eba8be2076a54e2
patch:
<ide><path>src/js/control-bar/time-controls/remaining-time-display.js <ide> class RemainingTimeDisplay extends TimeDisplay { <ide> * @listens Player#durationchange <ide> */ <ide> updateContent(event) { <del> if (!this.player_.duration()) { <add> if (typeof this.player_.duration() !== 'number') { <ide> return; <ide> } <ide> <ide><path>src/js/player.js <ide> class Player extends Component { <ide> this.poster(''); <ide> this.loadTech_(this.options_.techOrder[0], null); <ide> this.techCall_('reset'); <add> this.resetControlBarUI_(); <ide> if (isEvented(this)) { <ide> this.trigger('playerreset'); <ide> } <ide> } <ide> <add> /** <add> * Reset Control Bar's UI by calling sub-methods that reset <add> * all of Control Bar's components <add> */ <add> resetControlBarUI_() { <add> this.resetProgressBar_(); <add> this.resetPlaybackRate_(); <add> this.resetVolumeBar_(); <add> } <add> <add> /** <add> * Reset tech's progress so progress bar is reset in the UI <add> */ <add> resetProgressBar_() { <add> this.currentTime(0); <add> this.controlBar.durationDisplay.updateContent(); <add> this.controlBar.remainingTimeDisplay.updateContent(); <add> } <add> <add> /** <add> * Reset Playback ratio <add> */ <add> resetPlaybackRate_() { <add> this.playbackRate(this.defaultPlaybackRate()); <add> this.handleTechRateChange_(); <add> } <add> <add> /** <add> * Reset Volume bar <add> */ <add> resetVolumeBar_() { <add> this.volume(1.0); <add> this.trigger('volumechange'); <add> } <add> <ide> /** <ide> * Returns all of the current source objects. <ide> * <ide><path>test/unit/player.test.js <ide> QUnit.test('player#reset loads the Html5 tech and then techCalls reset', functio <ide> techCall_(method) { <ide> techCallMethod = method; <ide> }, <add> resetControlBarUI_() {}, <ide> poster() {} <ide> }; <ide> <ide> QUnit.test('player#reset loads the first item in the techOrder and then techCall <ide> techCall_(method) { <ide> techCallMethod = method; <ide> }, <add> resetControlBarUI_() {}, <ide> poster() {} <ide> }; <ide> <ide><path>test/unit/reset-ui.test.js <add>/* eslint-env qunit */ <add>import TestHelpers from './test-helpers.js'; <add>QUnit.module('player reset-ui'); <add> <add>QUnit.test('Calling resetProgressBar player method should place progress bar at 0% width', function(assert) { <add> const player = TestHelpers.makePlayer(); <add> <add> player.currentTime(20); <add> player.trigger('timeupdate'); <add> player.resetProgressBar_(); <add> assert.equal( <add> player.controlBar.progressControl.seekBar.playProgressBar.el().offsetWidth, 0, <add> 'progress bar is reset to width 0%' <add> ); <add> assert.equal( <add> player.currentTime(), 0, <add> 'player current time is 0' <add> ); <add> player.dispose(); <add>}); <add> <add>QUnit.test('Calling resetPlaybackRate player method should place play rate at 1x', function(assert) { <add> const player = TestHelpers.makePlayer({techOrder: ['html5']}); <add> <add> player.playbackRate(2); <add> player.handleTechRateChange_(); <add> player.resetPlaybackRate_(); <add> const defaultRate = player.defaultPlaybackRate(); <add> <add> assert.equal( <add> player.controlBar.playbackRateMenuButton.labelEl_.textContent, defaultRate + 'x', <add> 'Playback rate is the default one on the UI' <add> ); <add> assert.equal( <add> player.playbackRate(), defaultRate, <add> 'Playback rate is the default one on the player object' <add> ); <add> player.dispose(); <add>}); <add> <add>QUnit.test('Calling resetVolumeBar player method should reset volume bar', function(assert) { <add> const player = 
TestHelpers.makePlayer({ techOrder: ['html5'] }); <add> <add> player.volume(0.5); <add> <add> player.trigger('volumechange'); <add> <add> assert.equal(player.controlBar.volumePanel.volumeControl.volumeBar.el_.getAttribute('aria-valuenow'), 50, 'UI value of VolumeBar is 50'); <add> <add> player.resetVolumeBar_(); <add> <add> assert.equal(player.controlBar.volumePanel.volumeControl.volumeBar.el_.getAttribute('aria-valuenow'), 100, 'UI value of VolumeBar is 100'); <add> <add> player.dispose(); <add>});
file_count: 4

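A hypothetical usage sketch of what the change above means for callers (the element id and values are invented): after this patch `reset()` also returns the control bar to its initial state instead of leaving stale progress, rate and volume readouts.

```js
import videojs from "video.js";

const player = videojs("my-video"); // placeholder element id

player.currentTime(42);  // progress bar advances
player.playbackRate(2);  // rate button reads "2x"
player.volume(0.25);     // volume bar drops

player.reset();
// Tech reset as before, plus: progress back to 0, playback rate back to
// defaultPlaybackRate(), volume bar back to 100%.
```
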
content_type: PHP
main_lang: PHP
message: check the instance of the thrown exception
sha: c71213673fff3a83471ade4535087c77d2d2f4d9
patch:
<ide><path>tests/Integration/Http/ThrottleRequestsTest.php <ide> use Orchestra\Testbench\TestCase; <ide> use Illuminate\Support\Facades\Route; <ide> use Illuminate\Routing\Middleware\ThrottleRequests; <add>use Illuminate\Http\Exceptions\ThrottleRequestsException; <ide> <ide> /** <ide> * @group integration <ide> public function test_lock_opens_immediately_after_decay() <ide> try { <ide> $this->withoutExceptionHandling()->get('/'); <ide> } catch (Throwable $e) { <add> $this->assertTrue($e instanceof ThrottleRequestsException); <ide> $this->assertEquals(429, $e->getStatusCode()); <ide> $this->assertEquals(2, $e->getHeaders()['X-RateLimit-Limit']); <ide> $this->assertEquals(0, $e->getHeaders()['X-RateLimit-Remaining']);
file_count: 1

content_type: Ruby
main_lang: Ruby
message: remove example file
sha: 703a2e8da1d3a68b3902d9024c6f0d1cd32435aa
patch:
<ide><path>activemodel/examples/validations.rb <del>require File.expand_path('../../../load_paths', __FILE__) <del>require 'active_model' <del> <del>class Person <del> include ActiveModel::Conversion <del> include ActiveModel::Validations <del> <del> validates :name, presence: true <del> <del> attr_accessor :name <del> <del> def initialize(attributes = {}) <del> @name = attributes[:name] <del> end <del> <del> def persist <del> @persisted = true <del> end <del> <del> def persisted? <del> @persisted <del> end <del>end <del> <del>person1 = Person.new <del>p person1.valid? # => false <del>p person1.errors.messages # => {:name=>["can't be blank"]} <del> <del>person2 = Person.new(name: 'matz') <del>p person2.valid? # => true
file_count: 1

content_type: Javascript
main_lang: Javascript
message: fix regexp in managed paths
sha: adea157a9139079379ecbf420687b0fef4c2b7f7
patch:
<ide><path>lib/config/defaults.js <ide> const applySnapshotDefaults = (snapshot, { production, futureDefaults }) => { <ide> return [path.resolve(match[1], "unplugged")]; <ide> } <ide> } else { <del> const match = /^(.+?[\\/]node_modules)[\\/]/.exec( <add> const match = /^(.+?[\\/]node_modules[\\/])/.exec( <ide> // eslint-disable-next-line node/no-extraneous-require <ide> require.resolve("watchpack") <ide> ); <ide><path>test/Defaults.unittest.js <ide> describe("Defaults", () => { <ide> }, <ide> "immutablePaths": Array [], <ide> "managedPaths": Array [ <del> "<cwd>/node_modules", <add> "<cwd>/node_modules/", <ide> ], <ide> "module": Object { <ide> "timestamp": true, <ide> describe("Defaults", () => { <ide> - "hashFunction": "md4", <ide> + "hashFunction": "xxhash64", <ide> <del> - "<cwd>/node_modules", <add> - "<cwd>/node_modules/", <ide> + /^(.+?[\\\\/]node_modules[\\\\/])/, <ide> `) <ide> );
file_count: 2

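The behavioural difference in the patch above is easiest to see on a concrete resolved path (the path below is invented): moving the closing parenthesis makes the captured managed path keep its trailing separator, which is what the updated `"<cwd>/node_modules/"` snapshot expects.

```js
const before = /^(.+?[\\/]node_modules)[\\/]/;
const after = /^(.+?[\\/]node_modules[\\/])/;

const resolved = "/repo/node_modules/watchpack/lib/watchpack.js"; // hypothetical require.resolve("watchpack") result

console.log(before.exec(resolved)[1]); // "/repo/node_modules"   (separator lost)
console.log(after.exec(resolved)[1]);  // "/repo/node_modules/"  (separator kept)
```
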
content_type: Ruby
main_lang: Ruby
message: fix typo in xquartz comment
sha: 82ecc01ac0944e1c9071352b2ef2f3b4fcb36d6c
patch:
<ide><path>Library/Homebrew/macos/xquartz.rb <ide> def provided_by_apple? <ide> end <ide> <ide> # This should really be private, but for compatibility reasons it must <del> # remain public. New code should use MacOS::XQuartz.{bin,lib,include} <add> # remain public. New code should use MacOS::X11.{bin,lib,include} <ide> # instead, as that accounts for Xcode-only systems. <ide> def prefix <ide> @prefix ||= if Pathname.new('/opt/X11/lib/libpng.dylib').exist?
file_count: 1

content_type: Python
main_lang: Python
message: reformat the code for pep8
sha: 697d8d0bb502183e61919d3a5a6119cbe577e072
patch:
<ide><path>libcloud/common/google.py <ide> def parse_body(self): <ide> body = self.body <ide> json_error = True <ide> <del> if self.status in [httplib.OK, httplib.CREATED, httplib.ACCEPTED, httplib.CONFLICT]: <add> valid_http_codes = [ <add> httplib.OK, <add> httplib.CREATED, <add> httplib.ACCEPTED, <add> httplib.CONFLICT, <add> ] <add> if self.status in valid_http_codes: <ide> if json_error: <ide> raise JsonParseError(body, self.status, None) <ide> elif 'error' in body: <ide> (code, message) = self._get_error(body) <ide> if code == 'QUOTA_EXCEEDED': <ide> raise QuotaExceededError(message, self.status, code) <del> elif (code == 'RESOURCE_ALREADY_EXISTS' or code == 'alreadyExists'): <add> elif code == 'RESOURCE_ALREADY_EXISTS': <add> raise ResourceExistsError(message, self.status, code) <add> elif code == 'alreadyExists': <ide> raise ResourceExistsError(message, self.status, code) <ide> elif code.startswith('RESOURCE_IN_USE'): <ide> raise ResourceInUseError(message, self.status, code)
file_count: 1

content_type: Go
main_lang: Go
message: replace consul with boltdb in test
sha: 25594c33b949fb21ec8ce48bb926279db1e802c1
patch:
<ide><path>libnetwork/drivers/overlay/overlay_test.go <ide> import ( <ide> "fmt" <ide> "net" <ide> "os" <add> "path/filepath" <ide> "syscall" <ide> "testing" <ide> "time" <ide> <del> "golang.org/x/sys/unix" <del> <ide> "github.com/docker/docker/libnetwork/datastore" <ide> "github.com/docker/docker/libnetwork/discoverapi" <ide> "github.com/docker/docker/libnetwork/driverapi" <ide> "github.com/docker/docker/libnetwork/netlabel" <ide> "github.com/docker/docker/pkg/plugingetter" <del> "github.com/docker/libkv/store/consul" <add> "github.com/docker/libkv/store" <add> "github.com/docker/libkv/store/boltdb" <ide> "github.com/vishvananda/netlink/nl" <add> "golang.org/x/sys/unix" <ide> ) <ide> <ide> func init() { <del> consul.Register() <add> boltdb.Register() <ide> } <ide> <ide> type driverTester struct { <ide> const testNetworkType = "overlay" <ide> func setupDriver(t *testing.T) *driverTester { <ide> dt := &driverTester{t: t} <ide> config := make(map[string]interface{}) <add> <add> tmp, err := os.CreateTemp(t.TempDir(), "libnetwork-") <add> if err != nil { <add> t.Fatalf("Error creating temp file: %v", err) <add> } <add> err = tmp.Close() <add> if err != nil { <add> t.Fatalf("Error closing temp file: %v", err) <add> } <add> defaultPrefix := filepath.Join(os.TempDir(), "libnetwork", "test", "overlay") <add> <ide> config[netlabel.GlobalKVClient] = discoverapi.DatastoreConfigData{ <ide> Scope: datastore.GlobalScope, <del> Provider: "consul", <del> Address: "127.0.0.01:8500", <add> Provider: "boltdb", <add> Address: filepath.Join(defaultPrefix, filepath.Base(tmp.Name())), <add> Config: &store.Config{ <add> Bucket: "libnetwork", <add> ConnectionTimeout: 3 * time.Second, <add> }, <ide> } <ide> <ide> if err := Init(dt, config); err != nil {
file_count: 1

content_type: PHP
main_lang: PHP
message: unskip a secured form test
sha: 1a9b98951a453a6528d4e69f0643fdaa147aaaa2
patch:
<ide><path>src/View/Helper/FormHelper.php <ide> protected function _magicOptions($fieldName, $options, $allowOverride) { <ide> $fieldDef = $context->attributes($fieldName); <ide> <ide> if ($options['type'] === 'number' && !isset($options['step'])) { <del> if ($type === 'decimal') { <del> $decimalPlaces = substr($fieldDef['length'], strpos($fieldDef['length'], ',') + 1); <add> if ($type === 'decimal' && isset($fieldDef['precision'])) { <add> $decimalPlaces = $fieldDef['precision']; <ide> $options['step'] = sprintf('%.' . $decimalPlaces . 'F', pow(10, -1 * $decimalPlaces)); <ide> } elseif ($type === 'float') { <ide> $options['step'] = 'any'; <ide> public function hidden($fieldName, $options = array()) { <ide> )); <ide> <ide> if ($secure === true) { <del> $this->_secure(true, $this->_secureFieldName($options), $options['val']); <add> $this->_secure(true, $this->_secureFieldName($options), (string)$options['val']); <ide> } <ide> <ide> $options['type'] = 'hidden'; <ide><path>tests/TestCase/View/Helper/FormHelperTest.php <ide> public function testFormSecureWithCustomNameAttribute() { <ide> * @return void <ide> */ <ide> public function testFormSecuredInput() { <del> $this->markTestIncomplete('Need to revisit once models work again.'); <ide> $this->Form->request->params['_csrfToken'] = 'testKey'; <add> $this->Form->request->params['_Token'] = 'stuff'; <add> $this->article['schema'] = [ <add> 'ratio' => ['type' => 'decimal', 'length' => 5, 'precision' => 6], <add> 'population' => ['type' => 'decimal', 'length' => 15, 'precision' => 0], <add> ]; <ide> <del> $result = $this->Form->create('Contact', array('url' => '/contacts/add')); <add> $result = $this->Form->create($this->article, array('url' => '/articles/add')); <ide> $encoding = strtolower(Configure::read('App.encoding')); <ide> $expected = array( <del> 'form' => array('method' => 'post', 'action' => '/contacts/add', 'accept-charset' => $encoding, 'id' => 'ContactAddForm'), <add> 'form' => array('method' => 'post', 'action' => '/articles/add', 'accept-charset' => $encoding), <ide> 'div' => array('style' => 'display:none;'), <ide> array('input' => array('type' => 'hidden', 'name' => '_method', 'value' => 'POST')), <ide> array('input' => array( <del> 'type' => 'hidden', 'name' => '_csrfToken', <del> 'value' => 'testKey', 'id' => 'preg:/Token\d+/' <add> 'type' => 'hidden', <add> 'name' => '_csrfToken', <add> 'value' => 'testKey' <ide> )), <ide> '/div' <ide> ); <ide> $this->assertTags($result, $expected); <ide> <del> $result = $this->Form->input('ValidateUser.ratio'); <add> $result = $this->Form->input('ratio'); <ide> $expected = array( <ide> 'div' => array('class'), <ide> 'label' => array('for'), <ide> public function testFormSecuredInput() { <ide> ); <ide> $this->assertTags($result, $expected); <ide> <del> $result = $this->Form->input('ValidateUser.population'); <add> $result = $this->Form->input('population'); <ide> $expected = array( <ide> 'div' => array('class'), <ide> 'label' => array('for'), <ide> public function testFormSecuredInput() { <ide> ); <ide> $this->assertTags($result, $expected); <ide> <del> $result = $this->Form->input('UserForm.published', array('type' => 'text')); <add> $result = $this->Form->input('published', array('type' => 'text')); <ide> $expected = array( <ide> 'div' => array('class' => 'input text'), <del> 'label' => array('for' => 'UserFormPublished'), <add> 'label' => array('for' => 'published'), <ide> 'Published', <ide> '/label', <ide> array('input' => array( <del> 'type' => 'text', 'name' => 'UserForm[published]', 
<del> 'id' => 'UserFormPublished' <add> 'type' => 'text', <add> 'name' => 'published', <add> 'id' => 'published' <ide> )), <ide> '/div' <ide> ); <ide> $this->assertTags($result, $expected); <ide> <del> $result = $this->Form->input('UserForm.other', array('type' => 'text')); <add> $result = $this->Form->input('other', array('type' => 'text')); <ide> $expected = array( <ide> 'div' => array('class' => 'input text'), <del> 'label' => array('for' => 'UserFormOther'), <add> 'label' => array('for' => 'other'), <ide> 'Other', <ide> '/label', <ide> array('input' => array( <del> 'type' => 'text', 'name' => 'UserForm[other]', <del> 'id' => 'UserFormOther' <add> 'type' => 'text', <add> 'name' => 'other', <add> 'id', <ide> )), <ide> '/div' <ide> ); <ide> $this->assertTags($result, $expected); <ide> <del> $result = $this->Form->hidden('UserForm.stuff'); <add> $result = $this->Form->hidden('stuff'); <ide> $expected = array( <ide> 'input' => array( <del> 'type' => 'hidden', 'name' => 'UserForm[stuff]', <del> 'id' => 'UserFormStuff' <add> 'type' => 'hidden', <add> 'name' => 'stuff', <ide> )); <ide> $this->assertTags($result, $expected); <ide> <del> $result = $this->Form->hidden('UserForm.hidden', array('value' => '0')); <add> $result = $this->Form->hidden('hidden', array('value' => '0')); <ide> $expected = array('input' => array( <del> 'type' => 'hidden', 'name' => 'UserForm[hidden]', <del> 'value' => '0', 'id' => 'UserFormHidden' <add> 'type' => 'hidden', <add> 'name' => 'hidden', <add> 'value' => '0' <ide> )); <ide> $this->assertTags($result, $expected); <ide> <del> $result = $this->Form->input('UserForm.something', array('type' => 'checkbox')); <add> $result = $this->Form->input('something', array('type' => 'checkbox')); <ide> $expected = array( <ide> 'div' => array('class' => 'input checkbox'), <ide> array('input' => array( <del> 'type' => 'hidden', 'name' => 'UserForm[something]', <del> 'value' => '0', 'id' => 'UserFormSomething_' <add> 'type' => 'hidden', <add> 'name' => 'something', <add> 'value' => '0' <ide> )), <ide> array('input' => array( <del> 'type' => 'checkbox', 'name' => 'UserForm[something]', <del> 'value' => '1', 'id' => 'UserFormSomething' <add> 'type' => 'checkbox', <add> 'name' => 'something', <add> 'value' => '1', <add> 'id' => 'something' <ide> )), <del> 'label' => array('for' => 'UserFormSomething'), <add> 'label' => array('for' => 'something'), <ide> 'Something', <ide> '/label', <ide> '/div' <ide> public function testFormSecuredInput() { <ide> <ide> $result = $this->Form->fields; <ide> $expected = array( <del> 'UserForm.published', 'UserForm.other', 'UserForm.stuff' => '', <del> 'UserForm.hidden' => '0', 'UserForm.something' <add> 'ratio', 'population', 'published', 'other', <add> 'stuff' => '', <add> 'hidden' => '0', <add> 'something' <ide> ); <ide> $this->assertEquals($expected, $result); <ide> <del> $hash = 'bd7c4a654e5361f9a433a43f488ff9a1065d0aaf%3AUserForm.hidden%7CUserForm.stuff'; <del> <ide> $result = $this->Form->secure($this->Form->fields); <ide> $expected = array( <ide> 'div' => array('style' => 'display:none;'), <ide> array('input' => array( <del> 'type' => 'hidden', 'name' => '_Token[fields]', <del> 'value' => $hash <add> 'type' => 'hidden', <add> 'name' => '_Token[fields]', <add> 'value' <ide> )), <ide> array('input' => array( <del> 'type' => 'hidden', 'name' => '_Token[unlocked]', <del> 'value' => '', 'id' => 'preg:/TokenUnlocked\d+/' <add> 'type' => 'hidden', <add> 'name' => '_Token[unlocked]', <add> 'value' => '' <ide> )), <ide> '/div' <ide> );
file_count: 2

content_type: Javascript
main_lang: Javascript
message: reset test results before execution
sha: 43a4fa8cb03beb26c786655fd48c9b50aa300aa4
patch:
<ide><path>client/src/templates/Challenges/redux/execute-challenge-saga.js <ide> export function* executeChallengeSaga() { <ide> } <ide> <ide> const consoleProxy = yield channel(); <add> <ide> try { <ide> yield put(initLogs()); <ide> yield put(initConsole('// running tests')); <add> // reset tests to initial state <add> const tests = (yield select(challengeTestsSelector)).map( <add> ({ text, testString }) => ({ text, testString }) <add> ); <add> yield put(updateTests(tests)); <add> <ide> yield fork(logToConsole, consoleProxy); <ide> const proxyLogger = args => consoleProxy.put(args); <ide> <ide> export function* executeChallengeSaga() { <ide> proxyLogger, <ide> document <ide> ); <del> const testResults = yield executeTests(testRunner); <add> const testResults = yield executeTests(testRunner, tests); <ide> <ide> yield put(updateTests(testResults)); <ide> yield put(updateConsole('// tests completed')); <ide> function* buildChallengeData(challengeData) { <ide> } <ide> } <ide> <del>function* executeTests(testRunner) { <del> const tests = yield select(challengeTestsSelector); <del> const testTimeout = 5000; <add>function* executeTests(testRunner, tests, testTimeout = 5000) { <ide> const testResults = []; <ide> for (const { text, testString } of tests) { <ide> const newTest = { text, testString };
file_count: 1

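The key line in the patch above strips each test back to its static fields before re-running; in isolation the mapping does this (the sample objects are made up):

```js
const tests = [
  { text: "adds numbers", testString: "assert(add(1, 2) === 3);", pass: true, message: "stale result" }
];

// Drop pass/err/message left over from the previous run, keeping only what is
// needed to run the test again.
const reset = tests.map(({ text, testString }) => ({ text, testString }));

console.log(reset);
// [ { text: 'adds numbers', testString: 'assert(add(1, 2) === 3);' } ]
```
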
content_type: Text
main_lang: Text
message: remove default parameter value from header
sha: 55d9833ebf038f100fe14f9d75a81b082eeaf048
patch:
<ide><path>doc/api/cluster.md <ide> if (cluster.isMaster) { <ide> } <ide> ``` <ide> <del>### `worker.kill([signal='SIGTERM'])` <add>### `worker.kill([signal])` <ide> <!-- YAML <ide> added: v0.9.12 <ide> --> <ide> <ide> * `signal` {string} Name of the kill signal to send to the worker <del> process. <add> process. **Default**: `'SIGTERM'` <ide> <ide> This function will kill the worker. In the master, it does this by disconnecting <ide> the `worker.process`, and once disconnected, killing with `signal`. In the
1
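A short usage sketch of the API this record documents, assuming a plain Node.js script using the built-in `cluster` module; all other names are local to the sketch. Passing `'SIGTERM'` explicitly is equivalent to calling `worker.kill()` with the documented default.

```js
const cluster = require('cluster');

if (cluster.isMaster) {
  const worker = cluster.fork();

  worker.on('online', () => {
    // Explicit signal; 'SIGTERM' is the documented default.
    worker.kill('SIGTERM');
  });

  worker.on('exit', (code, signal) => {
    console.log(`worker exited (code: ${code}, signal: ${signal})`);
  });
} else {
  // Keep the worker alive until the master kills it.
  setInterval(() => {}, 1000);
}
```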
Python
Python
add support for texture folder
932ee2d443e2661103629d82dac4ab936796275d
<ide><path>utils/exporters/blender/addons/io_three/__init__.py <ide> EnumProperty, <ide> BoolProperty, <ide> FloatProperty, <del> IntProperty <add> IntProperty, <add> StringProperty <ide> ) <ide> <ide> from . import constants <ide> bl_info = { <ide> 'name': "Three.js Format", <ide> 'author': "repsac, mrdoob, yomotsu, mpk, jpweeks", <del> 'version': (1, 2, 3), <add> 'version': (1, 2, 2), <ide> 'blender': (2, 7, 3), <ide> 'location': "File > Export", <ide> 'description': "Export Three.js formatted JSON files.", <ide> def save_settings_export(properties): <ide> constants.COMPRESSION: properties.option_compression, <ide> constants.INDENT: properties.option_indent, <ide> constants.COPY_TEXTURES: properties.option_copy_textures, <add> constants.TEXTURE_FOLDER: properties.option_texture_folder, <ide> <ide> constants.SCENE: properties.option_export_scene, <ide> #constants.EMBED_GEOMETRY: properties.option_embed_geometry, <ide> def restore_settings_export(properties): <ide> constants.COPY_TEXTURES, <ide> constants.EXPORT_OPTIONS[constants.COPY_TEXTURES]) <ide> <add> properties.option_texture_folder = settings.get( <add> constants.TEXTURE_FOLDER, <add> constants.EXPORT_OPTIONS[constants.TEXTURE_FOLDER]) <add> <ide> properties.option_embed_animation = settings.get( <ide> constants.EMBED_ANIMATION, <ide> constants.EXPORT_OPTIONS[constants.EMBED_ANIMATION]) <ide> class ExportThree(bpy.types.Operator, ExportHelper): <ide> description="Copy textures", <ide> default=constants.EXPORT_OPTIONS[constants.COPY_TEXTURES]) <ide> <add> option_texture_folder = StringProperty(name="Texture folder", <add> description="add this folder to textures path", default="") <add> <ide> option_lights = BoolProperty( <ide> name="Lights", <ide> description="Export default scene lights", <ide> def execute(self, context): <ide> raise Exception("filename not set") <ide> <ide> settings = save_settings_export(self.properties) <del> settings['addon_version'] = bl_info['version'] <ide> <ide> filepath = self.filepath <ide> if settings[constants.COMPRESSION] == constants.MSGPACK: <ide> def draw(self, context): <ide> row = layout.row() <ide> row.prop(self.properties, 'option_copy_textures') <ide> <add> row = layout.row() <add> row.prop(self.properties, "option_texture_folder") <add> <ide> row = layout.row() <ide> row.prop(self.properties, 'option_scale') <ide> <ide><path>utils/exporters/blender/addons/io_three/constants.py <ide> FACE_MATERIALS = 'faceMaterials' <ide> SKINNING = 'skinning' <ide> COPY_TEXTURES = 'copyTextures' <add>TEXTURE_FOLDER = "texture_folder" <ide> ENABLE_PRECISION = 'enablePrecision' <ide> PRECISION = 'precision' <ide> DEFAULT_PRECISION = 6 <ide> EXPORT_OPTIONS = { <ide> FACES: True, <ide> VERTICES: True, <del> NORMALS: False, <del> UVS: False, <add> NORMALS: True, <add> UVS: True, <ide> COLORS: False, <del> MATERIALS: False, <add> MATERIALS: True, <ide> FACE_MATERIALS: False, <ide> SCALE: 1, <ide> FRAME_STEP: 1, <ide> FRAME_INDEX_AS_TIME: False, <del> SCENE: False, <add> SCENE: True, <ide> MIX_COLORS: False, <ide> COMPRESSION: None, <del> MAPS: False, <add> MAPS: True, <ide> ANIMATION: OFF, <ide> BONES: False, <ide> SKINNING: False, <ide> MORPH_TARGETS: False, <ide> CAMERAS: False, <ide> LIGHTS: False, <ide> COPY_TEXTURES: True, <add> TEXTURE_FOLDER: "", <ide> LOGGING: DEBUG, <ide> ENABLE_PRECISION: True, <ide> PRECISION: DEFAULT_PRECISION, <ide><path>utils/exporters/blender/addons/io_three/exporter/__init__.py <ide> import os <ide> import sys <ide> import traceback <del>from .. 
import constants, logger, exceptions, dialogs <add>from .. import constants, logger, exceptions <ide> from . import scene, geometry, api, base_classes <ide> <ide> <ide> def _error_handler(func): <ide> <ide> def inner(filepath, options, *args, **kwargs): <ide> level = options.get(constants.LOGGING, constants.DEBUG) <del> version = options.get('addon_version') <ide> logger.init('io_three.export.log', level=level) <del> if version is not None: <del> logger.debug("Addon Version %s", version) <ide> api.init() <ide> <ide> try: <ide> def export_scene(filepath, options): <ide> <ide> @_error_handler <ide> def export_geometry(filepath, options, node=None): <del> msg = "" <del> exception = None <ide> if node is None: <ide> node = api.active_object() <ide> if node is None: <del> msg = "Nothing selected" <add> msg = 'Nothing selected' <ide> logger.error(msg) <del> exception = exceptions.SelectionError <add> raise exceptions.SelectionError(msg) <ide> if node.type != 'MESH': <del> msg = "%s is not a valid mesh object" % node.name <del> logger.error(msg) <del> exception = exceptions.GeometryError <add> msg = 'Not a valid mesh object' <add> raise exceptions.GeometryError(msg) <ide> <del> if exception is not None: <del> if api.batch_mode(): <del> raise exception(msg) <del> else: <del> dialogs.error(msg) <del> return <del> <ide> mesh = api.object.mesh(node, options) <ide> parent = base_classes.BaseScene(filepath, options) <ide> geo = geometry.Geometry(mesh, parent) <ide><path>utils/exporters/blender/addons/io_three/exporter/api/__init__.py <ide> def active_object(): <ide> return bpy.context.scene.objects.active <ide> <ide> <del>def batch_mode(): <del> """ <del> <del> :return: Whether or not the session is interactive <del> :rtype: bool <del> <del> """ <del> return bpy.context.area is None <del> <del> <ide> def init(): <ide> """Initializing the api module. Required first step before <ide> initializing the actual export process. 
<ide><path>utils/exporters/blender/addons/io_three/exporter/api/material.py <ide> def _valid_textures(material): <ide> for texture in material.texture_slots: <ide> if not texture: <ide> continue <del> if texture.texture.type != IMAGE: <add> if texture.texture.type != IMAGE or not texture.use: <ide> continue <ide> logger.debug("Valid texture found %s", texture) <ide> yield texture <ide><path>utils/exporters/blender/addons/io_three/exporter/api/texture.py <ide> def textures(): <ide> <ide> """ <ide> logger.debug("texture.textures()") <del> for texture in data.textures: <del> if texture.type == IMAGE: <del> yield texture.name <add> for mat in data.materials: <add> if mat.users == 0: <add> continue <add> for slot in mat.texture_slots: <add> if slot and slot.use and slot.texture.type == IMAGE: <add> yield slot.texture.name <ide><path>utils/exporters/blender/addons/io_three/exporter/geometry.py <ide> def copy(self, scene=True): <ide> <ide> return data <ide> <del> def copy_textures(self): <add> def copy_textures(self, texture_folder=""): <ide> """Copy the textures to the destination directory.""" <ide> logger.debug("Geometry().copy_textures()") <ide> if self.options.get(constants.COPY_TEXTURES): <ide> texture_registration = self.register_textures() <ide> if texture_registration: <ide> logger.info("%s has registered textures", self.node) <ide> io.copy_registered_textures( <del> os.path.dirname(self.scene.filepath), <add> os.path.join(os.path.dirname(self.scene.filepath), texture_folder), <ide> texture_registration) <ide> <ide> def parse(self): <ide><path>utils/exporters/blender/addons/io_three/exporter/image.py <ide> def __init__(self, node, parent): <ide> logger.debug("Image().__init__(%s)", node) <ide> base_classes.BaseNode.__init__(self, node, parent, constants.IMAGE) <ide> <del> self[constants.URL] = api.image.file_name(self.node) <add> texture_folder = self.scene.options.get(constants.TEXTURE_FOLDER, "") <add> self[constants.URL] = os.path.join(texture_folder, api.image.file_name(self.node)) <ide> <ide> @property <ide> def destination(self): <ide><path>utils/exporters/blender/addons/io_three/exporter/io.py <ide> def copy_registered_textures(dest, registration): <ide> <ide> """ <ide> logger.debug("io.copy_registered_textures(%s, %s)", dest, registration) <add> os.makedirs(dest, exist_ok=True) <ide> for value in registration.values(): <ide> copy(value['file_path'], dest) <ide> <ide><path>utils/exporters/blender/addons/io_three/exporter/scene.py <ide> def write(self): <ide> io.dump(self.filepath, data, options=self.options) <ide> <ide> if self.options.get(constants.COPY_TEXTURES): <add> texture_folder = self.options.get(constants.TEXTURE_FOLDER) <ide> for geo in self[constants.GEOMETRIES]: <ide> logger.info("Copying textures from %s", geo.node) <del> geo.copy_textures() <add> geo.copy_textures(texture_folder) <ide> <ide> def _parse_geometries(self): <ide> """Locate all geometry nodes and parse them"""
10
Javascript
Javascript
add ios 11 support test results
60cc6c9fbf9188eb3ce45f0cd97e8af7f9f54263
<ide><path>test/unit/support.js <ide> testIframe( <ide> "reliableMarginLeft": false, <ide> "scrollboxSize": true <ide> }; <add> } else if ( /iphone os 11_/i.test( userAgent ) ) { <add> expected = { <add> "ajax": true, <add> "boxSizingReliable": true, <add> "checkClone": true, <add> "checkOn": true, <add> "clearCloneStyle": true, <add> "cors": true, <add> "createHTMLDocument": true, <add> "focusin": false, <add> "noCloneChecked": true, <add> "optSelected": true, <add> "pixelBoxStyles": true, <add> "pixelPosition": true, <add> "radioValue": true, <add> "reliableMarginLeft": true, <add> "scrollboxSize": true <add> }; <ide> } else if ( /iphone os (?:9|10)_/i.test( userAgent ) ) { <ide> expected = { <ide> "ajax": true,
1
Javascript
Javascript
remove globalobject hack from test case
85e078d7e8d8a12a28b9711e2accb30e43140399
<ide><path>test/configCases/target/chunk-loading-per-entry/webpack.config.js <ide> const base = { <ide> chunkLoading: "import-scripts" <ide> } <ide> }, <del> output: { <del> globalObject: "(typeof self === 'undefined' ? window : self)" <del> }, <ide> target: "web" <ide> }; <ide>
1
Javascript
Javascript
reduce ascii checks in makeinlineimage()
ffae848f4ea24c58f45ed14102a2670627f49ca4
<ide><path>src/core/parser.js <ide> FlateStream, isArray, isCmd, isDict, isInt, isName, isNum, isRef, <ide> isString, Jbig2Stream, JpegStream, JpxStream, LZWStream, Name, <ide> NullStream, PredictorStream, Ref, RunLengthStream, warn, info, <del> StreamType, MissingDataException */ <add> StreamType, MissingDataException, assert */ <ide> <ide> 'use strict'; <ide> <ide> var Parser = (function ParserClosure() { <ide> <ide> // searching for the /EI\s/ <ide> var state = 0, ch, i, ii; <del> while (state !== 4 && (ch = stream.getByte()) !== -1) { <del> switch (ch | 0) { <del> case 0x20: <del> case 0x0D: <del> case 0x0A: <del> // let's check next five bytes to be ASCII... just be sure <del> var followingBytes = stream.peekBytes(5); <del> for (i = 0, ii = followingBytes.length; i < ii; i++) { <add> var E = 0x45, I = 0x49, SPACE = 0x20, NL = 0xA, CR = 0xD; <add> while ((ch = stream.getByte()) !== -1) { <add> if (state === 0) { <add> state = (ch === E) ? 1 : 0; <add> } else if (state === 1) { <add> state = (ch === I) ? 2 : 0; <add> } else { <add> assert(state === 2); <add> if (ch === SPACE || ch === NL || ch === CR) { <add> // Let's check the next five bytes are ASCII... just be sure. <add> var n = 5; <add> var followingBytes = stream.peekBytes(n); <add> for (i = 0; i < n; i++) { <ide> ch = followingBytes[i]; <del> if (ch !== 0x0A && ch !== 0x0D && (ch < 0x20 || ch > 0x7F)) { <del> // not a LF, CR, SPACE or any visible ASCII character <add> if (ch !== NL && ch !== CR && (ch < SPACE || ch > 0x7F)) { <add> // Not a LF, CR, SPACE or any visible ASCII character, i.e. <add> // it's binary stuff. Resetting the state. <ide> state = 0; <del> break; // some binary stuff found, resetting the state <add> break; <ide> } <ide> } <del> state = (state === 3 ? 4 : 0); <del> break; <del> case 0x45: <del> state = 2; <del> break; <del> case 0x49: <del> state = (state === 2 ? 3 : 0); <del> break; <del> default: <add> if (state === 2) { <add> break; // finished! <add> } <add> } else { <ide> state = 0; <del> break; <add> } <ide> } <ide> } <ide>
1
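The rewritten loop amounts to a three-state scan for the `EI` marker followed by whitespace and a short printable-ASCII lookahead. Below is a standalone sketch of the same idea, simplified to operate on a plain byte array rather than a PDF.js stream; the function name and the sample data are invented for the example.

```js
const E = 0x45, I = 0x49, SPACE = 0x20, NL = 0x0a, CR = 0x0d;

// Returns the index just past the whitespace that terminates a plausible
// 'EI' end-of-image marker, or -1 if none is found.
function findInlineImageEnd(bytes) {
  let state = 0; // 0: scanning, 1: saw 'E', 2: saw 'EI'
  for (let pos = 0; pos < bytes.length; pos++) {
    const ch = bytes[pos];
    if (state === 0) {
      state = ch === E ? 1 : 0;
    } else if (state === 1) {
      state = ch === I ? 2 : 0;
    } else {
      // state === 2: require whitespace, then a few bytes that look like text.
      if (ch === SPACE || ch === NL || ch === CR) {
        const lookahead = bytes.slice(pos + 1, pos + 6);
        const looksBinary = lookahead.some(
          (b) => b !== NL && b !== CR && (b < SPACE || b > 0x7f)
        );
        if (!looksBinary) {
          return pos + 1; // marker accepted
        }
      }
      state = 0; // binary data or a false match; keep scanning
    }
  }
  return -1;
}

// "...binary bytes... EI q" : the marker is followed by ordinary operators.
const data = [0x00, 0xff, E, I, SPACE, 0x71, SPACE];
console.log(findInlineImageEnd(data)); // 5
```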
Mixed
Javascript
allow use of abortcontroller with on
df1023bb224d7ac79315308e2b9b18f3b3b01d39
<ide><path>doc/api/events.md <ide> Value: `Symbol.for('nodejs.rejection')` <ide> <ide> See how to write a custom [rejection handler][rejection]. <ide> <del>## `events.on(emitter, eventName)` <add>## `events.on(emitter, eventName[, options])` <ide> <!-- YAML <ide> added: <ide> - v13.6.0 <ide> added: <ide> <ide> * `emitter` {EventEmitter} <ide> * `eventName` {string|symbol} The name of the event being listened for <add>* `options` {Object} <add> * `signal` {AbortSignal} An {AbortSignal} that can be used to cancel awaiting <add> events. <ide> * Returns: {AsyncIterator} that iterates `eventName` events emitted by the `emitter` <ide> <ide> ```js <ide> if the `EventEmitter` emits `'error'`. It removes all listeners when <ide> exiting the loop. The `value` returned by each iteration is an array <ide> composed of the emitted event arguments. <ide> <add>An {AbortSignal} may be used to cancel waiting on events: <add> <add>```js <add>const { on, EventEmitter } = require('events'); <add>const ac = new AbortController(); <add> <add>(async () => { <add> const ee = new EventEmitter(); <add> <add> // Emit later on <add> process.nextTick(() => { <add> ee.emit('foo', 'bar'); <add> ee.emit('foo', 42); <add> }); <add> <add> for await (const event of on(ee, 'foo', { signal: ac.signal })) { <add> // The execution of this inner block is synchronous and it <add> // processes one event at a time (even with await). Do not use <add> // if concurrent execution is required. <add> console.log(event); // prints ['bar'] [42] <add> } <add> // Unreachable here <add>})(); <add> <add>process.nextTick(() => ac.abort()); <add>``` <add> <ide> ## `EventTarget` and `Event` API <ide> <!-- YAML <ide> added: v14.5.0 <ide><path>lib/events.js <ide> function eventTargetAgnosticAddListener(emitter, name, listener, flags) { <ide> } <ide> } <ide> <del>function on(emitter, event) { <add>function on(emitter, event, options) { <add> const { signal } = { ...options }; <add> validateAbortSignal(signal, 'options.signal'); <add> if (signal && signal.aborted) { <add> throw lazyDOMException('The operation was aborted', 'AbortError'); <add> } <add> <ide> const unconsumedEvents = []; <ide> const unconsumedPromises = []; <ide> let error = null; <ide> function on(emitter, event) { <ide> return() { <ide> eventTargetAgnosticRemoveListener(emitter, event, eventHandler); <ide> eventTargetAgnosticRemoveListener(emitter, 'error', errorHandler); <add> <add> if (signal) { <add> eventTargetAgnosticRemoveListener( <add> signal, <add> 'abort', <add> abortListener, <add> { once: true }); <add> } <add> <ide> finished = true; <ide> <ide> for (const promise of unconsumedPromises) { <ide> function on(emitter, event) { <ide> addErrorHandlerIfEventEmitter(emitter, errorHandler); <ide> } <ide> <add> if (signal) { <add> eventTargetAgnosticAddListener( <add> signal, <add> 'abort', <add> abortListener, <add> { once: true }); <add> } <ide> <ide> return iterator; <ide> <add> function abortListener() { <add> errorHandler(lazyDOMException('The operation was aborted', 'AbortError')); <add> } <add> <ide> function eventHandler(...args) { <ide> const promise = unconsumedPromises.shift(); <ide> if (promise) { <ide><path>test/parallel/test-event-on-async-iterator.js <del>// Flags: --expose-internals <add>// Flags: --expose-internals --no-warnings <ide> 'use strict'; <ide> <ide> const common = require('../common'); <ide> async function nodeEventTarget() { <ide> clearInterval(interval); <ide> } <ide> <add>async function abortableOnBefore() { <add> const ee = new EventEmitter(); <add> 
const ac = new AbortController(); <add> ac.abort(); <add> [1, {}, null, false, 'hi'].forEach((signal) => { <add> assert.throws(() => on(ee, 'foo', { signal }), { <add> code: 'ERR_INVALID_ARG_TYPE' <add> }); <add> }); <add> assert.throws(() => on(ee, 'foo', { signal: ac.signal }), { <add> name: 'AbortError' <add> }); <add>} <add> <add>async function eventTargetAbortableOnBefore() { <add> const et = new EventTarget(); <add> const ac = new AbortController(); <add> ac.abort(); <add> [1, {}, null, false, 'hi'].forEach((signal) => { <add> assert.throws(() => on(et, 'foo', { signal }), { <add> code: 'ERR_INVALID_ARG_TYPE' <add> }); <add> }); <add> assert.throws(() => on(et, 'foo', { signal: ac.signal }), { <add> name: 'AbortError' <add> }); <add>} <add> <add>async function abortableOnAfter() { <add> const ee = new EventEmitter(); <add> const ac = new AbortController(); <add> <add> const i = setInterval(() => ee.emit('foo', 'foo'), 10); <add> <add> async function foo() { <add> for await (const f of on(ee, 'foo', { signal: ac.signal })) { <add> assert.strictEqual(f, 'foo'); <add> } <add> } <add> <add> foo().catch(common.mustCall((error) => { <add> assert.strictEqual(error.name, 'AbortError'); <add> })).finally(() => { <add> clearInterval(i); <add> }); <add> <add> process.nextTick(() => ac.abort()); <add>} <add> <add>async function eventTargetAbortableOnAfter() { <add> const et = new EventTarget(); <add> const ac = new AbortController(); <add> <add> const i = setInterval(() => et.dispatchEvent(new Event('foo')), 10); <add> <add> async function foo() { <add> for await (const f of on(et, 'foo', { signal: ac.signal })) { <add> assert(f); <add> } <add> } <add> <add> foo().catch(common.mustCall((error) => { <add> assert.strictEqual(error.name, 'AbortError'); <add> })).finally(() => { <add> clearInterval(i); <add> }); <add> <add> process.nextTick(() => ac.abort()); <add>} <add> <add>async function eventTargetAbortableOnAfter2() { <add> const et = new EventTarget(); <add> const ac = new AbortController(); <add> <add> const i = setInterval(() => et.dispatchEvent(new Event('foo')), 10); <add> <add> async function foo() { <add> for await (const f of on(et, 'foo', { signal: ac.signal })) { <add> assert(f); <add> // Cancel after a single event has been triggered. <add> ac.abort(); <add> } <add> } <add> <add> foo().catch(common.mustCall((error) => { <add> assert.strictEqual(error.name, 'AbortError'); <add> })).finally(() => { <add> clearInterval(i); <add> }); <add>} <add> <add>async function abortableOnAfterDone() { <add> const ee = new EventEmitter(); <add> const ac = new AbortController(); <add> <add> const i = setInterval(() => ee.emit('foo', 'foo'), 1); <add> let count = 0; <add> <add> async function foo() { <add> for await (const f of on(ee, 'foo', { signal: ac.signal })) { <add> assert.strictEqual(f[0], 'foo'); <add> if (++count === 5) <add> break; <add> } <add> ac.abort(); // No error will occur <add> } <add> <add> foo().finally(() => { <add> clearInterval(i); <add> }); <add>} <ide> <ide> async function run() { <ide> const funcs = [ <ide> async function run() { <ide> iterableThrow, <ide> eventTarget, <ide> errorListenerCount, <del> nodeEventTarget <add> nodeEventTarget, <add> abortableOnBefore, <add> abortableOnAfter, <add> eventTargetAbortableOnBefore, <add> eventTargetAbortableOnAfter, <add> eventTargetAbortableOnAfter2, <add> abortableOnAfterDone <ide> ]; <ide> <ide> for (const fn of funcs) {
3
Javascript
Javascript
add queue typing
6b583e375103a66ce7de857dced19f572f620f48
<ide><path>lib/util/Queue.js <ide> "use strict"; <ide> <del>module.exports = class Queue { <add>/** <add> * @template T <add> */ <add>class Queue { <add> /** <add> * @param {IterableIterator<T>=} items The initial elements. <add> */ <ide> constructor(items) { <add> /** @private @type {Set<T>} */ <ide> this.set = new Set(items); <add> /** @private @type {Iterator<T>} */ <ide> this.iterator = this.set[Symbol.iterator](); <ide> } <ide> <add> /** <add> * Returns the number of elements in this queue. <add> * @return {number} The number of elements in this queue. <add> */ <ide> get length() { <ide> return this.set.size; <ide> } <ide> <add> /** <add> * Appends the specified element to this queue. <add> * @param {T} item The element to add. <add> * @return {void} <add> */ <ide> enqueue(item) { <ide> this.set.add(item); <ide> } <ide> <add> /** <add> * Retrieves and removes the head of this queue. <add> * @return {T | undefined} The head of the queue of `undefined` if this queue is empty. <add> */ <ide> dequeue() { <ide> const result = this.iterator.next(); <ide> if (result.done) return undefined; <ide> this.set.delete(result.value); <ide> return result.value; <ide> } <del>}; <add>} <add> <add>module.exports = Queue;
1
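A brief usage sketch of the queue these annotations describe: a Set-backed FIFO that silently deduplicates. The class below is a local stand-in with the same public surface (`enqueue`, `dequeue`, `length`), not the webpack module itself.

```js
/** @template T */
class Queue {
  /** @param {Iterable<T>=} items */
  constructor(items) {
    /** @type {Set<T>} */
    this.set = new Set(items);
    this.iterator = this.set[Symbol.iterator]();
  }
  get length() {
    return this.set.size;
  }
  /** @param {T} item */
  enqueue(item) {
    this.set.add(item);
  }
  /** @returns {T | undefined} */
  dequeue() {
    const result = this.iterator.next();
    if (result.done) return undefined;
    this.set.delete(result.value);
    return result.value;
  }
}

/** @type {Queue<string>} */
const queue = new Queue(['a', 'b']);
queue.enqueue('b'); // duplicate: ignored because the backing store is a Set
queue.enqueue('c');

console.log(queue.length);    // 3
console.log(queue.dequeue()); // 'a'
console.log(queue.dequeue()); // 'b'
console.log(queue.dequeue()); // 'c'
console.log(queue.dequeue()); // undefined
```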
Text
Text
update a typo in the guide's requested update
5d0ae7efb921c6abf742e96f7b65a6c282b0c952
<ide><path>guides/source/active_storage_overview.md <ide> message.video.open do |file| <ide> end <ide> ``` <ide> <del>It's important to know that the file are not yet available in the `after_create` commit but in the `after_create_commit` only <add>It's important to know that the file are not yet available in the `after_create` callback but in the `after_create_commit` only <ide> <ide> Transforming Images <ide> -------------------
1
Text
Text
add 1.4.6 changes
4dd10fd964ca7904fe3f89a128fc1438a555ba93
<ide><path>CHANGELOG.md <add><a name="1.4.6"></a> <add># 1.4.6 multiplicative-elevation (2015-09-17) <add> <add> <add>## Bug Fixes <add> <add>- **$animate:** invalid CSS class names should not break subsequent elements <add> ([c3a654b7](https://github.com/angular/angular.js/commit/c3a654b7c8e585b8fb9f90ece10ef54d19fd74c8), <add> [#12674](https://github.com/angular/angular.js/issues/12674), [#12725](https://github.com/angular/angular.js/issues/12725)) <add>- **$browser:** handle async updates to location <add> ([8d39bd8a](https://github.com/angular/angular.js/commit/8d39bd8abf423517b5bff70137c2a29e32bff76d), <add> [#12241](https://github.com/angular/angular.js/issues/12241), [#12819](https://github.com/angular/angular.js/issues/12819)) <add>- **$http:** propagate status -1 for timed out requests <add> ([38520a1a](https://github.com/angular/angular.js/commit/38520a1a73fffb6cfeffc7edfcab5be33e1619eb), <add> [#4491](https://github.com/angular/angular.js/issues/4491), [#8756](https://github.com/angular/angular.js/issues/8756)) <add>- **$httpBackend:** send `null` when post-data is undefined <add> ([6f39f108](https://github.com/angular/angular.js/commit/6f39f1082773921e79b48a78aa6cd8a7d1921da7), <add> [#12141](https://github.com/angular/angular.js/issues/12141), [#12739](https://github.com/angular/angular.js/issues/12739)) <add>- **$parse:** <add> - throw error when accessing a restricted property indirectly <add> ([b2f8b0b8](https://github.com/angular/angular.js/commit/b2f8b0b875dbabf7bba0ba6e9bd553c7a8b910d0), <add> [#12833](https://github.com/angular/angular.js/issues/12833)) <add> - `assign` returns the new value <add> ([7d2c6eee](https://github.com/angular/angular.js/commit/7d2c6eeef8ad61690737b6298c94f066082eff58), <add> [#12675](https://github.com/angular/angular.js/issues/12675), [#12708](https://github.com/angular/angular.js/issues/12708)) <add>- **angular.copy:** support copying XML nodes <add> ([122ab074](https://github.com/angular/angular.js/commit/122ab074cac6401ecded51fa031af139360f40aa), <add> [#5429](https://github.com/angular/angular.js/issues/5429), [#12786](https://github.com/angular/angular.js/issues/12786)) <add>- **form, ngModel:** correctly notify parent form when children are added <add> ([c6110e8b](https://github.com/angular/angular.js/commit/c6110e8b08c7e9bb2b7da5ecc5c42d1a834ea92d)) <add>- **input:** ignore min/max if they are empty on all input types <add> ([544001f5](https://github.com/angular/angular.js/commit/544001f5a331de06961c0201d69ecc92893abd0b), <add> [#12363](https://github.com/angular/angular.js/issues/12363), [#12785](https://github.com/angular/angular.js/issues/12785)) <add>- **ngAnimateMock:** $animate.flush should work for looping animations <add> ([472d076c](https://github.com/angular/angular.js/commit/472d076cca2ffb99bd87d3c026ef69afc713268d)) <add>- **ngAria:** clean up tabindex usage <add> ([f48244ce](https://github.com/angular/angular.js/commit/f48244ce5e6d11637aab97af1aff3430bda12429), <add> [#11500](https://github.com/angular/angular.js/issues/11500)) <add>- **ngJq:** properly detect when `ng-jq` is empty <add> ([19ecdb54](https://github.com/angular/angular.js/commit/19ecdb54bf85fc4e7bd3cde453aa6843f869a1ab), <add> [#12741](https://github.com/angular/angular.js/issues/12741)) <add>- **ngModel:** <add> - remove reference to parentForm from removed control <add> ([290b5049](https://github.com/angular/angular.js/commit/290b5049c2de4aa0d6ba8eea624bc6dce027b197), <add> [#12263](https://github.com/angular/angular.js/issues/12263)) <add> - let aliased 
validator directives work on any element <add> ([43769fb6](https://github.com/angular/angular.js/commit/43769fb676ae904852582a2c88a5523f0b9f58fc), <add> [#12158](https://github.com/angular/angular.js/issues/12158), [#12658](https://github.com/angular/angular.js/issues/12658)) <add>- **ngRepeat:** add support to iterate an object's properties even if it does not inherit from Object <add> ([7ea2c7f3](https://github.com/angular/angular.js/commit/7ea2c7f36ef854391df3f6b127ad42a2d5cbf1a3), <add> [#9964](https://github.com/angular/angular.js/issues/9964)) <add>- **rootScope:** add support for watchCollection to watch an object which does not inherit from Object <add> ([20fb626b](https://github.com/angular/angular.js/commit/20fb626b78ed8fbd02f59f5b26df9387a2a6ea0e), <add> [#9964](https://github.com/angular/angular.js/issues/9964)) <add>- **select:** update option if interpolated value attribute changes <add> ([82b0929e](https://github.com/angular/angular.js/commit/82b0929e4ea0ae087f766f2ee26f9570c8a3c8ac), <add> [#12005](https://github.com/angular/angular.js/issues/12005), [#12582](https://github.com/angular/angular.js/issues/12582)) <add>- **toDebugString:** change replacement string <add> ([0ca8b1df](https://github.com/angular/angular.js/commit/0ca8b1df201044019596db7173d784aeebdea0a7), <add> [#10103](https://github.com/angular/angular.js/issues/10103)) <add> <add> <add>## Performance Improvements <add> <add>- **Angular:** only create new collection in getBlockNodes if the block has changed <add> ([0202663e](https://github.com/angular/angular.js/commit/0202663e938a477cd86145bb158bf7a02efd8fb5), <add> [#9899](https://github.com/angular/angular.js/issues/9899)) <add> <add> <add> <add> <add><a name="1.3.19"></a> <add># 1.3.19 glutinous-shriek (2015-09-15) <add> <add>## Bug Fixes <add> <add>- **$http:** propagate status -1 for timed out requests <add> ([f13055a0](https://github.com/angular/angular.js/commit/f13055a0a53a39b160448713a5617edee6042801), <add> [#4491](https://github.com/angular/angular.js/issues/4491), [#8756](https://github.com/angular/angular.js/issues/8756)) <add>- **$location:** don't crash if navigating outside the app base <add> ([623ce1ad](https://github.com/angular/angular.js/commit/623ce1ad2cf68024719c5cae5d682d00195df30c), <add> [#11667](https://github.com/angular/angular.js/issues/11667)) <add>- **$parse:** throw error when accessing a restricted property indirectly <add> ([ec98c94c](https://github.com/angular/angular.js/commit/ec98c94ccbfc97b655447956738d5f6ff98b2f33), <add> [#12833](https://github.com/angular/angular.js/issues/12833)) <add>- **ngModel:** validate pattern against the viewValue <add> ([274e9353](https://github.com/angular/angular.js/commit/274e93537ed4e95aefeacea48909eb334894f0ac), <add> [#12344](https://github.com/angular/angular.js/issues/12344)) <add> <add> <add>## Features <add> <add>- **ngAnimate:** introduce `$animate.flush` for unit testing <add> ([f98e0384](https://github.com/angular/angular.js/commit/f98e038418f7367b2373adcf4887f64a8e8bdcb0)) <add> <add> <add>## Possible Breaking Changes <add> <add>- **ngModel:** due to [274e9353](https://github.com/angular/angular.js/commit/274e93537ed4e95aefeacea48909eb334894f0ac), <add> <add> <add>The `ngPattern` and `pattern` directives will validate the regex <add>against the `viewValue` of `ngModel`, i.e. the value of the model <add>before the $parsers are applied. Previously, the modelValue <add>(the result of the $parsers) was validated. 
<add> <add>This fixes issues where `input[date]` and `input[number]` cannot <add>be validated because the viewValue string is parsed into <add>`Date` and `Number` respectively (starting with Angular 1.3). <add>It also brings the directives in line with HTML5 constraint <add>validation, which validates against the input value. <add> <add>This change is unlikely to cause applications to fail, because even <add>in Angular 1.2, the value that was validated by pattern could have <add>been manipulated by the $parsers, as all validation was done <add>inside this pipeline. <add> <add>If you rely on the pattern being validated against the modelValue, <add>you must create your own validator directive that overwrites <add>the built-in pattern validator: <add> <add>``` <add>.directive('patternModelOverwrite', function patternModelOverwriteDirective() { <add> return { <add> restrict: 'A', <add> require: '?ngModel', <add> priority: 1, <add> compile: function() { <add> var regexp, patternExp; <add> <add> return { <add> pre: function(scope, elm, attr, ctrl) { <add> if (!ctrl) return; <add> <add> attr.$observe('pattern', function(regex) { <add> /** <add> * The built-in directive will call our overwritten validator <add> * (see below). We just need to update the regex. <add> * The preLink fn guaranetees our observer is called first. <add> */ <add> if (isString(regex) && regex.length > 0) { <add> regex = new RegExp('^' + regex + '$'); <add> } <add> <add> if (regex && !regex.test) { <add> //The built-in validator will throw at this point <add> return; <add> } <add> <add> regexp = regex || undefined; <add> }); <add> <add> }, <add> post: function(scope, elm, attr, ctrl) { <add> if (!ctrl) return; <add> <add> regexp, patternExp = attr.ngPattern || attr.pattern; <add> <add> //The postLink fn guarantees we overwrite the built-in pattern validator <add> ctrl.$validators.pattern = function(value) { <add> return ctrl.$isEmpty(value) || <add> isUndefined(regexp) || <add> regexp.test(value); <add> }; <add> } <add> }; <add> } <add> }; <add>}); <add>``` <add> <add> <add> <add> <ide> <a name="1.4.5"></a> <ide> # 1.4.5 permanent-internship (2015-08-28) <ide>
1
Python
Python
show rss memory usage at start and end of benchmark
ce89e3dc37505dab3dcbb3342ba547a2baf70a81
<ide><path>celery/utils/compat.py <ide> def emit(self, record): <ide> stat = os.stat(self.baseFilename) <ide> self.dev, self.ino = stat[ST_DEV], stat[ST_INO] <ide> logging.FileHandler.emit(self, record) <add> <add> <add>############## format(int, ',d') ########################## <add> <add>if sys.version_info >= (2, 7): <add> def format_d(i): <add> return format(i, ',d') <add>else: <add> def format_d(i): <add> s = '%d' % i <add> groups = [] <add> while s and s[-1].isdigit(): <add> groups.append(s[-3:]) <add> s = s[:-3] <add> return s + ','.join(reversed(groups)) <ide><path>celery/worker/state.py <ide> def task_ready(request): <ide> <ide> from time import time <ide> from billiard import current_process <add> from celery.utils.compat import format_d <ide> <ide> all_count = 0 <ide> bench_first = None <add> bench_mem_first = None <ide> bench_start = None <ide> bench_last = None <ide> bench_every = int(os.environ.get("CELERY_BENCH_EVERY", 1000)) <ide> bench_sample = [] <ide> __reserved = task_reserved <ide> __ready = task_ready <add> _process = None <add> <add> def ps(): <add> global _process <add> if _process is None: <add> try: <add> from psutil import Process <add> except ImportError: <add> return None <add> _process = Process(os.getpid()) <add> return _process <add> <add> def mem_rss(): <add> p = ps() <add> if p is None: <add> return "(psutil not installed)" <add> return "%s MB" % (format_d(p.get_memory_info().rss // 1024), ) <ide> <ide> if current_process()._name == 'MainProcess': <ide> @atexit.register <ide> def on_shutdown(): <ide> print("\n- Time spent in benchmark: %r" % ( <ide> bench_last - bench_first)) <ide> print("- Avg: %s" % (sum(bench_sample) / len(bench_sample))) <add> print("- RSS: %s --> %s" % (bench_mem_first, mem_rss())) <ide> <ide> def task_reserved(request): # noqa <ide> global bench_start <ide> global bench_first <add> global bench_mem_first <ide> now = None <ide> if bench_start is None: <ide> bench_start = now = time() <ide> if bench_first is None: <ide> bench_first = now <add> if bench_mem_first is None: <add> bench_mem_first = mem_rss() <ide> <ide> return __reserved(request) <ide>
2
Text
Text
fix process.stdin example
86ae2c182b425e08e8a98fcd47220d452cc94f53
<ide><path>doc/api/process.md <ide> a [Readable][] stream. <ide> process.stdin.setEncoding('utf8'); <ide> <ide> process.stdin.on('readable', () => { <del> const chunk = process.stdin.read(); <del> if (chunk !== null) { <add> let chunk; <add> // Use a loop to make sure we read all available data. <add> while ((chunk = process.stdin.read()) !== null) { <ide> process.stdout.write(`data: ${chunk}`); <ide> } <ide> });
1
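A complete, runnable version of the corrected pattern, extended only with an `'end'` handler: the `while` loop keeps calling `read()` until it returns `null`, so every chunk currently buffered is consumed on each `'readable'` event.

```js
process.stdin.setEncoding('utf8');

process.stdin.on('readable', () => {
  let chunk;
  // read() returns null once the internal buffer is drained,
  // which is what terminates the loop.
  while ((chunk = process.stdin.read()) !== null) {
    process.stdout.write(`data: ${chunk}`);
  }
});

process.stdin.on('end', () => {
  process.stdout.write('end\n');
});
```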
PHP
PHP
remove parameter that does nothing
0b8569fea17c0a0c64320d9919a658d11f141017
<ide><path>tests/TestCase/View/Helper/FormHelperTest.php <ide> public function testCreateEndGetNoSecurity() { <ide> ]); <ide> $this->assertNotContains('testKey', $result); <ide> <del> $result = $this->Form->end('Save'); <add> $result = $this->Form->end(); <ide> $this->assertNotContains('testKey', $result); <ide> } <ide>
1
Text
Text
fix error notes in `buffer.from()` variants
e4c6c3bf2e6f8ee8f2d1d9b95fc731a5ac224fdf
<ide><path>doc/api/buffer.md <ide> Allocates a new `Buffer` using an `array` of octets. <ide> const buf = Buffer.from([0x62, 0x75, 0x66, 0x66, 0x65, 0x72]); <ide> ``` <ide> <del>A `TypeError` will be thrown if `array` is not an `Array`. <add>A `TypeError` will be thrown if `array` is not an `Array` or other type <add>appropriate for `Buffer.from()` variants. <ide> <ide> ### Class Method: Buffer.from(arrayBuffer[, byteOffset[, length]]) <ide> <!-- YAML <ide> console.log(buf.length); <ide> ``` <ide> <ide> A `TypeError` will be thrown if `arrayBuffer` is not an [`ArrayBuffer`] or a <del>[`SharedArrayBuffer`]. <add>[`SharedArrayBuffer`] or other type appropriate for `Buffer.from()` variants. <ide> <ide> ### Class Method: Buffer.from(buffer) <ide> <!-- YAML <ide> console.log(buf2.toString()); <ide> // Prints: buffer <ide> ``` <ide> <del>A `TypeError` will be thrown if `buffer` is not a `Buffer`. <add>A `TypeError` will be thrown if `buffer` is not a `Buffer` or other type <add>appropriate for `Buffer.from()` variants. <ide> <ide> ### Class Method: Buffer.from(object[, offsetOrEncoding[, length]]) <ide> <!-- YAML <ide> const buf = Buffer.from(new Foo(), 'utf8'); <ide> // Prints: <Buffer 74 68 69 73 20 69 73 20 61 20 74 65 73 74> <ide> ``` <ide> <add>A `TypeError` will be thrown if `object` has not mentioned methods or is not of <add>other type appropriate for `Buffer.from()` variants. <add> <ide> ### Class Method: Buffer.from(string[, encoding]) <ide> <!-- YAML <ide> added: v5.10.0 <ide> console.log(buf1.toString('ascii')); <ide> // Prints: this is a tC)st <ide> ``` <ide> <del>A `TypeError` will be thrown if `string` is not a string. <add>A `TypeError` will be thrown if `string` is not a string or other type <add>appropriate for `Buffer.from()` variants. <ide> <ide> ### Class Method: Buffer.isBuffer(obj) <ide> <!-- YAML
1
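To make the documented behaviour concrete, here is a small sketch that exercises the `TypeError` path shared by the `Buffer.from()` variants; it uses only plain Node.js and the built-in `assert` module.

```js
const assert = require('assert');

// A valid variant: an array of octets.
const ok = Buffer.from([0x68, 0x69]);
assert.strictEqual(ok.toString(), 'hi');

// A number is not appropriate for any Buffer.from() variant: TypeError.
assert.throws(() => Buffer.from(42), TypeError);

// Same for null.
assert.throws(() => Buffer.from(null), TypeError);

console.log('all assertions passed');
```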
Ruby
Ruby
define symbol#to_proc for ruby 1.8.6
0949d952dc1a0eda002108b35c7fda38df7448fc
<ide><path>Library/Homebrew/extend/symbol.rb <add>class Symbol <add> def to_proc <add> proc { |obj, *args| obj.send(self, *args) } <add> end unless method_defined?(:to_proc) <add>end <ide><path>Library/Homebrew/global.rb <ide> require 'extend/pathname' <ide> require 'extend/ARGV' <ide> require 'extend/string' <add>require 'extend/symbol' <ide> require 'utils' <ide> require 'exceptions' <ide> require 'set'
2
Go
Go
remove tp4 support from test code
b0e24c73931d70ef543a3d69c41b0add7130cb80
<ide><path>integration-cli/docker_api_containers_test.go <ide> func (s *DockerSuite) TestStartWithTooLowMemoryLimit(c *check.C) { <ide> } <ide> <ide> func (s *DockerSuite) TestContainerApiRename(c *check.C) { <del> // TODO Windows: Enable for TP5. Fails on TP4. <add> // TODO Windows: Debug why this sometimes fails on TP5. For now, leave disabled <ide> testRequires(c, DaemonIsLinux) <ide> out, _ := dockerCmd(c, "run", "--name", "TestContainerApiRename", "-d", "busybox", "sh") <ide> <ide><path>integration-cli/docker_cli_events_test.go <ide> func (s *DockerSuite) TestEventsContainerFailStartDie(c *check.C) { <ide> } <ide> <ide> func (s *DockerSuite) TestEventsLimit(c *check.C) { <del> // TODO Windows CI: This test is not reliable enough on Windows TP4. Reports <del> // multiple errors in the analytic log sometimes. <del> // [NetSetupHelper::InstallVirtualMiniport()@2153] NetSetup install of ROOT\VMS_MP\0001 failed with error 0x80070002 <del> // This should be able to be enabled on TP5. <del> testRequires(c, DaemonIsLinux) <ide> var waitGroup sync.WaitGroup <ide> errChan := make(chan error, 17) <ide> <ide> func (s *DockerSuite) TestEventsResize(c *check.C) { <ide> } <ide> <ide> func (s *DockerSuite) TestEventsAttach(c *check.C) { <del> // TODO Windows CI: Figure out why this test fails intermittently (TP4 and TP5). <add> // TODO Windows CI: Figure out why this test fails intermittently (TP5). <ide> testRequires(c, DaemonIsLinux) <ide> since := daemonTime(c).Unix() <ide> <ide> func (s *DockerSuite) TestEventsDefaultEmpty(c *check.C) { <ide> <ide> // #14316 <ide> func (s *DockerRegistrySuite) TestEventsImageFilterPush(c *check.C) { <del> // Problematic to port for Windows CI during TP4/TP5 timeframe while <del> // not supporting push <add> // Problematic to port for Windows CI during TP5 timeframe until <add> // supporting push <ide> testRequires(c, DaemonIsLinux) <ide> testRequires(c, Network) <ide> since := daemonTime(c).Unix() <ide><path>integration-cli/docker_cli_logs_test.go <ide> func (s *DockerSuite) TestLogsSince(c *check.C) { <ide> } <ide> <ide> func (s *DockerSuite) TestLogsSinceFutureFollow(c *check.C) { <del> // TODO Windows: Flakey on TP4. Enable for next technical preview. <del> testRequires(c, DaemonIsLinux) <ide> name := "testlogssincefuturefollow" <ide> out, _ := dockerCmd(c, "run", "-d", "--name", name, "busybox", "/bin/sh", "-c", `for i in $(seq 1 5); do echo log$i; sleep 1; done`) <ide> <ide> func (s *DockerSuite) TestLogsSinceFutureFollow(c *check.C) { <ide> <ide> // Regression test for #8832 <ide> func (s *DockerSuite) TestLogsFollowSlowStdoutConsumer(c *check.C) { <del> // TODO Windows: Consider enabling post-TP4. Too expensive to run on TP4 <add> // TODO Windows: Fix this test for TP5. <ide> testRequires(c, DaemonIsLinux) <ide> out, _ := dockerCmd(c, "run", "-d", "busybox", "/bin/sh", "-c", `usleep 600000;yes X | head -c 200000`) <ide> <ide><path>integration-cli/docker_cli_rmi_test.go <ide> func (*DockerSuite) TestRmiParentImageFail(c *check.C) { <ide> } <ide> <ide> func (s *DockerSuite) TestRmiWithParentInUse(c *check.C) { <del> // TODO Windows. There is a bug either in Windows TP4, or the TP4 compatible <del> // docker which means this test fails. It has been verified to have been fixed <del> // in TP5 and docker/master, hence enable it once CI switch to TP5. 
<del> testRequires(c, DaemonIsLinux) <ide> out, _ := dockerCmd(c, "create", "busybox") <ide> cID := strings.TrimSpace(out) <ide> <ide> func (s *DockerSuite) TestRmiWithParentInUse(c *check.C) { <ide> <ide> // #18873 <ide> func (s *DockerSuite) TestRmiByIDHardConflict(c *check.C) { <del> // TODO Windows CI. This will work on a TP5 compatible docker which <del> // has content addressibility fixes. Do not run this on TP4 as it <del> // will end up deleting the busybox image causing subsequent tests to fail. <del> testRequires(c, DaemonIsLinux) <ide> dockerCmd(c, "create", "busybox") <ide> <ide> imgID := inspectField(c, "busybox:latest", "Id") <ide><path>integration-cli/docker_cli_run_test.go <ide> func (s *DockerSuite) TestRunCreateVolumesInSymlinkDir2(c *check.C) { <ide> } <ide> <ide> func (s *DockerSuite) TestRunVolumesMountedAsReadonly(c *check.C) { <del> // TODO Windows (Post TP4): This test cannot run on a Windows daemon as <add> // TODO Windows (Post TP5): This test cannot run on a Windows daemon as <ide> // Windows does not support read-only bind mounts. <ide> testRequires(c, DaemonIsLinux) <ide> if _, code, err := dockerCmdWithError("run", "-v", "/test:/test:ro", "busybox", "touch", "/test/somefile"); err == nil || code == 0 { <ide> func (s *DockerSuite) TestRunVolumesMountedAsReadonly(c *check.C) { <ide> } <ide> <ide> func (s *DockerSuite) TestRunVolumesFromInReadonlyModeFails(c *check.C) { <del> // TODO Windows (Post TP4): This test cannot run on a Windows daemon as <add> // TODO Windows (Post TP5): This test cannot run on a Windows daemon as <ide> // Windows does not support read-only bind mounts. Modified for when ro is supported. <ide> testRequires(c, DaemonIsLinux) <ide> var ( <ide> func (s *DockerSuite) TestRunVolumesFromInReadWriteMode(c *check.C) { <ide> <ide> func (s *DockerSuite) TestVolumesFromGetsProperMode(c *check.C) { <ide> // TODO Windows: This test cannot yet run on a Windows daemon as Windows does <del> // not support read-only bind mounts as at TP4 <add> // not support read-only bind mounts as at TP5 <ide> testRequires(c, DaemonIsLinux) <ide> dockerCmd(c, "run", "--name", "parent", "-v", "/test:/test:ro", "busybox", "true") <ide> <ide> func (s *DockerSuite) TestRunExitCode(c *check.C) { <ide> func (s *DockerSuite) TestRunUserDefaults(c *check.C) { <ide> expected := "uid=0(root) gid=0(root)" <ide> if daemonPlatform == "windows" { <del> // TODO Windows: Remove this check once TP4 is no longer supported. <del> if windowsDaemonKV < 14250 { <del> expected = "uid=1000(SYSTEM) gid=1000(SYSTEM)" <del> } else { <del> expected = "uid=1000(ContainerAdministrator) gid=1000(ContainerAdministrator)" <del> } <add> expected = "uid=1000(ContainerAdministrator) gid=1000(ContainerAdministrator)" <ide> } <ide> out, _ := dockerCmd(c, "run", "busybox", "id") <ide> if !strings.Contains(out, expected) { <ide> func (s *DockerSuite) TestRunUserNotFound(c *check.C) { <ide> } <ide> <ide> func (s *DockerSuite) TestRunTwoConcurrentContainers(c *check.C) { <del> // TODO Windows. There are two bugs in TP4 which means this test cannot <del> // be reliably enabled. The first is a race condition where sometimes <del> // HCS CreateComputeSystem() will fail "Invalid class string". #4985252 and <del> // #4493430. <del> // <del> // The second, which is seen more readily by increasing the number of concurrent <del> // containers to 5 or more, is that CSRSS hangs. This may fixed in the TP4 ZDP. <del> // #4898773. 
<del> testRequires(c, DaemonIsLinux) <ide> sleepTime := "2" <ide> if daemonPlatform == "windows" { <del> sleepTime = "5" // Make more reliable on Windows <add> sleepTime = "20" // Make more reliable on Windows <ide> } <ide> group := sync.WaitGroup{} <ide> group.Add(2) <ide> func (s *DockerSuite) TestRunCopyVolumeUidGid(c *check.C) { <ide> <ide> // Test for #1582 <ide> func (s *DockerSuite) TestRunCopyVolumeContent(c *check.C) { <del> // TODO Windows, post TP4. Windows does not yet support volume functionality <add> // TODO Windows, post TP5. Windows does not yet support volume functionality <ide> // that copies from the image to the volume. <ide> testRequires(c, DaemonIsLinux) <ide> name := "testruncopyvolumecontent" <ide> func (s *DockerSuite) TestRunCleanupCmdOnEntrypoint(c *check.C) { <ide> out = strings.TrimSpace(out) <ide> expected := "root" <ide> if daemonPlatform == "windows" { <del> // TODO Windows: Remove this check once TP4 is no longer supported. <del> if windowsDaemonKV < 14250 { <del> expected = `nt authority\system` <del> } else { <del> expected = `user manager\containeradministrator` <del> } <add> expected = `user manager\containeradministrator` <ide> } <ide> if out != expected { <ide> c.Fatalf("Expected output %s, got %q", expected, out) <ide> func (s *DockerSuite) TestRunBindMounts(c *check.C) { <ide> defer os.RemoveAll(tmpDir) <ide> writeFile(path.Join(tmpDir, "touch-me"), "", c) <ide> <del> // TODO Windows Post TP4. Windows does not yet support :ro binds <add> // TODO Windows Post TP5. Windows does not yet support :ro binds <ide> if daemonPlatform != "windows" { <ide> // Test reading from a read-only bind mount <ide> out, _ := dockerCmd(c, "run", "-v", fmt.Sprintf("%s:/tmp:ro", tmpDir), "busybox", "ls", "/tmp") <ide> func (s *DockerSuite) TestRunAllocatePortInReservedRange(c *check.C) { <ide> <ide> // Regression test for #7792 <ide> func (s *DockerSuite) TestRunMountOrdering(c *check.C) { <del> // TODO Windows: Post TP4. Updated, but Windows does not support nested mounts currently. <add> // TODO Windows: Post TP5. Updated, but Windows does not support nested mounts currently. <ide> testRequires(c, SameHostDaemon, DaemonIsLinux, NotUserNamespace) <ide> prefix, _ := getPrefixAndSlashFromDaemonPlatform() <ide> <ide> func (s *DockerSuite) TestRunCreateVolumeEtc(c *check.C) { <ide> } <ide> <ide> func (s *DockerSuite) TestVolumesNoCopyData(c *check.C) { <del> // TODO Windows (Post TP4). Windows does not support volumes which <add> // TODO Windows (Post TP5). Windows does not support volumes which <ide> // are pre-populated such as is built in the dockerfile used in this test. <ide> testRequires(c, DaemonIsLinux) <ide> if _, err := buildImage("dataimage", <ide> func (s *DockerSuite) TestRunCapAddCHOWN(c *check.C) { <ide> <ide> // https://github.com/docker/docker/pull/14498 <ide> func (s *DockerSuite) TestVolumeFromMixedRWOptions(c *check.C) { <del> // TODO Windows post TP4. Enable the read-only bits once they are <add> // TODO Windows post TP5. Enable the read-only bits once they are <ide> // supported on the platform. <ide> prefix, slash := getPrefixAndSlashFromDaemonPlatform() <ide> <ide><path>integration-cli/docker_cli_run_unix_test.go <ide> func (s *DockerSuite) TestRunWithShmSize(c *check.C) { <ide> } <ide> <ide> func (s *DockerSuite) TestRunTmpfsMounts(c *check.C) { <del> // TODO Windows (Post TP4): This test cannot run on a Windows daemon as <add> // TODO Windows (Post TP5): This test cannot run on a Windows daemon as <ide> // Windows does not support tmpfs mounts. 
<ide> testRequires(c, DaemonIsLinux) <ide> if out, _, err := dockerCmdWithError("run", "--tmpfs", "/run", "busybox", "touch", "/run/somefile"); err != nil { <ide><path>integration-cli/docker_cli_tag_test.go <ide> func (s *DockerSuite) TestTagExistedNameWithForce(c *check.C) { <ide> } <ide> <ide> func (s *DockerSuite) TestTagWithPrefixHyphen(c *check.C) { <del> // TODO Windows CI. This fails on TP4 docker, but has since been fixed. <del> // Enable these tests for TP5. <del> testRequires(c, DaemonIsLinux) <ide> // Don't attempt to pull on Windows as not in hub. It's installed <ide> // as an image through .ensure-frozen-images-windows <ide> if daemonPlatform != "windows" { <ide> func (s *DockerSuite) TestTagWithPrefixHyphen(c *check.C) { <ide> // ensure tagging using official names works <ide> // ensure all tags result in the same name <ide> func (s *DockerSuite) TestTagOfficialNames(c *check.C) { <del> // TODO Windows CI. This fails on TP4 docker, but has since been fixed. <del> // Enable these tests for TP5. <del> testRequires(c, DaemonIsLinux) <ide> names := []string{ <ide> "docker.io/busybox", <ide> "index.docker.io/busybox", <ide> func (s *DockerSuite) TestTagOfficialNames(c *check.C) { <ide> <ide> // ensure tags can not match digests <ide> func (s *DockerSuite) TestTagMatchesDigest(c *check.C) { <del> // TODO Windows CI. This can be enabled for TP5, but will fail on TP4. <del> // This is due to the content addressibility changes which are not <del> // in the TP4 version of Docker. <del> testRequires(c, DaemonIsLinux) <ide> // Don't attempt to pull on Windows as not in hub. It's installed <ide> // as an image through .ensure-frozen-images-windows <ide> if daemonPlatform != "windows" { <ide> func (s *DockerSuite) TestTagMatchesDigest(c *check.C) { <ide> } <ide> <ide> func (s *DockerSuite) TestTagInvalidRepoName(c *check.C) { <del> // TODO Windows CI. This can be enabled for TP5, but will fail on the <del> // TP4 version of docker. <del> testRequires(c, DaemonIsLinux) <ide> // Don't attempt to pull on Windows as not in hub. It's installed <ide> // as an image through .ensure-frozen-images-windows <ide> if daemonPlatform != "windows" { <ide><path>integration-cli/docker_test_vars.go <ide> var ( <ide> <ide> // windowsDaemonKV is used on Windows to distinguish between different <ide> // versions. This is necessary to enable certain tests based on whether <del> // the platform supports it. For example, Windows Server 2016 TP3 does <del> // not support volumes, but TP4 does. <add> // the platform supports it. For example, Windows Server 2016 TP3 did <add> // not support volumes, but TP4 did. 
<ide> windowsDaemonKV int <ide> <ide> // daemonDefaultImage is the name of the default image to use when running <ide><path>volume/volume_test.go <ide> func TestParseMountSpec(t *testing.T) { <ide> `d:`, <ide> `d:\path`, <ide> `d:\path with space`, <del> // TODO Windows post TP4 - readonly support `d:\pathandmode:ro`, <add> // TODO Windows post TP5 - readonly support `d:\pathandmode:ro`, <ide> `c:\:d:\`, <ide> `c:\windows\:d:`, <ide> `c:\windows:d:\s p a c e`, <ide> func TestParseMountSpec(t *testing.T) { <ide> `name:D:`, <ide> `name:D::rW`, <ide> `name:D::RW`, <del> // TODO Windows post TP4 - readonly support `name:D::RO`, <add> // TODO Windows post TP5 - readonly support `name:D::RO`, <ide> `c:/:d:/forward/slashes/are/good/too`, <del> // TODO Windows post TP4 - readonly support `c:/:d:/including with/spaces:ro`, <add> // TODO Windows post TP5 - readonly support `c:/:d:/including with/spaces:ro`, <ide> `c:\Windows`, // With capital <ide> `c:\Program Files (x86)`, // With capitals and brackets <ide> } <ide> func TestParseMountSpecSplit(t *testing.T) { <ide> cases = []testParseMountSpec{ <ide> {`c:\:d:`, "local", `d:`, `c:\`, ``, "", true, false}, <ide> {`c:\:d:\`, "local", `d:\`, `c:\`, ``, "", true, false}, <del> // TODO Windows post TP4 - Add readonly support {`c:\:d:\:ro`, "local", `d:\`, `c:\`, ``, "", false, false}, <add> // TODO Windows post TP5 - Add readonly support {`c:\:d:\:ro`, "local", `d:\`, `c:\`, ``, "", false, false}, <ide> {`c:\:d:\:rw`, "local", `d:\`, `c:\`, ``, "", true, false}, <ide> {`c:\:d:\:foo`, "local", `d:\`, `c:\`, ``, "", false, true}, <ide> {`name:d::rw`, "local", `d:`, ``, `name`, "local", true, false}, <ide> {`name:d:`, "local", `d:`, ``, `name`, "local", true, false}, <del> // TODO Windows post TP4 - Add readonly support {`name:d::ro`, "local", `d:`, ``, `name`, "local", false, false}, <add> // TODO Windows post TP5 - Add readonly support {`name:d::ro`, "local", `d:`, ``, `name`, "local", false, false}, <ide> {`name:c:`, "", ``, ``, ``, "", true, true}, <ide> {`driver/name:c:`, "", ``, ``, ``, "", true, true}, <ide> }
9
Ruby
Ruby
fix typographical error
abed77fb30b13e154a0f6cd2fc334d53bbae734b
<ide><path>actionpack/lib/action_dispatch/routing.rb <ide> module ActionDispatch <ide> # <ide> # controller 'geocode' do <ide> # get 'geocode/:postalcode' => :show, constraints: { <del> # postalcode: /# Postcode format <add> # postalcode: /# Postalcode format <ide> # \d{5} #Prefix <ide> # (-\d{4})? #Suffix <ide> # /x
1
Go
Go
enable image push when files have spaces
2c8522b0a3ccaea946cc459ec650625235c8f4ad
<ide><path>daemon/graphdriver/lcow/lcow.go <ide> func (fgc *fileGetCloserFromSVM) Get(filename string) (io.ReadCloser, error) { <ide> return nil, fmt.Errorf("inconsistency detected: couldn't get short container path for %+v in utility VM %s", fgc.mvd, fgc.svm.config.Name) <ide> } <ide> file := path.Join(actualContainerPath, filename) <add> <add> // Ugly fix for MSFT internal bug VSO#19696554 <add> // If a file name contains a space, pushing an image fails. <add> // Using solution from https://groups.google.com/forum/#!topic/Golang-Nuts/DpldsmrhPio to escape for shell execution <add> file = "'" + strings.Join(strings.Split(file, "'"), `'"'"'`) + "'" <ide> if err := fgc.svm.runProcess(fmt.Sprintf("cat %s", file), nil, outOut, errOut); err != nil { <ide> logrus.Debugf("cat %s failed: %s", file, errOut.String()) <ide> return nil, err
1
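The quoting trick in this patch (wrap the whole path in single quotes and replace each embedded `'` with `'"'"'`) is not specific to Go. Below is a small JavaScript rendition of the same idea, for illustration only; the helper name and sample paths are invented.

```js
// Escape a string for use as a single argument in a POSIX shell command:
// close the quote, emit a double-quoted single quote, then reopen the quote.
function shellEscape(value) {
  return "'" + value.split("'").join(`'"'"'`) + "'";
}

console.log(shellEscape('/tmp/my file.png'));
// '/tmp/my file.png'
console.log(shellEscape("it's here.png"));
// 'it'"'"'s here.png'

// Building the same kind of command the patch constructs:
const file = "/layer/dir with spaces/it's.tar";
console.log(`cat ${shellEscape(file)}`);
// cat '/layer/dir with spaces/it'"'"'s.tar'
```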
Text
Text
add poojadurgad to collaborators
68b6c1a30d3a153bf9ba55f1fad477d78f4cf682
<ide><path>README.md <ide> For information about the governance of the Node.js project, see <ide> **Ali Ijaz Sheikh** &lt;[email protected]&gt; (he/him) <ide> * [oyyd](https://github.com/oyyd) - <ide> **Ouyang Yadong** &lt;[email protected]&gt; (he/him) <add>* [PoojaDurgad](https://github.com/PoojaDurgad) - <add>**Pooja D P** &lt;[email protected]&gt; (she/her) <ide> * [psmarshall](https://github.com/psmarshall) - <ide> **Peter Marshall** &lt;[email protected]&gt; (he/him) <ide> * [puzpuzpuz](https://github.com/puzpuzpuz) -
1
PHP
PHP
fix tests for pr
ebe243b596c2e56508c0ef28c6bce94abbc2b6b9
<ide><path>tests/Database/DatabaseEloquentModelTest.php <ide> public function testCloneModelMakesAFreshCopyOfTheModel() <ide> public function testModelObserversCanBeAttachedToModels() <ide> { <ide> EloquentModelStub::setEventDispatcher($events = m::mock('Illuminate\Events\Dispatcher')); <del> $events->shouldReceive('listen')->once()->with('eloquent.creating: EloquentModelStub', 'EloquentTestObserverStub@creating'); <del> $events->shouldReceive('listen')->once()->with('eloquent.saved: EloquentModelStub', 'EloquentTestObserverStub@saved'); <add> $events->shouldReceive('listen')->once()->with('eloquent.creating: EloquentModelStub', 'EloquentTestObserverStub@creating', 0); <add> $events->shouldReceive('listen')->once()->with('eloquent.saved: EloquentModelStub', 'EloquentTestObserverStub@saved', 0); <ide> $events->shouldReceive('forget'); <ide> EloquentModelStub::observe(new EloquentTestObserverStub); <ide> EloquentModelStub::flushEventListeners();
1
PHP
PHP
fix error when clearing filecache
b999b4d9ae90cc187965dd1ca442585a5138127e
<ide><path>lib/Cake/Cache/Engine/FileEngine.php <ide> protected function _clearDirectory($path, $now, $threshold) { <ide> continue; <ide> } <ide> $filePath = $path . $entry; <add> if (is_dir($filePath)) { <add> continue; <add> } <ide> $file = new SplFileObject($path . $entry, 'r'); <ide> <ide> if ($threshold) {
1
Python
Python
skip some test_multi_gpu_data_parallel_forward
9edff453621d0f8f02b36cae6766cd102f852201
<ide><path>tests/models/beit/test_modeling_beit.py <ide> <ide> from transformers import BeitConfig <ide> from transformers.models.auto import get_values <del>from transformers.testing_utils import require_torch, require_vision, slow, torch_device <add>from transformers.testing_utils import require_torch, require_torch_multi_gpu, require_vision, slow, torch_device <ide> from transformers.utils import cached_property, is_torch_available, is_vision_available <ide> <ide> from ...test_configuration_common import ConfigTester <ide> def test_config(self): <ide> def test_inputs_embeds(self): <ide> pass <ide> <add> @require_torch_multi_gpu <add> @unittest.skip(reason="BEiT has some layers using `add_module` which doesn't work well with `nn.DataParallel`") <add> def test_multi_gpu_data_parallel_forward(self): <add> pass <add> <ide> def test_model_common_attributes(self): <ide> config, _ = self.model_tester.prepare_config_and_inputs_for_common() <ide> <ide><path>tests/models/data2vec/test_modeling_data2vec_vision.py <ide> <ide> from transformers import Data2VecVisionConfig <ide> from transformers.models.auto import get_values <del>from transformers.testing_utils import require_torch, require_vision, slow, torch_device <add>from transformers.testing_utils import require_torch, require_torch_multi_gpu, require_vision, slow, torch_device <ide> from transformers.utils import cached_property, is_torch_available, is_vision_available <ide> <ide> from ...test_configuration_common import ConfigTester <ide> def test_inputs_embeds(self): <ide> # Data2VecVision does not use inputs_embeds <ide> pass <ide> <add> @require_torch_multi_gpu <add> @unittest.skip( <add> reason="Data2VecVision has some layers using `add_module` which doesn't work well with `nn.DataParallel`" <add> ) <add> def test_multi_gpu_data_parallel_forward(self): <add> pass <add> <ide> def test_model_common_attributes(self): <ide> config, _ = self.model_tester.prepare_config_and_inputs_for_common() <ide>
2
PHP
PHP
add warnings that were missed before
e2d81d4c8baeadc4a9bd51063f628c441882ac41
<ide><path>src/Mailer/Email.php <ide> public function getFrom() <ide> */ <ide> public function from($email = null, $name = null) <ide> { <add> deprecationWarning('Email::from() is deprecated. Use Email::setFrom() or Email::getFrom() instead.'); <ide> if ($email === null) { <ide> return $this->getFrom(); <ide> } <ide> public function getReturnPath() <ide> */ <ide> public function returnPath($email = null, $name = null) <ide> { <add> deprecationWarning('Email::returnPath() is deprecated. Use Email::setReturnPath() or Email::getReturnPath() instead.'); <ide> if ($email === null) { <ide> return $this->getReturnPath(); <ide> }
1
Ruby
Ruby
add an integration test for dym
76e7ddc6e0d32bb4caaa01043c5c03c0396cf8d2
<ide><path>actionpack/lib/action_controller/metal/exceptions.rb <ide> def initialize(error) <ide> end <ide> <ide> def corrections <del> maybe_these = @error.routes.named_routes.helper_names.grep(/#{@error.route_name}/) <del> maybe_these -= [@error.method_name.to_s] # remove exact match <del> <del> maybe_these.sort_by { |n| <del> DidYouMean::Jaro.distance(@error.route_name, n) <del> }.reverse.first(4) <add> if @error.method_name <add> maybe_these = @error.routes.named_routes.helper_names.grep(/#{@error.route_name}/) <add> maybe_these -= [@error.method_name.to_s] # remove exact match <add> <add> maybe_these.sort_by { |n| <add> DidYouMean::Jaro.distance(@error.route_name, n) <add> }.reverse.first(4) <add> else <add> [] <add> end <ide> end <ide> end <ide> <ide><path>actionpack/test/dispatch/routing_test.rb <ide> def app; APP end <ide> error = assert_raises(ActionController::UrlGenerationError, message) { product_path(id: nil, "id" => "url-tested") } <ide> assert_match message, error.message <ide> end <add> <add> if defined?(DidYouMean) && DidYouMean.respond_to?(:correct_error) <add> test "exceptions have suggestions for fix" do <add> error = assert_raises(ActionController::UrlGenerationError) { product_path(nil, "id" => "url-tested") } <add> assert_match "Did you mean?", error.message <add> end <add> end <add> <add> # FIXME: we should fix all locations that raise this exception to provide <add> # the info DidYouMean needs and then delete this test. Just adding the <add> # test for now because some parameters to the constructor are optional, and <add> # we don't want to break other code. <add> test "correct for empty UrlGenerationError" do <add> err = ActionController::UrlGenerationError.new("oh no!") <add> correction = ActionController::UrlGenerationError::Correction.new(err) <add> assert_equal [], correction.corrections <add> end <ide> end <ide> <ide> class TestDefaultUrlOptions < ActionDispatch::IntegrationTest
2
Ruby
Ruby
remove plain method
38c0bf7dfa7ed110b6c1c1205bbb04c9040065ee
<ide><path>activemodel/lib/active_model/attribute_methods.rb <ide> def match(method_name) <ide> def method_name(attr_name) <ide> @method_name % attr_name <ide> end <del> <del> def plain? <del> prefix.empty? && suffix.empty? <del> end <ide> end <ide> end <ide>
1
Javascript
Javascript
add imperative api to statusbar
edbe6a2b248fa552da6ab4a9d4e274766a10ad04
<ide><path>Examples/UIExplorer/StatusBarExample.js <ide> const showHideTransitions = [ <ide> 'slide', <ide> ]; <ide> <add>function getValue(values: Array<any>, index: number): any { <add> return values[index % values.length]; <add>} <add> <ide> const StatusBarExample = React.createClass({ <ide> getInitialState(): State { <ide> return { <ide> animated: true, <del> backgroundColor: this._getValue(colors, 0), <del> showHideTransition: this._getValue(showHideTransitions, 0), <add> backgroundColor: getValue(colors, 0), <add> showHideTransition: getValue(showHideTransitions, 0), <ide> }; <ide> }, <ide> <ide> _colorIndex: 0, <ide> _barStyleIndex: 0, <ide> _showHideTransitionIndex: 0, <ide> <del> _getValue(values: Array<any>, index: number): any { <del> return values[index % values.length]; <del> }, <del> <ide> render() { <ide> return ( <ide> <View> <ide> const StatusBarExample = React.createClass({ <ide> style={styles.wrapper} <ide> onPress={() => { <ide> this._barStyleIndex++; <del> this.setState({barStyle: this._getValue(barStyles, this._barStyleIndex)}); <add> this.setState({barStyle: getValue(barStyles, this._barStyleIndex)}); <ide> }}> <ide> <View style={styles.button}> <del> <Text>style: '{this._getValue(barStyles, this._barStyleIndex)}'</Text> <add> <Text>style: '{getValue(barStyles, this._barStyleIndex)}'</Text> <ide> </View> <ide> </TouchableHighlight> <ide> </View> <ide> const StatusBarExample = React.createClass({ <ide> this._showHideTransitionIndex++; <ide> this.setState({ <ide> showHideTransition: <del> this._getValue(showHideTransitions, this._showHideTransitionIndex), <add> getValue(showHideTransitions, this._showHideTransitionIndex), <ide> }); <ide> }}> <ide> <View style={styles.button}> <ide> <Text> <ide> showHideTransition: <del> '{this._getValue(showHideTransitions, this._showHideTransitionIndex)}' <add> '{getValue(showHideTransitions, this._showHideTransitionIndex)}' <ide> </Text> <ide> </View> <ide> </TouchableHighlight> <ide> const StatusBarExample = React.createClass({ <ide> style={styles.wrapper} <ide> onPress={() => { <ide> this._colorIndex++; <del> this.setState({backgroundColor: this._getValue(colors, this._colorIndex)}); <add> this.setState({backgroundColor: getValue(colors, this._colorIndex)}); <ide> }}> <ide> <View style={styles.button}> <del> <Text>backgroundColor: '{this._getValue(colors, this._colorIndex)}'</Text> <add> <Text>backgroundColor: '{getValue(colors, this._colorIndex)}'</Text> <ide> </View> <ide> </TouchableHighlight> <ide> </View> <ide> const StatusBarExample = React.createClass({ <ide> }, <ide> }); <ide> <add>const StatusBarStaticExample = React.createClass({ <add> _colorIndex: 0, <add> _barStyleIndex: 0, <add> _showHideTransitionIndex: 0, <add> <add> getInitialState() { <add> return { <add> backgroundColor: getValue(colors, 0), <add> barStyle: getValue(barStyles, 0), <add> hidden: false, <add> networkActivityIndicatorVisible: false, <add> translucent: false, <add> }; <add> }, <add> <add> render() { <add> return ( <add> <View> <add> <View> <add> <TouchableHighlight <add> style={styles.wrapper} <add> onPress={() => { <add> const hidden = !this.state.hidden; <add> StatusBar.setHidden(hidden, 'slide'); <add> this.setState({hidden}); <add> }}> <add> <View style={styles.button}> <add> <Text>hidden: {this.state.hidden ? 
'true' : 'false'}</Text> <add> </View> <add> </TouchableHighlight> <add> </View> <add> <Text style={styles.title}>iOS</Text> <add> <View> <add> <TouchableHighlight <add> style={styles.wrapper} <add> onPress={() => { <add> this._barStyleIndex++; <add> const barStyle = getValue(barStyles, this._barStyleIndex); <add> StatusBar.setBarStyle(barStyle, true); <add> this.setState({barStyle}); <add> }}> <add> <View style={styles.button}> <add> <Text>style: '{getValue(barStyles, this._barStyleIndex)}'</Text> <add> </View> <add> </TouchableHighlight> <add> </View> <add> <View> <add> <TouchableHighlight <add> style={styles.wrapper} <add> onPress={() => { <add> const networkActivityIndicatorVisible = !this.state.networkActivityIndicatorVisible; <add> StatusBar.setNetworkActivityIndicatorVisible(networkActivityIndicatorVisible); <add> this.setState({networkActivityIndicatorVisible}); <add> }}> <add> <View style={styles.button}> <add> <Text> <add> networkActivityIndicatorVisible: <add> {this.state.networkActivityIndicatorVisible ? 'true' : 'false'} <add> </Text> <add> </View> <add> </TouchableHighlight> <add> </View> <add> <Text style={styles.title}>Android</Text> <add> <View> <add> <TouchableHighlight <add> style={styles.wrapper} <add> onPress={() => { <add> this._colorIndex++; <add> const backgroundColor = getValue(colors, this._colorIndex); <add> StatusBar.setBackgroundColor(backgroundColor, true); <add> this.setState({backgroundColor}); <add> }}> <add> <View style={styles.button}> <add> <Text>backgroundColor: '{getValue(colors, this._colorIndex)}'</Text> <add> </View> <add> </TouchableHighlight> <add> </View> <add> <View> <add> <TouchableHighlight <add> style={styles.wrapper} <add> onPress={() => { <add> const translucent = !this.state.translucent; <add> const backgroundColor = !this.state.translucent ? 'rgba(0, 0, 0, 0.4)' : 'black'; <add> StatusBar.setTranslucent(translucent); <add> StatusBar.setBackgroundColor(backgroundColor, true); <add> this.setState({ <add> translucent, <add> backgroundColor, <add> }); <add> }}> <add> <View style={styles.button}> <add> <Text>translucent: {this.state.translucent ? 'true' : 'false'}</Text> <add> </View> <add> </TouchableHighlight> <add> </View> <add> </View> <add> ); <add> }, <add>}); <add> <ide> exports.examples = [{ <del> title: 'Status Bar', <add> title: 'StatusBar', <ide> render() { <ide> return <StatusBarExample />; <ide> }, <add>}, { <add> title: 'StatusBar static API', <add> render() { <add> return <StatusBarStaticExample />; <add> }, <ide> }]; <ide> <ide> var styles = StyleSheet.create({ <ide><path>Libraries/Components/StatusBar/StatusBar.js <ide> const processColor = require('processColor'); <ide> <ide> const StatusBarManager = require('NativeModules').StatusBarManager; <ide> <add>export type StatusBarStyle = $Enum<{ <add> 'default': string, <add> 'light-content': string, <add>}>; <add> <add>export type StatusBarAnimation = $Enum<{ <add> 'none': string, <add> 'fade': string, <add> 'slide': string, <add>}>; <add> <ide> type DefaultProps = { <ide> animated: boolean; <ide> }; <ide> <ide> /** <ide> * Merges the prop stack with the default values. 
<ide> */ <del>function mergePropsStack(propsStack: Array<Object>): Object { <add>function mergePropsStack(propsStack: Array<Object>, defaultValues: Object): Object { <ide> return propsStack.reduce((prev, cur) => { <ide> return Object.assign(prev, cur); <del> }, { <del> backgroundColor: 'black', <del> barStyle: 'default', <del> translucent: false, <del> hidden: false, <del> networkActivityIndicatorVisible: false, <del> }); <add> }, defaultValues); <ide> } <ide> <ide> /** <ide> function mergePropsStack(propsStack: Array<Object>): Object { <ide> * /> <ide> * </View> <ide> * ``` <add> * <add> * ### Imperative API <add> * <add> * For cases where using a component is not ideal, there is also an imperative <add> * API exposed as static functions on the component. It is however not recommended <add> * to use the static API and the compoment for the same prop because any value <add> * set by the static API will get overriden by the one set by the component in <add> * the next render. <ide> */ <ide> const StatusBar = React.createClass({ <ide> statics: { <ide> _propsStack: [], <add> _defaultProps: { <add> backgroundColor: 'black', <add> barStyle: 'default', <add> translucent: false, <add> hidden: false, <add> networkActivityIndicatorVisible: false, <add> }, <add> <add> // Provide an imperative API as static functions of the component. <add> // See the corresponding prop for more detail. <add> setHidden(hidden: boolean, animation?: StatusBarAnimation) { <add> animation = animation || 'none'; <add> StatusBar._defaultProps.hidden = hidden; <add> if (Platform.OS === 'ios') { <add> StatusBarManager.setHidden(hidden, animation); <add> } else if (Platform.OS === 'android') { <add> StatusBarManager.setHidden(hidden); <add> } <add> }, <add> <add> setBarStyle(style: StatusBarStyle, animated?: boolean) { <add> if (Platform.OS !== 'ios') { <add> console.warn('`setBarStyle` is only available on iOS'); <add> return; <add> } <add> animated = animated || false; <add> StatusBar._defaultProps.barStyle = style; <add> StatusBarManager.setStyle(style, animated); <add> }, <add> <add> setNetworkActivityIndicatorVisible(visible: boolean) { <add> if (Platform.OS !== 'ios') { <add> console.warn('`setNetworkActivityIndicatorVisible` is only available on iOS'); <add> return; <add> } <add> StatusBar._defaultProps.networkActivityIndicatorVisible = visible; <add> StatusBarManager.setNetworkActivityIndicatorVisible(visible); <add> }, <add> <add> setBackgroundColor(color, animated?: boolean) { <add> if (Platform.OS !== 'android') { <add> console.warn('`setBackgroundColor` is only available on Android'); <add> return; <add> } <add> animated = animated || false; <add> StatusBar._defaultProps.backgroundColor = color; <add> StatusBarManager.setColor(processColor(color), animated); <add> }, <add> <add> setTranslucent(translucent: boolean) { <add> if (Platform.OS !== 'android') { <add> console.warn('`setTranslucent` is only available on Android'); <add> return; <add> } <add> StatusBar._defaultProps.translucent = translucent; <add> StatusBarManager.setTranslucent(translucent); <add> }, <ide> }, <ide> <ide> propTypes: { <ide> const StatusBar = React.createClass({ <ide> * Updates the native status bar with the props from the stack. 
<ide> */ <ide> _updatePropsStack() { <del> const mergedProps = mergePropsStack(StatusBar._propsStack); <add> const mergedProps = mergePropsStack(StatusBar._propsStack, StatusBar._defaultProps); <ide> <ide> if (Platform.OS === 'ios') { <ide> if (mergedProps.barStyle !== undefined) { <ide><path>Libraries/Components/StatusBar/StatusBarIOS.ios.js <ide> */ <ide> 'use strict'; <ide> <del>var RCTStatusBarManager = require('NativeModules').StatusBarManager; <add>const StatusBar = require('StatusBar'); <ide> <del>type StatusBarStyle = $Enum<{ <del> 'default': string, <del> 'light-content': string, <del>}>; <add>import type {StatusBarStyle, StatusBarAnimation} from 'StatusBar'; <ide> <del>type StatusBarAnimation = $Enum<{ <del> 'none': string, <del> 'fade': string, <del> 'slide': string, <del>}>; <del> <del>var StatusBarIOS = { <add>/** <add> * Deprecated. Use `StatusBar` instead. <add> */ <add>const StatusBarIOS = { <ide> <ide> setStyle(style: StatusBarStyle, animated?: boolean) { <del> animated = animated || false; <del> RCTStatusBarManager.setStyle(style, animated); <add> console.warn('`StatusBarIOS.setStyle` is deprecated. Use `StatusBar.setBarStyle` instead.'); <add> StatusBar.setBarStyle(style, animated); <ide> }, <ide> <ide> setHidden(hidden: boolean, animation?: StatusBarAnimation) { <del> animation = animation || 'none'; <del> RCTStatusBarManager.setHidden(hidden, animation); <add> console.warn('`StatusBarIOS.setHidden` is deprecated. Use `StatusBar.setHidden` instead.'); <add> StatusBar.setHidden(hidden, animation); <ide> }, <ide> <ide> setNetworkActivityIndicatorVisible(visible: boolean) { <del> RCTStatusBarManager.setNetworkActivityIndicatorVisible(visible); <add> console.warn( <add> '`StatusBarIOS.setNetworkActivityIndicatorVisible` is deprecated. ' + <add> 'Use `StatusBar.setNetworkActivityIndicatorVisible` instead.' <add> ); <add> StatusBar.setNetworkActivityIndicatorVisible(visible); <ide> }, <ide> }; <ide>
3
Go
Go
check existence of network chain before creating
6cff09f710e64fec53ced2b289864bf89bc5ac9c
<ide><path>libnetwork/drivers/overlay/filter.go <ide> func rawIPTables(args ...string) error { <ide> return nil <ide> } <ide> <add>func chainExists(cname string) bool { <add> if err := rawIPTables("-L", cname); err != nil { <add> return false <add> } <add> <add> return true <add>} <add> <ide> func setupGlobalChain() { <ide> if err := rawIPTables("-N", globalChain); err != nil { <del> logrus.Errorf("could not create global overlay chain: %v", err) <del> return <add> logrus.Debugf("could not create global overlay chain: %v", err) <ide> } <ide> <ide> if err := rawIPTables("-A", globalChain, "-j", "RETURN"); err != nil { <del> logrus.Errorf("could not install default return chain in the overlay global chain: %v", err) <del> return <add> logrus.Debugf("could not install default return chain in the overlay global chain: %v", err) <ide> } <ide> } <ide> <ide> func setNetworkChain(cname string, remove bool) error { <ide> // Initialize the onetime global overlay chain <ide> filterOnce.Do(setupGlobalChain) <ide> <add> exists := chainExists(cname) <add> <ide> opt := "-N" <ide> // In case of remove, make sure to flush the rules in the chain <del> if remove { <add> if remove && exists { <ide> if err := rawIPTables("-F", cname); err != nil { <ide> return fmt.Errorf("failed to flush overlay network chain %s rules: %v", cname, err) <ide> } <ide> opt = "-X" <ide> } <ide> <del> if err := rawIPTables(opt, cname); err != nil { <del> return fmt.Errorf("failed network chain operation %q for chain %s: %v", opt, cname, err) <add> if (!remove && !exists) || (remove && exists) { <add> if err := rawIPTables(opt, cname); err != nil { <add> return fmt.Errorf("failed network chain operation %q for chain %s: %v", opt, cname, err) <add> } <ide> } <ide> <ide> if !remove { <del> if err := rawIPTables("-A", cname, "-j", "DROP"); err != nil { <del> return fmt.Errorf("failed adding default drop rule to overlay network chain %s: %v", cname, err) <add> if !iptables.Exists(iptables.Filter, cname, "-j", "DROP") { <add> if err := rawIPTables("-A", cname, "-j", "DROP"); err != nil { <add> return fmt.Errorf("failed adding default drop rule to overlay network chain %s: %v", cname, err) <add> } <ide> } <ide> } <ide> <ide><path>libnetwork/drivers/overlay/ov_network.go <ide> func (n *network) destroySandbox() { <ide> } <ide> <ide> if s.vxlanName != "" { <del> err := deleteVxlan(s.vxlanName) <add> err := deleteInterface(s.vxlanName) <ide> if err != nil { <ide> logrus.Warnf("could not cleanup sandbox properly: %v", err) <ide> } <ide> func setHostMode() { <ide> return <ide> } <ide> <del> defer deleteVxlan("testvxlan") <add> defer deleteInterface("testvxlan") <ide> <ide> path := "/proc/self/ns/net" <ide> f, err := os.OpenFile(path, os.O_RDONLY, 0) <ide> func isOverlap(nw *net.IPNet) bool { <ide> } <ide> <ide> func (n *network) initSubnetSandbox(s *subnet) error { <del> if hostMode && isOverlap(s.subnetIP) { <del> return fmt.Errorf("overlay subnet %s has conflicts in the host while running in host mode", s.subnetIP.String()) <add> brName := n.generateBridgeName(s) <add> vxlanName := n.generateVxlanName(s) <add> <add> if hostMode { <add> // Try to delete stale bridge interface if it exists <add> deleteInterface(brName) <add> // Try to delete the vxlan interface by vni if already present <add> deleteVxlanByVNI(n.vxlanID(s)) <add> <add> if isOverlap(s.subnetIP) { <add> return fmt.Errorf("overlay subnet %s has conflicts in the host while running in host mode", s.subnetIP.String()) <add> } <ide> } <ide> <ide> // create a bridge and vxlan device 
for this subnet and move it to the sandbox <del> brName := n.generateBridgeName(s) <ide> sbox := n.sandbox() <ide> <ide> if err := sbox.AddInterface(brName, "br", <ide> func (n *network) initSubnetSandbox(s *subnet) error { <ide> return fmt.Errorf("bridge creation in sandbox failed for subnet %q: %v", s.subnetIP.String(), err) <ide> } <ide> <del> vxlanName := n.generateVxlanName(s) <del> <del> // Try to delete the vxlan interface by vni if already present <del> deleteVxlanByVNI(n.vxlanID(s)) <del> <ide> err := createVxlan(vxlanName, n.vxlanID(s)) <ide> if err != nil { <ide> return err <ide><path>libnetwork/drivers/overlay/ov_utils.go <ide> func createVxlan(name string, vni uint32) error { <ide> return nil <ide> } <ide> <del>func deleteVxlan(name string) error { <add>func deleteInterface(name string) error { <ide> defer osl.InitOSContext()() <ide> <ide> link, err := netlink.LinkByName(name) <ide> if err != nil { <del> return fmt.Errorf("failed to find vxlan interface with name %s: %v", name, err) <add> return fmt.Errorf("failed to find interface with name %s: %v", name, err) <ide> } <ide> <ide> if err := netlink.LinkDel(link); err != nil { <del> return fmt.Errorf("error deleting vxlan interface: %v", err) <add> return fmt.Errorf("error deleting interface with name %s: %v", name, err) <ide> } <ide> <ide> return nil
3
Python
Python
fix weight save
b2ab55611bbb01875cdac7a58717291ebccb1d4d
<ide><path>tests/keras/test_models.py <ide> def test_siamese_1(): <ide> model.get_config(verbose=0) <ide> <ide> # test weight saving <del> fname = 'test_merge_sum_temp.h5' <add> fname = 'test_siamese_1.h5' <add> model.save_weights(fname, overwrite=True) <ide> left = Sequential() <ide> left.add(Dense(nb_hidden, input_shape=(input_dim,))) <ide> left.add(Activation('relu')) <ide> def test_siamese_2(): <ide> model.get_config(verbose=0) <ide> <ide> # test weight saving <del> fname = 'test_merge_sum_temp.h5' <add> fname = 'test_siamese_2.h5' <ide> model.save_weights(fname, overwrite=True) <ide> left = Sequential() <ide> left.add(Dense(nb_hidden, input_shape=(input_dim,)))
1
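The fix above gives each test its own weights file and saves before loading. Below is a generic sketch of that save-then-load round-trip using a throwaway temp file; `model` stands in for any object exposing `save_weights`/`load_weights`, so nothing here is tied to a particular Keras version.

```python
# "model" is an assumption of this sketch: any object with save_weights/load_weights.
import os
import tempfile


def roundtrip_weights(model):
    fd, fname = tempfile.mkstemp(suffix=".h5")  # unique file per call, no clashes between tests
    os.close(fd)
    try:
        model.save_weights(fname, overwrite=True)  # save first...
        model.load_weights(fname)                  # ...then reload the same file
    finally:
        os.remove(fname)
```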
Python
Python
add missing packages to setup.py after refactoring
e524a8cb29fa5d748ff4347dc04d0381da09e3fb
<ide><path>setup.py <ide> def run(self): <ide> requires=([], ['ssl', 'simplejson'],)[pre_python26], <ide> packages=[ <ide> 'libcloud', <del> 'libcloud.drivers' <add> 'libcloud.common', <add> 'libcloud.compute', <add> 'libcloud.compute.drivers', <add> 'libcloud.drivers', <ide> ], <ide> package_dir={ <ide> 'libcloud': 'libcloud',
1
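The patch above adds newly created subpackages to an explicit `packages` list. A common way to avoid missing packages after a refactor is setuptools' automatic discovery; the sketch below is generic, and the metadata values are placeholders rather than the project's real setup.py.

```python
# Placeholder metadata; find_packages() walks the tree so a refactor cannot
# silently leave a new subpackage out of the distribution.
from setuptools import find_packages, setup

setup(
    name="example-project",
    version="0.0.0",
    packages=find_packages(exclude=["test", "test.*"]),
)
```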
Python
Python
fix exception handling in list_records
ea472407ada12b9a5e6832ccada7723a317bfafd
<ide><path>libcloud/dns/drivers/linode.py <ide> def list_zones(self): <ide> <ide> def list_records(self, zone): <ide> params = {'api_action': 'domain.resource.list', 'DOMAINID': zone.id} <del> data = self.connection.request(API_ROOT, params=params).objects[0] <add> <add> try: <add> data = self.connection.request(API_ROOT, params=params).objects[0] <add> except LinodeException, e: <add> # TODO: Refactor LinodeException, args[0] should be error_id <add> if e.args[0] == 5: <add> raise ZoneDoesNotExistError(value='', driver=self, <add> zone_id=zone.id) <add> <ide> records = self._to_records(items=data, zone=zone) <ide> return records <ide>
1
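A sketch of the error-translation pattern in the patch above: a low-level provider error is caught, its error code inspected, and a domain-specific "zone does not exist" exception raised instead. The exception classes below are placeholders, not libcloud's real types; only the idea of checking the provider's error code mirrors the patch.

```python
# Placeholder exception types; the code-5 check mirrors the patch's
# "domain not found" condition but is otherwise an assumption of this sketch.
class ProviderError(Exception):
    def __init__(self, code, message):
        super().__init__(message)
        self.code = code


class ZoneDoesNotExistError(Exception):
    pass


def list_records(zone_id, api_call):
    try:
        return api_call(zone_id)
    except ProviderError as exc:
        if exc.code == 5:
            raise ZoneDoesNotExistError(f"zone {zone_id} does not exist") from exc
        raise
```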
Javascript
Javascript
set charset as 'utf-8' in sourcemap plugin
af4c2e88c04df94d47057a667e682cb1e8c76a32
<ide><path>lib/EvalSourceMapDevToolModuleTemplatePlugin.js <ide> EvalSourceMapDevToolModuleTemplatePlugin.prototype.apply = function(moduleTempla <ide> } <ide> sourceMap.sourceRoot = ""; <ide> sourceMap.file = module.id + ".js"; <del> var footer = self.sourceMapComment.replace(/\[url\]/g, "data:application/json;base64," + new Buffer(JSON.stringify(sourceMap)).toString("base64")); <add> var footer = self.sourceMapComment.replace(/\[url\]/g, "data:application/json;charset=utf-8;base64," + new Buffer(JSON.stringify(sourceMap)).toString("base64")); <ide> source.__EvalSourceMapDevToolData = new RawSource("eval(" + JSON.stringify(content + footer) + ");"); <ide> return source.__EvalSourceMapDevToolData; <ide> }); <ide><path>lib/SourceMapDevToolPlugin.js <ide> SourceMapDevToolPlugin.prototype.apply = function(compiler) { <ide> return JSON.stringify(sourceMap); <ide> }) <ide> .replace(/\[url\]/g, function() { <del> return "data:application/json;base64," + <add> return "data:application/json;charset=utf-8;base64," + <ide> new Buffer(JSON.stringify(sourceMap)).toString("base64"); <ide> }) <ide> );
2
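The webpack change above adds `charset=utf-8` to the base64-encoded `data:` URI that carries the inline source map. The general shape of such a URI, sketched in Python purely for illustration:

```python
import base64
import json

source_map = {"version": 3, "sources": [], "mappings": ""}  # toy map, not a real one
payload = base64.b64encode(json.dumps(source_map).encode("utf-8")).decode("ascii")
url = "data:application/json;charset=utf-8;base64," + payload
print(url[:60])
```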
Javascript
Javascript
remove workaround for unsupported openssls
ba7551cad8abd2e460763b06efa4207be96a7a19
<ide><path>test/parallel/test-https-agent-session-eviction.js <ide> const { readKey } = require('../common/fixtures'); <ide> if (!common.hasCrypto) <ide> common.skip('missing crypto'); <ide> <del>const assert = require('assert'); <ide> const https = require('https'); <del>const { OPENSSL_VERSION_NUMBER, SSL_OP_NO_TICKET } = <del> require('crypto').constants; <add>const { SSL_OP_NO_TICKET } = require('crypto').constants; <ide> <ide> const options = { <ide> key: readKey('agent1-key.pem'), <ide> function second(server, session) { <ide> res.resume(); <ide> }); <ide> <del> if (OPENSSL_VERSION_NUMBER >= 0x10100000) { <del> // Although we have a TLS 1.2 session to offer to the TLS 1.0 server, <del> // connection to the TLS 1.0 server should work. <del> req.on('response', common.mustCall(function(res) { <del> // The test is now complete for OpenSSL 1.1.0. <del> server.close(); <del> })); <del> } else { <del> // OpenSSL 1.0.x mistakenly locked versions based on the session it was <del> // offering. This causes this sequent request to fail. Let it fail, but <del> // test that this is mitigated on the next try by invalidating the session. <del> req.on('error', common.mustCall(function(err) { <del> assert(/wrong version number/.test(err.message)); <del> <del> req.on('close', function() { <del> third(server); <del> }); <del> })); <del> } <del> req.end(); <del>} <del> <del>// Try one more time - session should be evicted! <del>function third(server) { <del> const req = https.request({ <del> port: server.address().port, <del> rejectUnauthorized: false <del> }, function(res) { <del> res.resume(); <del> assert(!req.socket.isSessionReused()); <add> // Although we have a TLS 1.2 session to offer to the TLS 1.0 server, <add> // connection to the TLS 1.0 server should work. <add> req.on('response', common.mustCall(function(res) { <add> // The test is now complete for OpenSSL 1.1.0. <ide> server.close(); <del> }); <del> req.on('error', common.mustNotCall()); <add> })); <add> <ide> req.end(); <ide> }
1
Java
Java
fix spelling of word 'recommendation'
9b87ea017bc58c0921c0ba20cc1ff099c42eea67
<ide><path>spring-web/src/main/java/org/springframework/http/HttpHeaders.java <ide> * @author Sebastien Deleuze <ide> * @author Brian Clozel <ide> * @author Juergen Hoeller <add> * @author Josh Long <ide> * @since 3.0 <ide> */ <ide> public class HttpHeaders implements MultiValueMap<String, String>, Serializable { <ide> public class HttpHeaders implements MultiValueMap<String, String>, Serializable <ide> public static final String ACCEPT_RANGES = "Accept-Ranges"; <ide> /** <ide> * The CORS {@code Access-Control-Allow-Credentials} response header field name. <del> * @see <a href="http://www.w3.org/TR/cors/">CORS W3C recommandation</a> <add> * @see <a href="http://www.w3.org/TR/cors/">CORS W3C recommendation</a> <ide> */ <ide> public static final String ACCESS_CONTROL_ALLOW_CREDENTIALS = "Access-Control-Allow-Credentials"; <ide> /** <ide> * The CORS {@code Access-Control-Allow-Headers} response header field name. <del> * @see <a href="http://www.w3.org/TR/cors/">CORS W3C recommandation</a> <add> * @see <a href="http://www.w3.org/TR/cors/">CORS W3C recommendation</a> <ide> */ <ide> public static final String ACCESS_CONTROL_ALLOW_HEADERS = "Access-Control-Allow-Headers"; <ide> /** <ide> * The CORS {@code Access-Control-Allow-Methods} response header field name. <del> * @see <a href="http://www.w3.org/TR/cors/">CORS W3C recommandation</a> <add> * @see <a href="http://www.w3.org/TR/cors/">CORS W3C recommendation</a> <ide> */ <ide> public static final String ACCESS_CONTROL_ALLOW_METHODS = "Access-Control-Allow-Methods"; <ide> /** <ide> * The CORS {@code Access-Control-Allow-Origin} response header field name. <del> * @see <a href="http://www.w3.org/TR/cors/">CORS W3C recommandation</a> <add> * @see <a href="http://www.w3.org/TR/cors/">CORS W3C recommendation</a> <ide> */ <ide> public static final String ACCESS_CONTROL_ALLOW_ORIGIN = "Access-Control-Allow-Origin"; <ide> /** <ide> * The CORS {@code Access-Control-Expose-Headers} response header field name. <del> * @see <a href="http://www.w3.org/TR/cors/">CORS W3C recommandation</a> <add> * @see <a href="http://www.w3.org/TR/cors/">CORS W3C recommendation</a> <ide> */ <ide> public static final String ACCESS_CONTROL_EXPOSE_HEADERS = "Access-Control-Expose-Headers"; <ide> /** <ide> * The CORS {@code Access-Control-Max-Age} response header field name. <del> * @see <a href="http://www.w3.org/TR/cors/">CORS W3C recommandation</a> <add> * @see <a href="http://www.w3.org/TR/cors/">CORS W3C recommendation</a> <ide> */ <ide> public static final String ACCESS_CONTROL_MAX_AGE = "Access-Control-Max-Age"; <ide> /** <ide> * The CORS {@code Access-Control-Request-Headers} request header field name. <del> * @see <a href="http://www.w3.org/TR/cors/">CORS W3C recommandation</a> <add> * @see <a href="http://www.w3.org/TR/cors/">CORS W3C recommendation</a> <ide> */ <ide> public static final String ACCESS_CONTROL_REQUEST_HEADERS = "Access-Control-Request-Headers"; <ide> /** <ide> * The CORS {@code Access-Control-Request-Method} request header field name. <del> * @see <a href="http://www.w3.org/TR/cors/">CORS W3C recommandation</a> <add> * @see <a href="http://www.w3.org/TR/cors/">CORS W3C recommendation</a> <ide> */ <ide> public static final String ACCESS_CONTROL_REQUEST_METHOD = "Access-Control-Request-Method"; <ide> /**
1
PHP
PHP
move tests around
b9859682318c937938da60d79ec15d66e3a7c0a1
<ide><path>tests/TestCase/Routing/RouteCollectionTest.php <ide> public function testParseRequest() <ide> ]; <ide> $this->assertEquals($expected, $result); <ide> <del> $request = new ServerRequest(['url' => '/b/the-thing?one=two']); <del> $result = $this->collection->parseRequest($request); <del> $expected = [ <del> 'controller' => 'Articles', <del> 'action' => 'view', <del> 'id' => 'the-thing', <del> 'pass' => [], <del> 'plugin' => null, <del> 'key' => 'value', <del> '?' => ['one' => 'two'], <del> '_matchedRoute' => '/b/:id', <del> ]; <del> $this->assertEquals($expected, $result); <del> <ide> $request = new ServerRequest(['url' => '/b/media/search']); <ide> $result = $this->collection->parseRequest($request); <ide> $expected = [ <ide> public function testParseRequest() <ide> '_matchedRoute' => '/b/media/search/*', <ide> ]; <ide> $this->assertEquals($expected, $result); <add> <add> $request = new ServerRequest(['url' => '/b/the-thing?one=two']); <add> $result = $this->collection->parseRequest($request); <add> $expected = [ <add> 'controller' => 'Articles', <add> 'action' => 'view', <add> 'id' => 'the-thing', <add> 'pass' => [], <add> 'plugin' => null, <add> 'key' => 'value', <add> '?' => ['one' => 'two'], <add> '_matchedRoute' => '/b/:id', <add> ]; <add> $this->assertEquals($expected, $result); <add> <ide> } <ide> <ide> /**
1
Python
Python
fix french tag map
9d13288f730c212aecc38255b3a777e0555e34ca
<ide><path>spacy/lang/fr/tag_map.py <ide> from __future__ import unicode_literals <ide> <ide> from ...symbols import POS, PUNCT, ADJ, CCONJ, NUM, DET, ADV, ADP, X, VERB <del>from ...symbols import NOUN, PROPN, PART, INTJ, SPACE, PRON <add>from ...symbols import NOUN, PROPN, PART, INTJ, SPACE, PRON, AUX, SCONJ <ide> <ide> <ide> TAG_MAP = { <ide> "ADV__Polarity=Neg": {POS: ADV}, <ide> "ADV__PronType=Int": {POS: ADV}, <ide> "ADV___": {POS: ADV}, <del> "AUX__Gender=Fem|Number=Plur|Tense=Past|VerbForm=Part": {POS: "AUX"}, <del> "AUX__Gender=Fem|Number=Plur|Tense=Past|VerbForm=Part|Voice=Pass": {POS: "AUX"}, <del> "AUX__Gender=Fem|Number=Sing|Tense=Past|VerbForm=Part": {POS: "AUX"}, <del> "AUX__Gender=Fem|Number=Sing|Tense=Past|VerbForm=Part|Voice=Pass": {POS: "AUX"}, <del> "AUX__Gender=Masc|Number=Plur|Tense=Past|VerbForm=Part": {POS: "AUX"}, <del> "AUX__Gender=Masc|Number=Plur|Tense=Past|VerbForm=Part|Voice=Pass": {POS: "AUX"}, <del> "AUX__Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part": {POS: "AUX"}, <del> "AUX__Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part|Voice=Pass": {POS: "AUX"}, <del> "AUX__Mood=Cnd|Number=Plur|Person=1|Tense=Pres|VerbForm=Fin": {POS: "AUX"}, <del> "AUX__Mood=Cnd|Number=Plur|Person=2|Tense=Pres|VerbForm=Fin": {POS: "AUX"}, <del> "AUX__Mood=Cnd|Number=Plur|Person=3|Tense=Pres|VerbForm=Fin": {POS: "AUX"}, <del> "AUX__Mood=Cnd|Number=Sing|Person=1|Tense=Pres|VerbForm=Fin": {POS: "AUX"}, <del> "AUX__Mood=Cnd|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin": {POS: "AUX"}, <del> "AUX__Mood=Imp|Number=Plur|Person=2|Tense=Pres|VerbForm=Fin": {POS: "AUX"}, <del> "AUX__Mood=Ind|Number=Plur|Person=1|Tense=Fut|VerbForm=Fin": {POS: "AUX"}, <del> "AUX__Mood=Ind|Number=Plur|Person=1|Tense=Imp|VerbForm=Fin": {POS: "AUX"}, <del> "AUX__Mood=Ind|Number=Plur|Person=1|Tense=Pres|VerbForm=Fin": {POS: "AUX"}, <del> "AUX__Mood=Ind|Number=Plur|Person=2|Tense=Pres|VerbForm=Fin": {POS: "AUX"}, <del> "AUX__Mood=Ind|Number=Plur|Person=3|Tense=Fut|VerbForm=Fin": {POS: "AUX"}, <del> "AUX__Mood=Ind|Number=Plur|Person=3|Tense=Imp|VerbForm=Fin": {POS: "AUX"}, <del> "AUX__Mood=Ind|Number=Plur|Person=3|Tense=Past|VerbForm=Fin": {POS: "AUX"}, <del> "AUX__Mood=Ind|Number=Plur|Person=3|Tense=Pres|VerbForm=Fin": {POS: "AUX"}, <del> "AUX__Mood=Ind|Number=Sing|Person=1|Tense=Imp|VerbForm=Fin": {POS: "AUX"}, <del> "AUX__Mood=Ind|Number=Sing|Person=1|Tense=Pres|VerbForm=Fin": {POS: "AUX"}, <del> "AUX__Mood=Ind|Number=Sing|Person=2|Tense=Imp|VerbForm=Fin": {POS: "AUX"}, <del> "AUX__Mood=Ind|Number=Sing|Person=3|Tense=Fut|VerbForm=Fin": {POS: "AUX"}, <del> "AUX__Mood=Ind|Number=Sing|Person=3|Tense=Imp|VerbForm=Fin": {POS: "AUX"}, <del> "AUX__Mood=Ind|Number=Sing|Person=3|Tense=Past|VerbForm=Fin": {POS: "AUX"}, <del> "AUX__Mood=Ind|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin": {POS: "AUX"}, <del> "AUX__Mood=Sub|Number=Plur|Person=1|Tense=Pres|VerbForm=Fin": {POS: "AUX"}, <del> "AUX__Mood=Sub|Number=Plur|Person=2|Tense=Pres|VerbForm=Fin": {POS: "AUX"}, <del> "AUX__Mood=Sub|Number=Plur|Person=3|Tense=Pres|VerbForm=Fin": {POS: "AUX"}, <del> "AUX__Mood=Sub|Number=Sing|Person=1|Tense=Pres|VerbForm=Fin": {POS: "AUX"}, <del> "AUX__Mood=Sub|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin": {POS: "AUX"}, <del> "AUX__Number=Sing|Tense=Past|VerbForm=Part|Voice=Pass": {POS: "AUX"}, <del> "AUX__Tense=Past|VerbForm=Part": {POS: "AUX"}, <del> "AUX__Tense=Pres|VerbForm=Part": {POS: "AUX"}, <del> "AUX__VerbForm=Inf": {POS: "AUX"}, <add> "AUX__Gender=Fem|Number=Plur|Tense=Past|VerbForm=Part": {POS: AUX}, <add> 
"AUX__Gender=Fem|Number=Plur|Tense=Past|VerbForm=Part|Voice=Pass": {POS: AUX}, <add> "AUX__Gender=Fem|Number=Sing|Tense=Past|VerbForm=Part": {POS: AUX}, <add> "AUX__Gender=Fem|Number=Sing|Tense=Past|VerbForm=Part|Voice=Pass": {POS: AUX}, <add> "AUX__Gender=Masc|Number=Plur|Tense=Past|VerbForm=Part": {POS: AUX}, <add> "AUX__Gender=Masc|Number=Plur|Tense=Past|VerbForm=Part|Voice=Pass": {POS: AUX}, <add> "AUX__Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part": {POS: AUX}, <add> "AUX__Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part|Voice=Pass": {POS: AUX}, <add> "AUX__Mood=Cnd|Number=Plur|Person=1|Tense=Pres|VerbForm=Fin": {POS: AUX}, <add> "AUX__Mood=Cnd|Number=Plur|Person=2|Tense=Pres|VerbForm=Fin": {POS: AUX}, <add> "AUX__Mood=Cnd|Number=Plur|Person=3|Tense=Pres|VerbForm=Fin": {POS: AUX}, <add> "AUX__Mood=Cnd|Number=Sing|Person=1|Tense=Pres|VerbForm=Fin": {POS: AUX}, <add> "AUX__Mood=Cnd|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin": {POS: AUX}, <add> "AUX__Mood=Imp|Number=Plur|Person=2|Tense=Pres|VerbForm=Fin": {POS: AUX}, <add> "AUX__Mood=Ind|Number=Plur|Person=1|Tense=Fut|VerbForm=Fin": {POS: AUX}, <add> "AUX__Mood=Ind|Number=Plur|Person=1|Tense=Imp|VerbForm=Fin": {POS: AUX}, <add> "AUX__Mood=Ind|Number=Plur|Person=1|Tense=Pres|VerbForm=Fin": {POS: AUX}, <add> "AUX__Mood=Ind|Number=Plur|Person=2|Tense=Pres|VerbForm=Fin": {POS: AUX}, <add> "AUX__Mood=Ind|Number=Plur|Person=3|Tense=Fut|VerbForm=Fin": {POS: AUX}, <add> "AUX__Mood=Ind|Number=Plur|Person=3|Tense=Imp|VerbForm=Fin": {POS: AUX}, <add> "AUX__Mood=Ind|Number=Plur|Person=3|Tense=Past|VerbForm=Fin": {POS: AUX}, <add> "AUX__Mood=Ind|Number=Plur|Person=3|Tense=Pres|VerbForm=Fin": {POS: AUX}, <add> "AUX__Mood=Ind|Number=Sing|Person=1|Tense=Imp|VerbForm=Fin": {POS: AUX}, <add> "AUX__Mood=Ind|Number=Sing|Person=1|Tense=Pres|VerbForm=Fin": {POS: AUX}, <add> "AUX__Mood=Ind|Number=Sing|Person=2|Tense=Imp|VerbForm=Fin": {POS: AUX}, <add> "AUX__Mood=Ind|Number=Sing|Person=3|Tense=Fut|VerbForm=Fin": {POS: AUX}, <add> "AUX__Mood=Ind|Number=Sing|Person=3|Tense=Imp|VerbForm=Fin": {POS: AUX}, <add> "AUX__Mood=Ind|Number=Sing|Person=3|Tense=Past|VerbForm=Fin": {POS: AUX}, <add> "AUX__Mood=Ind|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin": {POS: AUX}, <add> "AUX__Mood=Sub|Number=Plur|Person=1|Tense=Pres|VerbForm=Fin": {POS: AUX}, <add> "AUX__Mood=Sub|Number=Plur|Person=2|Tense=Pres|VerbForm=Fin": {POS: AUX}, <add> "AUX__Mood=Sub|Number=Plur|Person=3|Tense=Pres|VerbForm=Fin": {POS: AUX}, <add> "AUX__Mood=Sub|Number=Sing|Person=1|Tense=Pres|VerbForm=Fin": {POS: AUX}, <add> "AUX__Mood=Sub|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin": {POS: AUX}, <add> "AUX__Number=Sing|Tense=Past|VerbForm=Part|Voice=Pass": {POS: AUX}, <add> "AUX__Tense=Past|VerbForm=Part": {POS: AUX}, <add> "AUX__Tense=Pres|VerbForm=Part": {POS: AUX}, <add> "AUX__VerbForm=Inf": {POS: AUX}, <ide> "CCONJ___": {POS: CCONJ}, <ide> "DET__Definite=Def|Gender=Fem|Number=Sing|PronType=Art": {POS: DET}, <ide> "DET__Definite=Def|Gender=Masc|Number=Sing|PronType=Art": {POS: DET}, <ide> "PROPN__Number=Sing": {POS: PROPN}, <ide> "PROPN___": {POS: PROPN}, <ide> "PUNCT___": {POS: PUNCT}, <del> "SCONJ___": {POS: "SCONJ"}, <add> "SCONJ___": {POS: SCONJ}, <ide> "VERB__Gender=Fem|Number=Plur|Tense=Past|VerbForm=Part": {POS: VERB}, <ide> "VERB__Gender=Fem|Number=Plur|Tense=Past|VerbForm=Part|Voice=Pass": {POS: VERB}, <ide> "VERB__Gender=Fem|Number=Sing|Tense=Past|VerbForm=Part": {POS: VERB},
1
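The fix above replaces quoted strings such as "AUX" with imported symbol constants, so a typo fails at import time instead of silently introducing a new tag value. A generic sketch of that idea, using made-up constants rather than spaCy's real symbols:

```python
# Made-up stand-ins for enum-like symbols; misspelling one raises NameError
# immediately instead of shipping a bad string.
AUX = "AUX"
SCONJ = "SCONJ"

TAG_MAP = {
    "AUX__VerbForm=Inf": {"pos": AUX},
    "SCONJ___": {"pos": SCONJ},
}
```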
Ruby
Ruby
use active_vault as the table prefix
3a92cbf6b358c983f3b1b9f628fb441e051a9984
<ide><path>lib/active_vault/blob.rb <ide> <ide> # Schema: id, key, filename, content_type, metadata, byte_size, checksum, created_at <ide> class ActiveVault::Blob < ActiveRecord::Base <del> self.table_name = "rails_blobs" <add> self.table_name = "active_vault_blobs" <ide> <ide> has_secure_token :key <ide> store :metadata, coder: JSON <ide><path>lib/active_vault/migration.rb <ide> class ActiveVault::CreateBlobs < ActiveRecord::Migration[5.1] <ide> def change <del> create_table :rails_blobs do |t| <ide> t.string :key <ide> t.string :filename <ide> t.string :content_type <ide> t.text :metadata <add> create_table :active_vault_blobs do |t| <ide> t.integer :byte_size <ide> t.string :checksum <ide> t.time :created_at
2
PHP
PHP
add test for
f0f1531faca68553012fd2ffe9153e0c7af76a79
<ide><path>lib/Cake/Test/Case/Routing/RouterTest.php <ide> public function testCanLeavePlugin() { <ide> $this->assertEquals('/admin/other/posts/index', $result); <ide> } <ide> <add>/** <add> * Test that URL's fail to parse when they are prefixed with // <add> * <add> * @return void <add> */ <add> public function testUrlParseFailureDoubleSlash() { <add> Router::connect('/posts', array('controller' => 'posts', 'action' => 'index')); <add> $result = Router::parse('/posts'); <add> $this->assertEquals( <add> array('pass' => array(), 'named' => array(), 'plugin' => null, 'controller' => 'posts', 'action' => 'index'), <add> $result <add> ); <add> <add> $result = Router::parse('//posts'); <add> $this->assertEquals(array(), $result); <add> } <add> <ide> /** <ide> * testUrlParsing method <ide> *
1
Python
Python
fix wildcard import
14a9c3ee7a40e5384194305f44d7b930f3f52a4f
<ide><path>spacy/language_data/tag_map.py <ide> # coding: utf8 <ide> from __future__ import unicode_literals <ide> <del>from ..symbols import * <add>from ..symbols import POS, ADV, NOUN, ADP, PRON, SCONJ, PROPN, DET, SYM, INTJ <add>from ..symbols import PUNCT, NUM, AUX, X, CONJ, ADJ, VERB, PART, SPACE <ide> <ide> <ide> TAG_MAP = {
1
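The patch above swaps a wildcard import for the specific names the module actually uses. A small before/after sketch with standard-library names, purely illustrative:

```python
# Before (hypothetical): from collections import *   # unclear what enters the namespace
# After: import exactly what is used, so readers and linters can verify it.
from collections import OrderedDict, defaultdict

counts = defaultdict(int)
ordered = OrderedDict(sorted(counts.items()))
```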
Go
Go
remove rc4 from the list of registry cipher suites
37846bff586e6f3d6bf35b66cce560ce4c57a619
<ide><path>registry/registry.go <ide> import ( <ide> "github.com/docker/docker/autogen/dockerversion" <ide> "github.com/docker/docker/pkg/parsers/kernel" <ide> "github.com/docker/docker/pkg/timeoutconn" <add> "github.com/docker/docker/pkg/tlsconfig" <ide> "github.com/docker/docker/pkg/transport" <ide> "github.com/docker/docker/pkg/useragent" <ide> ) <ide> func NewTransport(timeout TimeoutType, secure bool) http.RoundTripper { <ide> // Avoid fallback to SSL protocols < TLS1.0 <ide> MinVersion: tls.VersionTLS10, <ide> InsecureSkipVerify: !secure, <add> CipherSuites: tlsconfig.DefaultServerAcceptedCiphers, <ide> } <ide> <ide> tr := &http.Transport{
1
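The Docker change above points the registry transport at a curated cipher-suite list that leaves out RC4. The same idea expressed with Python's `ssl` module, as a rough illustration only (the cipher string is an example, not Docker's list):

```python
import ssl

ctx = ssl.create_default_context()
ctx.set_ciphers("HIGH:!aNULL:!RC4")  # accept a curated list; RC4 explicitly excluded
```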
Java
Java
integrate class proxy generation in aot processing
7c2453c3738426327d1f489006775c619c497694
<ide><path>spring-context/src/main/java/org/springframework/context/aot/ApplicationContextAotGenerator.java <ide> <ide> package org.springframework.context.aot; <ide> <add>import java.util.function.Supplier; <add> <ide> import org.springframework.aot.generate.GenerationContext; <ide> import org.springframework.beans.factory.BeanFactory; <ide> import org.springframework.beans.factory.support.DefaultListableBeanFactory; <add>import org.springframework.cglib.core.ReflectUtils; <ide> import org.springframework.context.ApplicationContext; <ide> import org.springframework.context.ApplicationContextInitializer; <ide> import org.springframework.context.support.GenericApplicationContext; <ide> public class ApplicationContextAotGenerator { <ide> */ <ide> public ClassName processAheadOfTime(GenericApplicationContext applicationContext, <ide> GenerationContext generationContext) { <del> applicationContext.refreshForAotProcessing(); <del> DefaultListableBeanFactory beanFactory = applicationContext.getDefaultListableBeanFactory(); <del> ApplicationContextInitializationCodeGenerator codeGenerator = <del> new ApplicationContextInitializationCodeGenerator(generationContext); <del> new BeanFactoryInitializationAotContributions(beanFactory).applyTo(generationContext, codeGenerator); <del> return codeGenerator.getGeneratedClass().getName(); <add> return withGeneratedClassHandler(new GeneratedClassHandler(generationContext), () -> { <add> applicationContext.refreshForAotProcessing(); <add> DefaultListableBeanFactory beanFactory = applicationContext.getDefaultListableBeanFactory(); <add> ApplicationContextInitializationCodeGenerator codeGenerator = <add> new ApplicationContextInitializationCodeGenerator(generationContext); <add> new BeanFactoryInitializationAotContributions(beanFactory).applyTo(generationContext, codeGenerator); <add> return codeGenerator.getGeneratedClass().getName(); <add> }); <add> } <add> <add> private <T> T withGeneratedClassHandler(GeneratedClassHandler generatedClassHandler, Supplier<T> task) { <add> try { <add> ReflectUtils.setGeneratedClassHandler(generatedClassHandler); <add> return task.get(); <add> } <add> finally { <add> ReflectUtils.setGeneratedClassHandler(null); <add> } <ide> } <ide> <ide> } <ide><path>spring-context/src/main/java/org/springframework/context/aot/GeneratedClassHandler.java <add>/* <add> * Copyright 2002-2022 the original author or authors. <add> * <add> * Licensed under the Apache License, Version 2.0 (the "License"); <add> * you may not use this file except in compliance with the License. <add> * You may obtain a copy of the License at <add> * <add> * https://www.apache.org/licenses/LICENSE-2.0 <add> * <add> * Unless required by applicable law or agreed to in writing, software <add> * distributed under the License is distributed on an "AS IS" BASIS, <add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <add> * See the License for the specific language governing permissions and <add> * limitations under the License. 
<add> */ <add> <add>package org.springframework.context.aot; <add> <add>import java.util.function.BiConsumer; <add>import java.util.function.Consumer; <add> <add>import org.springframework.aot.generate.GeneratedFiles; <add>import org.springframework.aot.generate.GeneratedFiles.Kind; <add>import org.springframework.aot.generate.GenerationContext; <add>import org.springframework.aot.hint.MemberCategory; <add>import org.springframework.aot.hint.RuntimeHints; <add>import org.springframework.aot.hint.TypeHint.Builder; <add>import org.springframework.aot.hint.TypeReference; <add>import org.springframework.cglib.core.ReflectUtils; <add>import org.springframework.core.io.ByteArrayResource; <add> <add>/** <add> * Handle generated classes by adding them to a {@link GenerationContext}, <add> * and register the necessary hints so that they can be instantiated. <add> * <add> * @author Stephane Nicoll <add> * @see ReflectUtils#setGeneratedClassHandler(BiConsumer) <add> */ <add>class GeneratedClassHandler implements BiConsumer<String, byte[]> { <add> <add> private static final Consumer<Builder> asCglibProxy = hint -> <add> hint.withMembers(MemberCategory.INVOKE_DECLARED_CONSTRUCTORS); <add> <add> private final RuntimeHints runtimeHints; <add> <add> private final GeneratedFiles generatedFiles; <add> <add> GeneratedClassHandler(GenerationContext generationContext) { <add> this.runtimeHints = generationContext.getRuntimeHints(); <add> this.generatedFiles = generationContext.getGeneratedFiles(); <add> } <add> <add> @Override <add> public void accept(String className, byte[] content) { <add> this.runtimeHints.reflection().registerType(TypeReference.of(className), asCglibProxy); <add> String path = className.replace(".", "/") + ".class"; <add> this.generatedFiles.addFile(Kind.CLASS, path, new ByteArrayResource(content)); <add> } <add> <add>} <ide><path>spring-context/src/test/java/org/springframework/context/aot/ApplicationContextAotGeneratorTests.java <ide> <ide> package org.springframework.context.aot; <ide> <add>import java.io.IOException; <ide> import java.util.function.BiConsumer; <ide> <ide> import org.junit.jupiter.api.Test; <ide> <add>import org.springframework.aot.generate.GeneratedFiles.Kind; <add>import org.springframework.aot.hint.MemberCategory; <add>import org.springframework.aot.hint.TypeReference; <add>import org.springframework.aot.hint.predicate.RuntimeHintsPredicates; <ide> import org.springframework.aot.test.generator.compile.Compiled; <ide> import org.springframework.aot.test.generator.compile.TestCompiler; <ide> import org.springframework.beans.BeansException; <ide> import org.springframework.beans.factory.support.RegisteredBean; <ide> import org.springframework.beans.factory.support.RootBeanDefinition; <ide> import org.springframework.context.ApplicationContextInitializer; <add>import org.springframework.context.annotation.AnnotationConfigApplicationContext; <ide> import org.springframework.context.annotation.AnnotationConfigUtils; <ide> import org.springframework.context.annotation.CommonAnnotationBeanPostProcessor; <ide> import org.springframework.context.support.GenericApplicationContext; <ide> import org.springframework.context.testfixture.context.generator.SimpleComponent; <ide> import org.springframework.context.testfixture.context.generator.annotation.AutowiredComponent; <add>import org.springframework.context.testfixture.context.generator.annotation.CglibConfiguration; <ide> import org.springframework.context.testfixture.context.generator.annotation.InitDestroyComponent; <ide> 
import org.springframework.core.testfixture.aot.generate.TestGenerationContext; <ide> <ide> void processAheadOfTimeWhenHasBeanRegistrationAotProcessorExcludesProcessor() { <ide> }); <ide> } <ide> <del> @SuppressWarnings({ "rawtypes", "unchecked" }) <del> private void testCompiledResult(GenericApplicationContext applicationContext, <del> BiConsumer<ApplicationContextInitializer<GenericApplicationContext>, Compiled> result) { <add> @Test <add> void processAheadOfTimeWhenHasCglibProxyWriteProxyAndGenerateReflectionHints() throws IOException { <add> GenericApplicationContext applicationContext = new AnnotationConfigApplicationContext(); <add> applicationContext.registerBean(CglibConfiguration.class); <add> TestGenerationContext context = processAheadOfTime(applicationContext); <add> String proxyClassName = CglibConfiguration.class.getName() + "$$SpringCGLIB$$0"; <add> assertThat(context.getGeneratedFiles() <add> .getGeneratedFileContent(Kind.CLASS, proxyClassName.replace('.', '/') + ".class")).isNotNull(); <add> assertThat(RuntimeHintsPredicates.reflection().onType(TypeReference.of(proxyClassName)) <add> .withMemberCategory(MemberCategory.INVOKE_DECLARED_CONSTRUCTORS)).accepts(context.getRuntimeHints()); <add> } <add> <add> private static TestGenerationContext processAheadOfTime(GenericApplicationContext applicationContext) { <ide> ApplicationContextAotGenerator generator = new ApplicationContextAotGenerator(); <ide> TestGenerationContext generationContext = new TestGenerationContext(); <ide> generator.processAheadOfTime(applicationContext, generationContext); <ide> generationContext.writeGeneratedContent(); <add> return generationContext; <add> } <add> <add> @SuppressWarnings({ "rawtypes", "unchecked" }) <add> private void testCompiledResult(GenericApplicationContext applicationContext, <add> BiConsumer<ApplicationContextInitializer<GenericApplicationContext>, Compiled> result) { <add> TestGenerationContext generationContext = processAheadOfTime(applicationContext); <ide> TestCompiler.forSystem().withFiles(generationContext.getGeneratedFiles()).compile(compiled -> <ide> result.accept(compiled.getInstance(ApplicationContextInitializer.class), compiled)); <ide> } <ide><path>spring-context/src/test/java/org/springframework/context/aot/GeneratedClassHandlerTests.java <add>/* <add> * Copyright 2002-2022 the original author or authors. <add> * <add> * Licensed under the Apache License, Version 2.0 (the "License"); <add> * you may not use this file except in compliance with the License. <add> * You may obtain a copy of the License at <add> * <add> * https://www.apache.org/licenses/LICENSE-2.0 <add> * <add> * Unless required by applicable law or agreed to in writing, software <add> * distributed under the License is distributed on an "AS IS" BASIS, <add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <add> * See the License for the specific language governing permissions and <add> * limitations under the License. 
<add> */ <add> <add>package org.springframework.context.aot; <add> <add>import java.io.ByteArrayOutputStream; <add>import java.io.IOException; <add> <add>import org.junit.jupiter.api.Test; <add> <add>import org.springframework.aot.generate.GeneratedFiles.Kind; <add>import org.springframework.aot.generate.InMemoryGeneratedFiles; <add>import org.springframework.aot.hint.MemberCategory; <add>import org.springframework.aot.hint.TypeReference; <add>import org.springframework.aot.hint.predicate.RuntimeHintsPredicates; <add>import org.springframework.core.io.InputStreamSource; <add>import org.springframework.core.testfixture.aot.generate.TestGenerationContext; <add> <add>import static org.assertj.core.api.Assertions.assertThat; <add> <add>/** <add> * Tests for {@link GeneratedClassHandler}. <add> * <add> * @author Stephane Nicoll <add> */ <add>class GeneratedClassHandlerTests { <add> <add> private static final byte[] TEST_CONTENT = new byte[] { 'a' }; <add> <add> private final TestGenerationContext generationContext; <add> <add> private final GeneratedClassHandler handler; <add> <add> public GeneratedClassHandlerTests() { <add> this.generationContext = new TestGenerationContext(); <add> this.handler = new GeneratedClassHandler(this.generationContext); <add> } <add> <add> @Test <add> void handlerGenerateRuntimeHints() { <add> String className = "com.example.Test$$Proxy$$1"; <add> this.handler.accept(className, TEST_CONTENT); <add> assertThat(RuntimeHintsPredicates.reflection().onType(TypeReference.of(className)) <add> .withMemberCategory(MemberCategory.INVOKE_DECLARED_CONSTRUCTORS)) <add> .accepts(this.generationContext.getRuntimeHints()); <add> } <add> <add> @Test <add> void handlerRegisterGeneratedClass() throws IOException { <add> String className = "com.example.Test$$Proxy$$1"; <add> this.handler.accept(className, TEST_CONTENT); <add> InMemoryGeneratedFiles generatedFiles = this.generationContext.getGeneratedFiles(); <add> assertThat(generatedFiles.getGeneratedFiles(Kind.SOURCE)).isEmpty(); <add> assertThat(generatedFiles.getGeneratedFiles(Kind.RESOURCE)).isEmpty(); <add> String expectedPath = "com/example/Test$$Proxy$$1.class"; <add> assertThat(generatedFiles.getGeneratedFiles(Kind.CLASS)).containsOnlyKeys(expectedPath); <add> assertContent(generatedFiles.getGeneratedFiles(Kind.CLASS).get(expectedPath), TEST_CONTENT); <add> } <add> <add> private void assertContent(InputStreamSource source, byte[] expectedContent) throws IOException { <add> ByteArrayOutputStream out = new ByteArrayOutputStream(); <add> source.getInputStream().transferTo(out); <add> assertThat(out.toByteArray()).isEqualTo(expectedContent); <add> } <add> <add>} <ide><path>spring-context/src/testFixtures/java/org/springframework/context/testfixture/context/generator/annotation/CglibConfiguration.java <add>/* <add> * Copyright 2002-2022 the original author or authors. <add> * <add> * Licensed under the Apache License, Version 2.0 (the "License"); <add> * you may not use this file except in compliance with the License. <add> * You may obtain a copy of the License at <add> * <add> * https://www.apache.org/licenses/LICENSE-2.0 <add> * <add> * Unless required by applicable law or agreed to in writing, software <add> * distributed under the License is distributed on an "AS IS" BASIS, <add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <add> * See the License for the specific language governing permissions and <add> * limitations under the License. 
<add> */ <add> <add>package org.springframework.context.testfixture.context.generator.annotation; <add> <add>import java.util.concurrent.atomic.AtomicInteger; <add> <add>import org.springframework.context.annotation.Bean; <add>import org.springframework.context.annotation.Configuration; <add> <add>@Configuration <add>public class CglibConfiguration { <add> <add> private static final AtomicInteger counter = new AtomicInteger(); <add> <add> @Bean <add> public String prefix() { <add> return "Hello" + counter.getAndIncrement(); <add> } <add> <add> @Bean <add> public String text() { <add> return prefix() + " World"; <add> } <add> <add>} <ide><path>spring-core/src/main/java/org/springframework/cglib/core/ReflectUtils.java <ide> import java.beans.IntrospectionException; <ide> import java.beans.Introspector; <ide> import java.beans.PropertyDescriptor; <del>import java.io.ByteArrayInputStream; <del>import java.io.OutputStream; <ide> import java.lang.invoke.MethodHandles; <ide> import java.lang.reflect.Constructor; <ide> import java.lang.reflect.InvocationTargetException; <ide> import java.lang.reflect.Member; <ide> import java.lang.reflect.Method; <ide> import java.lang.reflect.Modifier; <del>import java.nio.file.Files; <del>import java.nio.file.Path; <ide> import java.security.ProtectionDomain; <ide> import java.util.ArrayList; <ide> import java.util.Arrays; <ide> import java.util.List; <ide> import java.util.Map; <ide> import java.util.Set; <add>import java.util.function.BiConsumer; <ide> <ide> import org.springframework.asm.Type; <ide> <ide> private ReflectUtils() { <ide> <ide> private static final List<Method> OBJECT_METHODS = new ArrayList<Method>(); <ide> <add> private static BiConsumer<String, byte[]> generatedClassHandler; <add> <ide> // SPRING PATCH BEGIN <ide> static { <ide> // Resolve protected ClassLoader.defineClass method for fallback use <ide> public static Class defineClass(String className, byte[] b, ClassLoader loader, <ide> return defineClass(className, b, loader, protectionDomain, null); <ide> } <ide> <add> public static void setGeneratedClassHandler(BiConsumer<String, byte[]> handler) { <add> generatedClassHandler = handler; <add> } <add> <ide> @SuppressWarnings({"deprecation", "serial"}) <ide> public static Class defineClass(String className, byte[] b, ClassLoader loader, <ide> ProtectionDomain protectionDomain, Class<?> contextClass) throws Exception { <ide> <ide> Class c = null; <ide> Throwable t = THROWABLE; <ide> <del> String generatedClasses = System.getProperty("cglib.generatedClasses"); <del> if (generatedClasses != null) { <del> Path path = Path.of(generatedClasses + "/" + className.replace(".", "/") + ".class"); <del> Files.createDirectories(path.getParent()); <del> try (OutputStream os = Files.newOutputStream(path)) { <del> new ByteArrayInputStream(b).transferTo(os); <del> } <add> BiConsumer<String, byte[]> handlerToUse = generatedClassHandler; <add> if (handlerToUse != null) { <add> handlerToUse.accept(className, b); <ide> } <ide> <ide> // Preferred option: JDK 9+ Lookup.defineClass API if ClassLoader matches <ide><path>spring-core/src/main/java/org/springframework/cglib/core/SpringNamingPolicy.java <ide> * and using a plain counter suffix instead of a hash code suffix (as of 6.0). <ide> * <ide> * <p>This allows for reliably discovering pre-generated Spring proxy classes <del> * in the classpath (as written at runtime when the "cglib.generatedClasses" <del> * system property points to a specific directory to store the proxy classes). <add> * in the classpath. 
<ide> * <ide> * @author Juergen Hoeller <ide> * @since 3.2.8 / 6.0
7
Java
Java
fix code style
b238cc80ba3eb1041e737bb09bb34e7854c66ab2
<ide><path>src/main/java/io/reactivex/internal/operators/flowable/FlowableReduceSeedSingle.java <ide> protected void subscribeActual(SingleObserver<? super R> observer) { <ide> <ide> Subscription s; <ide> <del> public ReduceSeedObserver(SingleObserver<? super R> actual, BiFunction<R, ? super T, R> reducer, R value) { <add> ReduceSeedObserver(SingleObserver<? super R> actual, BiFunction<R, ? super T, R> reducer, R value) { <ide> this.actual = actual; <ide> this.value = value; <ide> this.reducer = reducer; <ide><path>src/main/java/io/reactivex/internal/operators/observable/ObservableReduceMaybe.java <ide> protected void subscribeActual(MaybeObserver<? super T> observer) { <ide> <ide> Disposable d; <ide> <del> public ReduceObserver(MaybeObserver<? super T> observer, BiFunction<T, T, T> reducer) { <add> ReduceObserver(MaybeObserver<? super T> observer, BiFunction<T, T, T> reducer) { <ide> this.actual = observer; <ide> this.reducer = reducer; <ide> } <ide><path>src/main/java/io/reactivex/internal/operators/observable/ObservableReduceSeedSingle.java <ide> protected void subscribeActual(SingleObserver<? super R> observer) { <ide> <ide> Disposable d; <ide> <del> public ReduceSeedObserver(SingleObserver<? super R> actual, BiFunction<R, ? super T, R> reducer, R value) { <add> ReduceSeedObserver(SingleObserver<? super R> actual, BiFunction<R, ? super T, R> reducer, R value) { <ide> this.actual = actual; <ide> this.value = value; <ide> this.reducer = reducer;
3
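For context, the three classes touched in the record above are the internal operators behind the public `reduce` overloads; the style fix itself changes nothing observable. A rough usage sketch of those public operators (standard RxJava 2 API, not taken from the patch):

```java
import io.reactivex.Flowable;
import io.reactivex.Observable;

public class ReduceExamples {
    public static void main(String[] args) {
        // reduce with a seed is backed by FlowableReduceSeedSingle and yields a Single
        Flowable.range(1, 5)
                .reduce(0, (sum, x) -> sum + x)
                .subscribe(total -> System.out.println("sum = " + total));

        // reduce without a seed is backed by ObservableReduceMaybe and yields a Maybe
        Observable.just(1, 2, 3)
                .reduce((a, b) -> a * b)
                .subscribe(product -> System.out.println("product = " + product));
    }
}
```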
Python
Python
remove an unused function
801918603cb075f96ff6c08a6fc6fb165f1eecda
<ide><path>flask/helpers.py <ide> json_available = False <ide> <ide> <del>from werkzeug import Headers, wrap_file, is_resource_modified, cached_property <add>from werkzeug import Headers, wrap_file, cached_property <ide> from werkzeug.exceptions import NotFound <ide> <ide> from jinja2 import FileSystemLoader
1
Text
Text
add extra step to contributing steps
988f5d102d1feaed825ff6da4527b37b8b198095
<ide><path>contributing.md <ide> Our Commitment to Open Source can be found [here](https://zeit.co/blog/oss) <ide> <ide> 1. [Fork](https://help.github.com/articles/fork-a-repo/) this repository to your own GitHub account and then [clone](https://help.github.com/articles/cloning-a-repository/) it to your local device. <del>2. Install yarn: `npm install -g yarn` <del>3. Install the dependencies: `yarn` <del>4. Run `yarn dev` to build and watch for code changes <del>5. In a new terminal, run `yarn types` to compile declaration files from TypeScript <del>6. The development branch is `canary` (this is the branch pull requests should be made against). On a release, the relevant parts of the changes in the `canary` branch are rebased into `master`. <add>2. Create a new branch `git checkout -b MY_BRANCH_NAME` <add>3. Install yarn: `npm install -g yarn` <add>4. Install the dependencies: `yarn` <add>5. Run `yarn dev` to build and watch for code changes <add>6. In a new terminal, run `yarn types` to compile declaration files from TypeScript <add>7. The development branch is `canary` (this is the branch pull requests should be made against). On a release, the relevant parts of the changes in the `canary` branch are rebased into `master`. <ide> <ide> > You may need to run `yarn types` again if your types get outdated. <ide>
1
PHP
PHP
fix coding standards
063a3eab1ae067230ab079be8ec00624a1300b3a
<ide><path>lib/Cake/Database/Schema/MysqlSchema.php <ide> namespace Cake\Database\Schema; <ide> <ide> use Cake\Database\Schema\Table; <add>use Cake\Error; <ide> <ide> /** <ide> * Schema dialect/support for MySQL <ide> class MysqlSchema { <ide> * <ide> * @var Cake\Database\Driver\Mysql <ide> */ <del> protected $driver; <add> protected $_driver; <ide> <ide> /** <ide> * Constructor <ide> public function describeTableSql($table) { <ide> * <ide> * @param string $column The column type + length <ide> * @return array Array of column information. <add> * @throws Cake\Error\Exception When column type cannot be parsed. <ide> */ <ide> public function convertColumn($column) { <ide> preg_match('/([a-z]+)(?:\(([0-9,]+)\))?/i', $column, $matches); <ide><path>lib/Cake/Database/Schema/PostgresSchema.php <ide> namespace Cake\Database\Schema; <ide> <ide> use Cake\Database\Schema\Table; <add>use Cake\Error; <ide> <ide> class PostgresSchema { <ide> <ide> class PostgresSchema { <ide> * <ide> * @var Cake\Database\Driver\Postgres <ide> */ <del> protected $driver; <add> protected $_driver; <ide> <ide> public function __construct($driver) { <ide> $this->_driver = $driver; <ide> public function convertColumn($column) { <ide> } <ide> <ide> if (in_array($col, array('date', 'time', 'boolean'))) { <del> return ['type' =>$col, 'length' => null]; <add> return ['type' => $col, 'length' => null]; <ide> } <ide> if (strpos($col, 'timestamp') !== false) { <ide> return ['type' => 'datetime', 'length' => null]; <ide><path>lib/Cake/Database/Schema/SqliteSchema.php <ide> namespace Cake\Database\Schema; <ide> <ide> use Cake\Database\Schema\Table; <add>use Cake\Error; <ide> <ide> class SqliteSchema { <ide> <ide> class SqliteSchema { <ide> * <ide> * @var Cake\Database\Driver\Sqlite <ide> */ <del> protected $driver; <add> protected $_driver; <ide> <ide> public function __construct($driver) { <ide> $this->_driver = $driver; <ide> public function convertColumn($column) { <ide> return ['type' => 'integer', 'length' => $length]; <ide> } <ide> if ($col === 'char') { <del> return ['type' => 'string', 'fixed' => true, 'length' => $length]; <add> return ['type' => 'string', 'fixed', 'length' => $length]; <ide> } <ide> if (strpos($col, 'char') !== false) { <ide> return ['type' => 'string', 'length' => $length]; <ide><path>lib/Cake/Test/TestCase/Database/Driver/SqliteTest.php <ide> public function testConnectionConfigCustom() { <ide> $driver->connect($config); <ide> } <ide> <del> <ide> } <ide><path>lib/Cake/Test/TestCase/Database/Schema/CollectionTest.php <ide> namespace Cake\Test\TestCase\Database\Schema; <ide> <ide> use Cake\Core\Configure; <del>use Cake\Database\Schema\Table; <ide> use Cake\Database\Connection; <add>use Cake\Database\Schema\Table; <ide> use Cake\Database\Schema\Collection; <ide> use Cake\TestSuite\TestCase; <ide> <ide><path>lib/Cake/Test/TestCase/Database/Schema/MysqlSchemaTest.php <ide> public static function columnProvider() { <ide> ], <ide> [ <ide> 'DATE', <del> ['type' => 'date', 'length' => null] <add> ['type' => 'date', 'length' => null] <ide> ], <ide> [ <ide> 'TIME',
6
Javascript
Javascript
fix --debug-brk on symlinked scripts
ccf7b41a69a8037f721a3eb495c2a82a53613634
<ide><path>lib/module.js <ide> Module.prototype.require = function(path) { <ide> }; <ide> <ide> <add>// Resolved path to process.argv[1] will be lazily placed here <add>// (needed for setting breakpoint when called with --debug-brk) <add>var resolvedArgv; <add> <add> <ide> // Returns exception if any <ide> Module.prototype._compile = function(content, filename) { <ide> var self = this; <ide> Module.prototype._compile = function(content, filename) { <ide> var wrapper = Module.wrap(content); <ide> <ide> var compiledWrapper = runInThisContext(wrapper, filename, true); <del> if (filename === process.argv[1] && global.v8debug) { <del> global.v8debug.Debug.setBreakPoint(compiledWrapper, 0, 0); <add> if (global.v8debug) { <add> if (!resolvedArgv) { <add> resolvedArgv = Module._resolveFilename(process.argv[1], null)[1]; <add> } <add> <add> // Set breakpoint on module start <add> if (filename === resolvedArgv) { <add> global.v8debug.Debug.setBreakPoint(compiledWrapper, 0, 0); <add> } <ide> } <ide> var args = [self.exports, require, self, filename, dirname]; <ide> return compiledWrapper.apply(self.exports, args);
1
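The fix in the record above works because `process.argv[1]` can be a symlink while module filenames are fully resolved, so a direct string comparison misses the entry script and the `--debug-brk` breakpoint is never set. A stand-alone approximation of that mismatch using `fs.realpathSync` — this is not the loader's internal `Module._resolveFilename` call, and the paths are invented for illustration:

```js
var fs = require('fs');
var path = require('path');

// Imagine the process was started as `node --debug-brk ./bin/app`, where
// ./bin/app is a symlink to ./lib/app.js.
var argvPath = path.resolve(process.argv[1] || '.');
var resolvedArgv = fs.realpathSync(argvPath);

console.log('argv[1] as given:', argvPath);
console.log('resolved target :', resolvedArgv);
console.log('same file?      :', argvPath === resolvedArgv);
```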
Ruby
Ruby
ensure prefix in routes are generated properly
542878304dd53bb8bd8962c61dd9404f2ba57ec7
<ide><path>actionpack/lib/action_dispatch/routing/mapper.rb <ide> def action_path(name, path = nil) <ide> <ide> def prefix_name_for_action(action, as) <ide> if as.present? <del> "#{as}_" <add> as.to_s <ide> elsif as <del> "" <add> nil <ide> elsif !canonical_action?(action, @scope[:scope_level]) <del> "#{action}_" <add> action.to_s <ide> end <ide> end <ide> <ide> def name_for_action(action, as=nil) <ide> if parent_resource <ide> collection_name = parent_resource.collection_name <ide> member_name = parent_resource.member_name <del> name_prefix = "#{name_prefix}_" if name_prefix.present? <ide> end <ide> <del> case @scope[:scope_level] <add> name = case @scope[:scope_level] <ide> when :collection <del> "#{prefix}#{name_prefix}#{collection_name}" <add> [name_prefix, collection_name] <ide> when :new <del> "#{prefix}new_#{name_prefix}#{member_name}" <add> [:new, name_prefix, member_name] <ide> else <del> if shallow_scoping? <del> shallow_prefix = "#{@scope[:shallow_prefix]}_" if @scope[:shallow_prefix].present? <del> "#{prefix}#{shallow_prefix}#{member_name}" <del> else <del> "#{prefix}#{name_prefix}#{member_name}" <del> end <add> [shallow_scoping? ? @scope[:shallow_prefix] : name_prefix, member_name] <ide> end <add> <add> name.unshift(prefix) <add> name.select(&:present?).join("_") <ide> end <ide> end <ide>
1
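The mapper refactor above collects route-name segments into an array and joins only the present ones, instead of concatenating pre-underscored strings. A small Ruby sketch of that joining idea, assuming ActiveSupport's `present?` as the mapper does; the segment values are invented:

```ruby
require "active_support/core_ext/object/blank"

# Segments roughly as name_for_action assembles them:
# a (blank) prefix, the :new segment, a name prefix, and the member name.
segments = [nil, "new", "admin", "post"]

puts segments.select(&:present?).join("_")            # => "new_admin_post"

# Blank parts no longer leave stray underscores behind:
puts [nil, "", "posts"].select(&:present?).join("_")  # => "posts"
```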
Javascript
Javascript
clarify possible options and fired events
494277e3d3668699133fc796e5bf632258e34802
<ide><path>src/ng/animate.js <ide> var $AnimateProvider = ['$provide', /** @this */ function($provide) { <ide> * ); <ide> * ``` <ide> * <add> * <div class="alert alert-warning"> <add> * **Note**: Generally, the events that are fired correspond 1:1 to `$animate` method names, <add> * e.g. {@link ng.$animate#addClass addClass()} will fire `addClass`, and {@link ng.ngClass} <add> * will fire `addClass` if classes are added, and `removeClass` if classes are removed. <add> * However, there are two exceptions: <add> * <add> * <ul> <add> * <li>if both an {@link ng.$animate#addClass addClass()} and a <add> * {@link ng.$animate#removeClass removeClass()} action are performed during the same <add> * animation, the event fired will be `setClass`. This is true even for `ngClass`.</li> <add> * <li>an {@link ng.$animate#animate animate()} call that adds and removes classes will fire <add> * the `setClass` event, but if it either removes or adds classes, <add> * it will fire `animate` instead.</li> <add> * </ul> <add> * <add> * </div> <add> * <ide> * @param {string} event the animation event that will be captured (e.g. enter, leave, move, addClass, removeClass, etc...) <ide> * @param {DOMElement} container the container element that will capture each of the animation events that are fired on itself <ide> * as well as among its children <del> * @param {Function} callback the callback function that will be fired when the listener is triggered <add> * @param {Function} callback the callback function that will be fired when the listener is triggered. <ide> * <ide> * The arguments present in the callback function are: <ide> * * `element` - The captured DOM element that the animation was fired on. <ide> * * `phase` - The phase of the animation. The two possible phases are **start** (when the animation starts) and **close** (when it ends). <add> * * `data` - an object with these properties: <add> * * addClass - `{string|null}` - space-separated CSS classes to add to the element <add> * * removeClass - `{string|null}` - space-separated CSS classes to remove from the element <add> * * from - `{Object|null}` - CSS properties & values at the beginning of the animation <add> * * to - `{Object|null}` - CSS properties & values at the end of the animation <add> * <add> * Note that the callback does not trigger a scope digest. Wrap your call into a <add> * {@link $rootScope.Scope#$apply scope.$apply} to propagate changes to the scope. <ide> */ <ide> on: $$animateQueue.on, <ide> <ide> var $AnimateProvider = ['$provide', /** @this */ function($provide) { <ide> * @param {object=} options an optional collection of options/styles that will be applied to the element. <ide> * The object can have the following properties: <ide> * <del> * - **addClass** - `{string}` - space-separated CSS classes to add to element <del> * - **from** - `{Object}` - CSS properties & values at the beginning of animation. Must have matching `to` <ide> * - **removeClass** - `{string}` - space-separated CSS classes to remove from element <add> * - **from** - `{Object}` - CSS properties & values at the beginning of animation. Must have matching `to` <ide> * - **to** - `{Object}` - CSS properties & values at end of animation. 
Must have matching `from` <ide> * <ide> * @return {Runner} animationRunner the animation runner <ide> var $AnimateProvider = ['$provide', /** @this */ function($provide) { <ide> * <ide> * - **addClass** - `{string}` - space-separated CSS classes to add to element <ide> * - **from** - `{Object}` - CSS properties & values at the beginning of animation. Must have matching `to` <del> * - **removeClass** - `{string}` - space-separated CSS classes to remove from element <ide> * - **to** - `{Object}` - CSS properties & values at end of animation. Must have matching `from` <ide> * <ide> * @return {Runner} the animation runner <ide> var $AnimateProvider = ['$provide', /** @this */ function($provide) { <ide> * The object can have the following properties: <ide> * <ide> * - **addClass** - `{string}` - space-separated CSS classes to add to element <del> * - **from** - `{Object}` - CSS properties & values at the beginning of animation. Must have matching `to` <ide> * - **removeClass** - `{string}` - space-separated CSS classes to remove from element <add> * - **from** - `{Object}` - CSS properties & values at the beginning of animation. Must have matching `to` <ide> * - **to** - `{Object}` - CSS properties & values at end of animation. Must have matching `from` <ide> * <ide> * @return {Runner} the animation runner <ide><path>src/ng/directive/ngClass.js <ide> function classDirective(name, selector) { <ide> * |----------------------------------|-------------------------------------| <ide> * | {@link ng.$animate#addClass addClass} | just before the class is applied to the element | <ide> * | {@link ng.$animate#removeClass removeClass} | just before the class is removed from the element | <add> * | {@link ng.$animate#setClass setClass} | just before classes are added and classes are removed from the element at the same time | <ide> * <ide> * ### ngClass and pre-existing CSS3 Transitions/Animations <ide> The ngClass directive still supports CSS3 Transitions/Animations even if they do not follow the ngAnimate CSS naming structure.
2
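The documentation added in the record above pins down the `$animate.on` callback signature (`element`, `phase`, `data`) and the need for an explicit digest. A listener sketch following that description, assuming the ngAnimate module is loaded so the events actually fire; the directive name and scope property are illustrative:

```js
angular.module('demo', ['ngAnimate'])
  .directive('animationLogger', function($animate) {
    return function link(scope, element) {
      // Capture addClass animations on this container and its children.
      $animate.on('addClass', element, function(animatedElement, phase, data) {
        if (phase === 'close') {
          // The callback does not trigger a digest, so wrap scope changes in $apply.
          scope.$apply(function() {
            scope.lastAddedClasses = data.addClass;
          });
        }
      });
    };
  });
```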
Python
Python
add head_mask/decoder_head_mask for tf bart models
1867d9a8d79e9d36496553a6f7b6857e4f33cdfd
<ide><path>src/transformers/models/bart/modeling_tf_bart.py <ide> def call( <ide> key_value_states: Optional[tf.Tensor] = None, <ide> past_key_value: Optional[Tuple[Tuple[tf.Tensor]]] = None, <ide> attention_mask: Optional[tf.Tensor] = None, <add> layer_head_mask: Optional[tf.Tensor] = None, <ide> training=False, <ide> ) -> Tuple[tf.Tensor, Optional[tf.Tensor]]: <ide> """Input shape: Batch x Time x Channel""" <ide> def call( <ide> <ide> attn_weights = tf.nn.softmax(attn_weights, axis=-1) <ide> <add> if layer_head_mask is not None: <add> tf.debugging.assert_equal( <add> shape_list(layer_head_mask), <add> [self.num_heads], <add> message=f"Head mask for a single layer should be of size {(self.num_heads)}, but is {shape_list(layer_head_mask)}", <add> ) <add> attn_weights = tf.reshape(layer_head_mask, (1, -1, 1, 1)) * tf.reshape( <add> attn_weights, (bsz, self.num_heads, tgt_len, src_len) <add> ) <add> attn_weights = attn_weights = tf.reshape(attn_weights, (bsz * self.num_heads, tgt_len, src_len)) <add> <ide> attn_probs = self.dropout(attn_weights, training=training) <ide> <ide> attn_output = tf.matmul(attn_probs, value_states) <ide> def __init__(self, config: BartConfig, **kwargs): <ide> self.fc2 = tf.keras.layers.Dense(self.embed_dim, name="fc2") <ide> self.final_layer_norm = tf.keras.layers.LayerNormalization(epsilon=1e-5, name="final_layer_norm") <ide> <del> def call(self, hidden_states: tf.Tensor, attention_mask: tf.Tensor, training=False): <add> def call(self, hidden_states: tf.Tensor, attention_mask: tf.Tensor, layer_head_mask: tf.Tensor, training=False): <ide> """ <ide> Args: <ide> hidden_states (:obj:`tf.Tensor`): input to the layer of shape `(seq_len, batch, embed_dim)` <ide> attention_mask (:obj:`tf.Tensor`): attention mask of size <ide> `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. <add> layer_head_mask (:obj:`tf.Tensor`): mask for attention heads in a given layer of size <add> `(encoder_attention_heads,)` <ide> """ <ide> residual = hidden_states <ide> hidden_states, self_attn_weights, _ = self.self_attn( <del> hidden_states=hidden_states, attention_mask=attention_mask <add> hidden_states=hidden_states, attention_mask=attention_mask, layer_head_mask=layer_head_mask <ide> ) <ide> tf.debugging.assert_equal( <ide> shape_list(hidden_states), <ide> def call( <ide> attention_mask: Optional[tf.Tensor] = None, <ide> encoder_hidden_states: Optional[tf.Tensor] = None, <ide> encoder_attention_mask: Optional[tf.Tensor] = None, <add> layer_head_mask: Optional[tf.Tensor] = None, <add> encoder_layer_head_mask: Optional[tf.Tensor] = None, <ide> past_key_value: Optional[Tuple[tf.Tensor]] = None, <ide> training=False, <ide> ) -> Tuple[tf.Tensor, tf.Tensor, Tuple[Tuple[tf.Tensor]]]: <ide> def call( <ide> encoder_hidden_states (:obj:`tf.Tensor`): cross attention input to the layer of shape `(seq_len, batch, embed_dim)` <ide> encoder_attention_mask (:obj:`tf.Tensor`): encoder attention mask of size <ide> `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. 
<add> layer_head_mask (:obj:`tf.Tensor`): mask for attention heads in a given layer of size <add> `(decoder_attention_heads,)` <add> encoder_layer_head_mask (:obj:`tf.Tensor`): mask for encoder attention heads in a given layer of size <add> `(encoder_attention_heads,)` <ide> past_key_value (:obj:`Tuple(tf.Tensor)`): cached past key and value projection states <ide> """ <ide> residual = hidden_states <ide> def call( <ide> hidden_states=hidden_states, <ide> past_key_value=self_attn_past_key_value, <ide> attention_mask=attention_mask, <add> layer_head_mask=layer_head_mask, <ide> ) <ide> hidden_states = self.dropout(hidden_states, training=training) <ide> hidden_states = residual + hidden_states <ide> def call( <ide> hidden_states=hidden_states, <ide> key_value_states=encoder_hidden_states, <ide> attention_mask=encoder_attention_mask, <add> layer_head_mask=encoder_layer_head_mask, <ide> past_key_value=cross_attn_past_key_value, <ide> ) <ide> hidden_states = self.dropout(hidden_states, training=training) <ide> def serving(self, inputs): <ide> the right for denoising pre-training following the paper. <ide> decoder_attention_mask (:obj:`tf.Tensor` of shape :obj:`(batch_size, target_sequence_length)`, `optional`): <ide> will be made by default and ignore pad tokens. It is not recommended to set this for most use cases. <add> head_mask (:obj:`tf.Tensor` of shape :obj:`(encoder_layers, encoder_attention_heads)`, `optional`): <add> Mask to nullify selected heads of the attention modules in the encoder. Mask values selected in ``[0, 1]``: <add> <add> - 1 indicates the head is **not masked**, <add> - 0 indicates the heas is **masked**. <add> <add> decoder_head_mask (:obj:`tf.Tensor` of shape :obj:`(decoder_layers, decoder_attention_heads)`, `optional`): <add> Mask to nullify selected heads of the attention modules in the decoder. Mask values selected in ``[0, 1]``: <add> <add> - 1 indicates the head is **not masked**, <add> - 0 indicates the head is **masked**. <add> <ide> encoder_outputs (:obj:`tf.FloatTensor`, `optional`): <ide> hidden states at the output of the last layer of the encoder. Used in the cross-attention of the decoder. <ide> of shape :obj:`(batch_size, sequence_length, hidden_size)` is a sequence of <ide> def call( <ide> input_ids=None, <ide> inputs_embeds=None, <ide> attention_mask=None, <add> head_mask=None, <ide> output_attentions=None, <ide> output_hidden_states=None, <ide> return_dict=None, <ide> def call( <ide> - 0 for tokens that are **masked**. <ide> <ide> `What are attention masks? <../glossary.html#attention-mask>`__ <add> head_mask (:obj:`tf.Tensor` of shape :obj:`(encoder_layers, encoder_attention_heads)`, `optional): <add> Mask to nullify selected heads of the attention modules. Mask values selected in ``[0, 1]``: <add> <add> - 1 indicates the head is **not masked**, <add> - 0 indicates the heas is **masked**. <add> <ide> inputs_embeds (:obj:`tf.Tensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`): <ide> Optionally, instead of passing :obj:`input_ids` you can choose to directly pass an embedded <ide> representation. 
This is useful if you want more control over how to convert :obj:`input_ids` indices <ide> def call( <ide> config=self.config, <ide> input_ids=input_ids, <ide> attention_mask=attention_mask, <add> head_mask=head_mask, <ide> inputs_embeds=inputs_embeds, <ide> output_attentions=output_attentions, <ide> output_hidden_states=output_hidden_states, <ide> def call( <ide> encoder_states = () if inputs["output_hidden_states"] else None <ide> all_attentions = () if inputs["output_attentions"] else None <ide> <add> # check if head_mask has a correct number of layers specified if desired <add> if inputs["head_mask"] is not None: <add> tf.debugging.assert_equal( <add> shape_list(inputs["head_mask"])[0], <add> len(self.layers), <add> message=f"The head_mask should be specified for {len(self.layers)} layers, but it is for {shape_list(inputs['head_mask'])[0]}.", <add> ) <ide> # encoder layers <del> for encoder_layer in self.layers: <add> for idx, encoder_layer in enumerate(self.layers): <ide> <ide> if inputs["output_hidden_states"]: <ide> encoder_states = encoder_states + (hidden_states,) <ide> def call( <ide> if inputs["training"] and (dropout_probability < self.layerdrop): # skip the layer <ide> continue <ide> <del> hidden_states, attn = encoder_layer(hidden_states, attention_mask) <add> hidden_states, attn = encoder_layer( <add> hidden_states, <add> attention_mask, <add> inputs["head_mask"][idx] if inputs["head_mask"] is not None else None, <add> ) <ide> <ide> if inputs["output_attentions"]: <ide> all_attentions += (attn,) <ide> def call( <ide> attention_mask=None, <ide> encoder_hidden_states=None, <ide> encoder_attention_mask=None, <add> head_mask=None, <add> encoder_head_mask=None, <ide> past_key_values=None, <ide> use_cache=None, <ide> output_attentions=None, <ide> def call( <ide> - 0 for tokens that are **masked**. <ide> <ide> `What are attention masks? <../glossary.html#attention-mask>`__ <add> head_mask (:obj:`tf.Tensor` of shape :obj:`(decoder_layers, decoder_attention_heads)`, `optional`): <add> Mask to nullify selected heads of the attention modules. Mask values selected in ``[0, 1]``: <add> <add> - 1 indicates the head is **not masked**, <add> - 0 indicates the heas is **masked**. <add> <add> encoder_head_mask (:obj:`tf.Tensor` of shape :obj:`(encoder_layers, encoder_attention_heads)`, `optional`): <add> Mask to nullify selected heads of the attention modules in encoder to avoid performing cross-attention <add> on hidden heads. Mask values selected in ``[0, 1]``: <add> <add> - 1 indicates the head is **not masked**, <add> - 0 indicates the heas is **masked**. <add> <ide> past_key_values (:obj:`Tuple[Tuple[tf.Tensor]]` of length :obj:`config.n_layers` with each tuple having 2 tuples each of which has 2 tensors of shape :obj:`(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`): <ide> Contains precomputed key and value hidden-states of the attention blocks. Can be used to speed up <ide> decoding. 
<ide> def call( <ide> attention_mask=attention_mask, <ide> encoder_hidden_states=encoder_hidden_states, <ide> encoder_attention_mask=encoder_attention_mask, <add> head_mask=head_mask, <add> encoder_head_mask=encoder_head_mask, <ide> inputs_embeds=inputs_embeds, <ide> past_key_values=past_key_values, <ide> use_cache=use_cache, <ide> def call( <ide> all_self_attns = () if inputs["output_attentions"] else None <ide> present_key_values = () if inputs["use_cache"] else None <ide> <add> # check if head_mask has a correct number of layers specified if desired <add> if inputs["head_mask"] is not None: <add> tf.debugging.assert_equal( <add> shape_list(inputs["head_mask"])[0], <add> len(self.layers), <add> message=f"The head_mask should be specified for {len(self.layers)} layers, but it is for {shape_list(inputs['head_mask'])[0]}.", <add> ) <ide> for idx, decoder_layer in enumerate(self.layers): <ide> # add LayerDrop (see https://arxiv.org/abs/1909.11556 for description) <ide> if inputs["output_hidden_states"]: <ide> def call( <ide> attention_mask=combined_attention_mask, <ide> encoder_hidden_states=inputs["encoder_hidden_states"], <ide> encoder_attention_mask=inputs["encoder_attention_mask"], <add> layer_head_mask=inputs["head_mask"][idx] if inputs["head_mask"] is not None else None, <add> encoder_layer_head_mask=inputs["encoder_head_mask"][idx] <add> if inputs["encoder_head_mask"] is not None <add> else None, <ide> past_key_value=past_key_value, <ide> ) <ide> <ide> def call( <ide> attention_mask=None, <ide> decoder_input_ids=None, <ide> decoder_attention_mask=None, <add> head_mask=None, <add> decoder_head_mask=None, <ide> encoder_outputs: Optional[Union[Tuple, TFBaseModelOutput]] = None, <ide> past_key_values=None, <ide> inputs_embeds=None, <ide> def call( <ide> attention_mask=attention_mask, <ide> decoder_input_ids=decoder_input_ids, <ide> decoder_attention_mask=decoder_attention_mask, <add> head_mask=head_mask, <add> decoder_head_mask=decoder_head_mask, <ide> encoder_outputs=encoder_outputs, <ide> past_key_values=past_key_values, <ide> inputs_embeds=inputs_embeds, <ide> def call( <ide> inputs["encoder_outputs"] = self.encoder( <ide> input_ids=inputs["input_ids"], <ide> attention_mask=inputs["attention_mask"], <add> head_mask=inputs["head_mask"], <ide> inputs_embeds=inputs["inputs_embeds"], <ide> output_attentions=inputs["output_attentions"], <ide> output_hidden_states=inputs["output_hidden_states"], <ide> def call( <ide> attention_mask=inputs["decoder_attention_mask"], <ide> encoder_hidden_states=inputs["encoder_outputs"][0], <ide> encoder_attention_mask=inputs["attention_mask"], <add> head_mask=inputs["decoder_head_mask"], <add> encoder_head_mask=inputs["head_mask"], <ide> past_key_values=inputs["past_key_values"], <ide> inputs_embeds=inputs["decoder_inputs_embeds"], <ide> use_cache=inputs["use_cache"], <ide> def call( <ide> attention_mask=None, <ide> decoder_input_ids=None, <ide> decoder_attention_mask=None, <add> head_mask=None, <add> decoder_head_mask=None, <ide> encoder_outputs: Optional[Union[Tuple, TFBaseModelOutput]] = None, <ide> past_key_values=None, <ide> inputs_embeds=None, <ide> def call( <ide> attention_mask=attention_mask, <ide> decoder_input_ids=decoder_input_ids, <ide> decoder_attention_mask=decoder_attention_mask, <add> head_mask=head_mask, <add> decoder_head_mask=decoder_head_mask, <ide> encoder_outputs=encoder_outputs, <ide> past_key_values=past_key_values, <ide> inputs_embeds=inputs_embeds, <ide> def call( <ide> attention_mask=inputs["attention_mask"], <ide> 
decoder_input_ids=inputs["decoder_input_ids"], <ide> decoder_attention_mask=inputs["decoder_attention_mask"], <add> head_mask=inputs["head_mask"], <add> decoder_head_mask=inputs["decoder_head_mask"], <ide> encoder_outputs=inputs["encoder_outputs"], <ide> past_key_values=inputs["past_key_values"], <ide> inputs_embeds=inputs["inputs_embeds"], <ide> def call( <ide> attention_mask=None, <ide> decoder_input_ids=None, <ide> decoder_attention_mask=None, <add> head_mask=None, <add> decoder_head_mask=None, <ide> encoder_outputs: Optional[TFBaseModelOutput] = None, <ide> past_key_values=None, <ide> inputs_embeds=None, <ide> def call( <ide> attention_mask=attention_mask, <ide> decoder_input_ids=decoder_input_ids, <ide> decoder_attention_mask=decoder_attention_mask, <add> head_mask=head_mask, <add> decoder_head_mask=decoder_head_mask, <ide> encoder_outputs=encoder_outputs, <ide> past_key_values=past_key_values, <ide> inputs_embeds=inputs_embeds, <ide> def call( <ide> decoder_input_ids=inputs["decoder_input_ids"], <ide> encoder_outputs=inputs["encoder_outputs"], <ide> decoder_attention_mask=inputs["decoder_attention_mask"], <add> head_mask=inputs["head_mask"], <add> decoder_head_mask=inputs["decoder_head_mask"], <ide> past_key_values=inputs["past_key_values"], <ide> inputs_embeds=inputs["inputs_embeds"], <ide> decoder_inputs_embeds=inputs["decoder_inputs_embeds"], <ide> def serving_output(self, output): <ide> encoder_attentions=enc_attns, <ide> ) <ide> <del> def prepare_inputs_for_generation(self, decoder_input_ids, past, attention_mask, use_cache, **kwargs) -> Dict: <add> def prepare_inputs_for_generation( <add> self, <add> decoder_input_ids, <add> past, <add> attention_mask, <add> head_mask=None, <add> use_cache=None, <add> **kwargs, <add> ) -> Dict: <ide> assert past is not None and len(past) in {1, 2}, f"past has to be an iterable of length 1,2 got {past}" <ide> if len(past) == 1: <ide> assert isinstance(past[0], tf.Tensor), f"`past[0]` has to be of type `tf.Tensor`, but is {type(past[0])}" <ide> def prepare_inputs_for_generation(self, decoder_input_ids, past, attention_mask, <ide> "past_key_values": past_key_values, <ide> "decoder_input_ids": decoder_input_ids, <ide> "attention_mask": attention_mask, <add> "head_mask": head_mask, <ide> "use_cache": use_cache, # change this to avoid caching (presumably for debugging) <ide> } <ide> <ide><path>src/transformers/models/blenderbot/modeling_tf_blenderbot.py <ide> def call( <ide> key_value_states: Optional[tf.Tensor] = None, <ide> past_key_value: Optional[Tuple[Tuple[tf.Tensor]]] = None, <ide> attention_mask: Optional[tf.Tensor] = None, <add> layer_head_mask: Optional[tf.Tensor] = None, <ide> training=False, <ide> ) -> Tuple[tf.Tensor, Optional[tf.Tensor]]: <ide> """Input shape: Batch x Time x Channel""" <ide> def call( <ide> <ide> attn_weights = tf.nn.softmax(attn_weights, axis=-1) <ide> <add> if layer_head_mask is not None: <add> tf.debugging.assert_equal( <add> shape_list(layer_head_mask), <add> [self.num_heads], <add> message=f"Head mask for a single layer should be of size {(self.num_heads)}, but is {shape_list(layer_head_mask)}", <add> ) <add> attn_weights = tf.reshape(layer_head_mask, (1, -1, 1, 1)) * tf.reshape( <add> attn_weights, (bsz, self.num_heads, tgt_len, src_len) <add> ) <add> attn_weights = attn_weights = tf.reshape(attn_weights, (bsz * self.num_heads, tgt_len, src_len)) <add> <ide> attn_probs = self.dropout(attn_weights, training=training) <ide> <ide> attn_output = tf.matmul(attn_probs, value_states) <ide> def __init__(self, config: 
BlenderbotConfig, **kwargs): <ide> self.fc2 = tf.keras.layers.Dense(self.embed_dim, name="fc2") <ide> self.final_layer_norm = tf.keras.layers.LayerNormalization(epsilon=1e-5, name="final_layer_norm") <ide> <del> def call(self, hidden_states: tf.Tensor, attention_mask: tf.Tensor, training=False): <add> def call(self, hidden_states: tf.Tensor, attention_mask: tf.Tensor, layer_head_mask: tf.Tensor, training=False): <ide> """ <ide> Args: <ide> hidden_states (:obj:`tf.Tensor`): input to the layer of shape `(seq_len, batch, embed_dim)` <ide> attention_mask (:obj:`tf.Tensor`): attention mask of size <ide> `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. <add> layer_head_mask (:obj:`tf.Tensor`): mask for attention heads in a given layer of size <add> `(encoder_attention_heads,)` <ide> """ <ide> residual = hidden_states <ide> hidden_states = self.self_attn_layer_norm(hidden_states) <ide> hidden_states, self_attn_weights, _ = self.self_attn( <del> hidden_states=hidden_states, attention_mask=attention_mask <add> hidden_states=hidden_states, attention_mask=attention_mask, layer_head_mask=layer_head_mask <ide> ) <ide> tf.debugging.assert_equal( <ide> shape_list(hidden_states), <ide> def call( <ide> attention_mask: Optional[tf.Tensor] = None, <ide> encoder_hidden_states: Optional[tf.Tensor] = None, <ide> encoder_attention_mask: Optional[tf.Tensor] = None, <add> layer_head_mask: Optional[tf.Tensor] = None, <add> encoder_layer_head_mask: Optional[tf.Tensor] = None, <ide> past_key_value: Optional[Tuple[tf.Tensor]] = None, <ide> training=False, <ide> ) -> Tuple[tf.Tensor, tf.Tensor, Tuple[Tuple[tf.Tensor]]]: <ide> def call( <ide> encoder_hidden_states (:obj:`tf.Tensor`): cross attention input to the layer of shape `(seq_len, batch, embed_dim)` <ide> encoder_attention_mask (:obj:`tf.Tensor`): encoder attention mask of size <ide> `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. <add> layer_head_mask (:obj:`tf.Tensor`): mask for attention heads in a given layer of size <add> `(decoder_attention_heads,)` <add> encoder_layer_head_mask (:obj:`tf.Tensor`): mask for encoder attention heads in a given layer of size <add> `(encoder_attention_heads,)` <ide> past_key_value (:obj:`Tuple(tf.Tensor)`): cached past key and value projection states <ide> """ <ide> residual = hidden_states <ide> def call( <ide> hidden_states=hidden_states, <ide> past_key_value=self_attn_past_key_value, <ide> attention_mask=attention_mask, <add> layer_head_mask=layer_head_mask, <ide> ) <ide> hidden_states = self.dropout(hidden_states, training=training) <ide> hidden_states = residual + hidden_states <ide> def call( <ide> hidden_states=hidden_states, <ide> key_value_states=encoder_hidden_states, <ide> attention_mask=encoder_attention_mask, <add> layer_head_mask=encoder_layer_head_mask, <ide> past_key_value=cross_attn_past_key_value, <ide> ) <ide> hidden_states = self.dropout(hidden_states, training=training) <ide> def serving(self, inputs): <ide> :obj:`past_key_values`). <ide> decoder_attention_mask (:obj:`tf.Tensor` of shape :obj:`(batch_size, target_sequence_length)`, `optional`): <ide> will be made by default and ignore pad tokens. It is not recommended to set this for most use cases. <add> head_mask (:obj:`tf.Tensor` of shape :obj:`(encoder_layers, encoder_attention_heads)`, `optional`): <add> Mask to nullify selected heads of the attention modules in the encoder. 
Mask values selected in ``[0, 1]``: <add> <add> - 1 indicates the head is **not masked**, <add> - 0 indicates the heas is **masked**. <add> <add> decoder_head_mask (:obj:`tf.Tensor` of shape :obj:`(decoder_layers, decoder_attention_heads)`, `optional`): <add> Mask to nullify selected heads of the attention modules in the decoder. Mask values selected in ``[0, 1]``: <add> <add> - 1 indicates the head is **not masked**, <add> - 0 indicates the head is **masked**. <add> <ide> encoder_outputs (:obj:`tf.FloatTensor`, `optional`): <ide> hidden states at the output of the last layer of the encoder. Used in the cross-attention of the decoder. <ide> of shape :obj:`(batch_size, sequence_length, hidden_size)` is a sequence of <ide> def call( <ide> input_ids=None, <ide> inputs_embeds=None, <ide> attention_mask=None, <add> head_mask=None, <ide> output_attentions=None, <ide> output_hidden_states=None, <ide> return_dict=None, <ide> def call( <ide> - 0 for tokens that are **masked**. <ide> <ide> `What are attention masks? <../glossary.html#attention-mask>`__ <add> head_mask (:obj:`tf.Tensor` of shape :obj:`(encoder_layers, encoder_attention_heads)`, `optional): <add> Mask to nullify selected heads of the attention modules. Mask values selected in ``[0, 1]``: <add> <add> - 1 indicates the head is **not masked**, <add> - 0 indicates the heas is **masked**. <add> <ide> inputs_embeds (:obj:`tf.Tensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`): <ide> Optionally, instead of passing :obj:`input_ids` you can choose to directly pass an embedded <ide> representation. This is useful if you want more control over how to convert :obj:`input_ids` indices <ide> def call( <ide> config=self.config, <ide> input_ids=input_ids, <ide> attention_mask=attention_mask, <add> head_mask=head_mask, <ide> inputs_embeds=inputs_embeds, <ide> output_attentions=output_attentions, <ide> output_hidden_states=output_hidden_states, <ide> def call( <ide> encoder_states = () if inputs["output_hidden_states"] else None <ide> all_attentions = () if inputs["output_attentions"] else None <ide> <add> # check if head_mask has a correct number of layers specified if desired <add> if inputs["head_mask"] is not None: <add> tf.debugging.assert_equal( <add> shape_list(inputs["head_mask"])[0], <add> len(self.layers), <add> message=f"The head_mask should be specified for {len(self.layers)} layers, but it is for {shape_list(inputs['head_mask'])[0]}.", <add> ) <ide> # encoder layers <del> for encoder_layer in self.layers: <add> for idx, encoder_layer in enumerate(self.layers): <ide> <ide> if inputs["output_hidden_states"]: <ide> encoder_states = encoder_states + (hidden_states,) <ide> def call( <ide> if inputs["training"] and (dropout_probability < self.layerdrop): # skip the layer <ide> continue <ide> <del> hidden_states, attn = encoder_layer(hidden_states, attention_mask) <add> hidden_states, attn = encoder_layer( <add> hidden_states, <add> attention_mask, <add> inputs["head_mask"][idx] if inputs["head_mask"] is not None else None, <add> ) <ide> <ide> if inputs["output_attentions"]: <ide> all_attentions += (attn,) <ide> def call( <ide> attention_mask=None, <ide> encoder_hidden_states=None, <ide> encoder_attention_mask=None, <add> head_mask=None, <add> encoder_head_mask=None, <ide> past_key_values=None, <ide> use_cache=None, <ide> output_attentions=None, <ide> def call( <ide> - 0 for tokens that are **masked**. <ide> <ide> `What are attention masks? 
<../glossary.html#attention-mask>`__ <add> head_mask (:obj:`tf.Tensor` of shape :obj:`(decoder_layers, decoder_attention_heads)`, `optional`): <add> Mask to nullify selected heads of the attention modules. Mask values selected in ``[0, 1]``: <add> <add> - 1 indicates the head is **not masked**, <add> - 0 indicates the heas is **masked**. <add> <add> encoder_head_mask (:obj:`tf.Tensor` of shape :obj:`(encoder_layers, encoder_attention_heads)`, `optional`): <add> Mask to nullify selected heads of the attention modules in encoder to avoid performing cross-attention <add> on hidden heads. Mask values selected in ``[0, 1]``: <add> <add> - 1 indicates the head is **not masked**, <add> - 0 indicates the heas is **masked**. <add> <ide> past_key_values (:obj:`Tuple[Tuple[tf.Tensor]]` of length :obj:`config.n_layers` with each tuple having 2 tuples each of which has 2 tensors of shape :obj:`(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`): <ide> Contains precomputed key and value hidden-states of the attention blocks. Can be used to speed up <ide> decoding. <ide> def call( <ide> attention_mask=attention_mask, <ide> encoder_hidden_states=encoder_hidden_states, <ide> encoder_attention_mask=encoder_attention_mask, <add> head_mask=head_mask, <add> encoder_head_mask=encoder_head_mask, <ide> inputs_embeds=inputs_embeds, <ide> past_key_values=past_key_values, <ide> use_cache=use_cache, <ide> def call( <ide> all_hidden_states = () <ide> all_self_attns = () <ide> present_key_values = () <add> <add> # check if head_mask has a correct number of layers specified if desired <add> if inputs["head_mask"] is not None: <add> tf.debugging.assert_equal( <add> shape_list(inputs["head_mask"])[0], <add> len(self.layers), <add> message=f"The head_mask should be specified for {len(self.layers)} layers, but it is for {shape_list(inputs['head_mask'])[0]}.", <add> ) <ide> for idx, decoder_layer in enumerate(self.layers): <ide> # add LayerDrop (see https://arxiv.org/abs/1909.11556 for description) <ide> if inputs["output_hidden_states"]: <ide> def call( <ide> attention_mask=combined_attention_mask, <ide> encoder_hidden_states=inputs["encoder_hidden_states"], <ide> encoder_attention_mask=inputs["encoder_attention_mask"], <add> layer_head_mask=inputs["head_mask"][idx] if inputs["head_mask"] is not None else None, <add> encoder_layer_head_mask=inputs["encoder_head_mask"][idx] <add> if inputs["encoder_head_mask"] is not None <add> else None, <ide> past_key_value=past_key_value, <ide> ) <ide> <ide> def call( <ide> attention_mask=None, <ide> decoder_input_ids=None, <ide> decoder_attention_mask=None, <add> head_mask=None, <add> decoder_head_mask=None, <ide> encoder_outputs: Optional[Union[Tuple, TFBaseModelOutput]] = None, <ide> past_key_values=None, <ide> inputs_embeds=None, <ide> def call( <ide> attention_mask=attention_mask, <ide> decoder_input_ids=decoder_input_ids, <ide> decoder_attention_mask=decoder_attention_mask, <add> head_mask=head_mask, <add> decoder_head_mask=decoder_head_mask, <ide> encoder_outputs=encoder_outputs, <ide> past_key_values=past_key_values, <ide> inputs_embeds=inputs_embeds, <ide> def call( <ide> inputs["encoder_outputs"] = self.encoder( <ide> input_ids=inputs["input_ids"], <ide> attention_mask=inputs["attention_mask"], <add> head_mask=inputs["head_mask"], <ide> inputs_embeds=inputs["inputs_embeds"], <ide> output_attentions=inputs["output_attentions"], <ide> output_hidden_states=inputs["output_hidden_states"], <ide> def call( <ide> attention_mask=inputs["decoder_attention_mask"], <ide> 
encoder_hidden_states=inputs["encoder_outputs"][0], <ide> encoder_attention_mask=inputs["attention_mask"], <add> head_mask=inputs["decoder_head_mask"], <add> encoder_head_mask=inputs["head_mask"], <ide> past_key_values=inputs["past_key_values"], <ide> inputs_embeds=inputs["decoder_inputs_embeds"], <ide> use_cache=inputs["use_cache"], <ide> def call( <ide> attention_mask=None, <ide> decoder_input_ids=None, <ide> decoder_attention_mask=None, <add> head_mask=None, <add> decoder_head_mask=None, <ide> encoder_outputs: Optional[Union[Tuple, TFBaseModelOutput]] = None, <ide> past_key_values=None, <ide> inputs_embeds=None, <ide> def call( <ide> attention_mask=attention_mask, <ide> decoder_input_ids=decoder_input_ids, <ide> decoder_attention_mask=decoder_attention_mask, <add> head_mask=head_mask, <add> decoder_head_mask=decoder_head_mask, <ide> encoder_outputs=encoder_outputs, <ide> past_key_values=past_key_values, <ide> inputs_embeds=inputs_embeds, <ide> def call( <ide> attention_mask=inputs["attention_mask"], <ide> decoder_input_ids=inputs["decoder_input_ids"], <ide> decoder_attention_mask=inputs["decoder_attention_mask"], <add> head_mask=inputs["head_mask"], <add> decoder_head_mask=inputs["decoder_head_mask"], <ide> encoder_outputs=inputs["encoder_outputs"], <ide> past_key_values=inputs["past_key_values"], <ide> inputs_embeds=inputs["inputs_embeds"], <ide> def call( <ide> attention_mask=None, <ide> decoder_input_ids=None, <ide> decoder_attention_mask=None, <add> head_mask=None, <add> decoder_head_mask=None, <ide> encoder_outputs: Optional[TFBaseModelOutput] = None, <ide> past_key_values=None, <ide> inputs_embeds=None, <ide> def call( <ide> attention_mask=attention_mask, <ide> decoder_input_ids=decoder_input_ids, <ide> decoder_attention_mask=decoder_attention_mask, <add> head_mask=head_mask, <add> decoder_head_mask=decoder_head_mask, <ide> encoder_outputs=encoder_outputs, <ide> past_key_values=past_key_values, <ide> inputs_embeds=inputs_embeds, <ide> def call( <ide> decoder_input_ids=inputs["decoder_input_ids"], <ide> encoder_outputs=inputs["encoder_outputs"], <ide> decoder_attention_mask=inputs["decoder_attention_mask"], <add> head_mask=inputs["head_mask"], <add> decoder_head_mask=inputs["decoder_head_mask"], <ide> past_key_values=inputs["past_key_values"], <ide> inputs_embeds=inputs["inputs_embeds"], <ide> decoder_inputs_embeds=inputs["decoder_inputs_embeds"], <ide> def serving_output(self, output): <ide> ) <ide> <ide> # Copied from transformers.models.bart.modeling_tf_bart.TFBartForConditionalGeneration.prepare_inputs_for_generation <del> def prepare_inputs_for_generation(self, decoder_input_ids, past, attention_mask, use_cache, **kwargs) -> Dict: <add> def prepare_inputs_for_generation( <add> self, <add> decoder_input_ids, <add> past, <add> attention_mask, <add> head_mask=None, <add> use_cache=None, <add> **kwargs, <add> ) -> Dict: <ide> assert past is not None and len(past) in {1, 2}, f"past has to be an iterable of length 1,2 got {past}" <ide> if len(past) == 1: <ide> assert isinstance(past[0], tf.Tensor), f"`past[0]` has to be of type `tf.Tensor`, but is {type(past[0])}" <ide> def prepare_inputs_for_generation(self, decoder_input_ids, past, attention_mask, <ide> "past_key_values": past_key_values, <ide> "decoder_input_ids": decoder_input_ids, <ide> "attention_mask": attention_mask, <add> "head_mask": head_mask, <ide> "use_cache": use_cache, # change this to avoid caching (presumably for debugging) <ide> } <ide> 
<ide><path>src/transformers/models/blenderbot_small/modeling_tf_blenderbot_small.py <ide> def call( <ide> key_value_states: Optional[tf.Tensor] = None, <ide> past_key_value: Optional[Tuple[Tuple[tf.Tensor]]] = None, <ide> attention_mask: Optional[tf.Tensor] = None, <add> layer_head_mask: Optional[tf.Tensor] = None, <ide> training=False, <ide> ) -> Tuple[tf.Tensor, Optional[tf.Tensor]]: <ide> """Input shape: Batch x Time x Channel""" <ide> def call( <ide> <ide> attn_weights = tf.nn.softmax(attn_weights, axis=-1) <ide> <add> if layer_head_mask is not None: <add> tf.debugging.assert_equal( <add> shape_list(layer_head_mask), <add> [self.num_heads], <add> message=f"Head mask for a single layer should be of size {(self.num_heads)}, but is {shape_list(layer_head_mask)}", <add> ) <add> attn_weights = tf.reshape(layer_head_mask, (1, -1, 1, 1)) * tf.reshape( <add> attn_weights, (bsz, self.num_heads, tgt_len, src_len) <add> ) <add> attn_weights = attn_weights = tf.reshape(attn_weights, (bsz * self.num_heads, tgt_len, src_len)) <add> <ide> attn_probs = self.dropout(attn_weights, training=training) <ide> <ide> attn_output = tf.matmul(attn_probs, value_states) <ide> def __init__(self, config: BlenderbotSmallConfig, **kwargs): <ide> self.fc2 = tf.keras.layers.Dense(self.embed_dim, name="fc2") <ide> self.final_layer_norm = tf.keras.layers.LayerNormalization(epsilon=1e-5, name="final_layer_norm") <ide> <del> def call(self, hidden_states: tf.Tensor, attention_mask: tf.Tensor, training=False): <add> def call(self, hidden_states: tf.Tensor, attention_mask: tf.Tensor, layer_head_mask: tf.Tensor, training=False): <ide> """ <ide> Args: <ide> hidden_states (:obj:`tf.Tensor`): input to the layer of shape `(seq_len, batch, embed_dim)` <ide> attention_mask (:obj:`tf.Tensor`): attention mask of size <ide> `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. <add> layer_head_mask (:obj:`tf.Tensor`): mask for attention heads in a given layer of size <add> `(encoder_attention_heads,)` <ide> """ <ide> residual = hidden_states <ide> hidden_states, self_attn_weights, _ = self.self_attn( <del> hidden_states=hidden_states, attention_mask=attention_mask <add> hidden_states=hidden_states, attention_mask=attention_mask, layer_head_mask=layer_head_mask <ide> ) <ide> tf.debugging.assert_equal( <ide> shape_list(hidden_states), <ide> def call( <ide> attention_mask: Optional[tf.Tensor] = None, <ide> encoder_hidden_states: Optional[tf.Tensor] = None, <ide> encoder_attention_mask: Optional[tf.Tensor] = None, <add> layer_head_mask: Optional[tf.Tensor] = None, <add> encoder_layer_head_mask: Optional[tf.Tensor] = None, <ide> past_key_value: Optional[Tuple[tf.Tensor]] = None, <ide> training=False, <ide> ) -> Tuple[tf.Tensor, tf.Tensor, Tuple[Tuple[tf.Tensor]]]: <ide> def call( <ide> encoder_hidden_states (:obj:`tf.Tensor`): cross attention input to the layer of shape `(seq_len, batch, embed_dim)` <ide> encoder_attention_mask (:obj:`tf.Tensor`): encoder attention mask of size <ide> `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. 
<add> layer_head_mask (:obj:`tf.Tensor`): mask for attention heads in a given layer of size <add> `(decoder_attention_heads,)` <add> encoder_layer_head_mask (:obj:`tf.Tensor`): mask for encoder attention heads in a given layer of size <add> `(encoder_attention_heads,)` <ide> past_key_value (:obj:`Tuple(tf.Tensor)`): cached past key and value projection states <ide> """ <ide> residual = hidden_states <ide> def call( <ide> hidden_states=hidden_states, <ide> past_key_value=self_attn_past_key_value, <ide> attention_mask=attention_mask, <add> layer_head_mask=layer_head_mask, <ide> ) <ide> hidden_states = self.dropout(hidden_states, training=training) <ide> hidden_states = residual + hidden_states <ide> def call( <ide> hidden_states=hidden_states, <ide> key_value_states=encoder_hidden_states, <ide> attention_mask=encoder_attention_mask, <add> layer_head_mask=encoder_layer_head_mask, <ide> past_key_value=cross_attn_past_key_value, <ide> ) <ide> hidden_states = self.dropout(hidden_states, training=training) <ide> def serving(self, inputs): <ide> :obj:`past_key_values`). <ide> decoder_attention_mask (:obj:`tf.Tensor` of shape :obj:`(batch_size, target_sequence_length)`, `optional`): <ide> will be made by default and ignore pad tokens. It is not recommended to set this for most use cases. <add> head_mask (:obj:`tf.Tensor` of shape :obj:`(encoder_layers, encoder_attention_heads)`, `optional`): <add> Mask to nullify selected heads of the attention modules in the encoder. Mask values selected in ``[0, 1]``: <add> <add> - 1 indicates the head is **not masked**, <add> - 0 indicates the heas is **masked**. <add> <add> decoder_head_mask (:obj:`tf.Tensor` of shape :obj:`(decoder_layers, decoder_attention_heads)`, `optional`): <add> Mask to nullify selected heads of the attention modules in the decoder. Mask values selected in ``[0, 1]``: <add> <add> - 1 indicates the head is **not masked**, <add> - 0 indicates the head is **masked**. <add> <ide> encoder_outputs (:obj:`tf.FloatTensor`, `optional`): <ide> hidden states at the output of the last layer of the encoder. Used in the cross-attention of the decoder. <ide> of shape :obj:`(batch_size, sequence_length, hidden_size)` is a sequence of <ide> def call( <ide> input_ids=None, <ide> inputs_embeds=None, <ide> attention_mask=None, <add> head_mask=None, <ide> output_attentions=None, <ide> output_hidden_states=None, <ide> return_dict=None, <ide> def call( <ide> - 0 for tokens that are **masked**. <ide> <ide> `What are attention masks? <../glossary.html#attention-mask>`__ <add> head_mask (:obj:`tf.Tensor` of shape :obj:`(encoder_layers, encoder_attention_heads)`, `optional): <add> Mask to nullify selected heads of the attention modules. Mask values selected in ``[0, 1]``: <add> <add> - 1 indicates the head is **not masked**, <add> - 0 indicates the heas is **masked**. <add> <ide> inputs_embeds (:obj:`tf.Tensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`): <ide> Optionally, instead of passing :obj:`input_ids` you can choose to directly pass an embedded <ide> representation. 
This is useful if you want more control over how to convert :obj:`input_ids` indices <ide> def call( <ide> config=self.config, <ide> input_ids=input_ids, <ide> attention_mask=attention_mask, <add> head_mask=head_mask, <ide> inputs_embeds=inputs_embeds, <ide> output_attentions=output_attentions, <ide> output_hidden_states=output_hidden_states, <ide> def call( <ide> encoder_states = () if inputs["output_hidden_states"] else None <ide> all_attentions = () if inputs["output_attentions"] else None <ide> <add> # check if head_mask has a correct number of layers specified if desired <add> if inputs["head_mask"] is not None: <add> tf.debugging.assert_equal( <add> shape_list(inputs["head_mask"])[0], <add> len(self.layers), <add> message=f"The head_mask should be specified for {len(self.layers)} layers, but it is for {shape_list(inputs['head_mask'])[0]}.", <add> ) <ide> # encoder layers <del> for encoder_layer in self.layers: <add> for idx, encoder_layer in enumerate(self.layers): <ide> <ide> if inputs["output_hidden_states"]: <ide> encoder_states = encoder_states + (hidden_states,) <ide> def call( <ide> if inputs["training"] and (dropout_probability < self.layerdrop): # skip the layer <ide> continue <ide> <del> hidden_states, attn = encoder_layer(hidden_states, attention_mask) <add> hidden_states, attn = encoder_layer( <add> hidden_states, <add> attention_mask, <add> inputs["head_mask"][idx] if inputs["head_mask"] is not None else None, <add> ) <ide> <ide> if inputs["output_attentions"]: <ide> all_attentions += (attn,) <ide> def call( <ide> attention_mask=None, <ide> encoder_hidden_states=None, <ide> encoder_attention_mask=None, <add> head_mask=None, <add> encoder_head_mask=None, <ide> past_key_values=None, <ide> use_cache=None, <ide> output_attentions=None, <ide> def call( <ide> - 0 for tokens that are **masked**. <ide> <ide> `What are attention masks? <../glossary.html#attention-mask>`__ <add> head_mask (:obj:`tf.Tensor` of shape :obj:`(decoder_layers, decoder_attention_heads)`, `optional`): <add> Mask to nullify selected heads of the attention modules. Mask values selected in ``[0, 1]``: <add> <add> - 1 indicates the head is **not masked**, <add> - 0 indicates the heas is **masked**. <add> <add> encoder_head_mask (:obj:`tf.Tensor` of shape :obj:`(encoder_layers, encoder_attention_heads)`, `optional`): <add> Mask to nullify selected heads of the attention modules in encoder to avoid performing cross-attention <add> on hidden heads. Mask values selected in ``[0, 1]``: <add> <add> - 1 indicates the head is **not masked**, <add> - 0 indicates the heas is **masked**. <add> <ide> past_key_values (:obj:`Tuple[Tuple[tf.Tensor]]` of length :obj:`config.n_layers` with each tuple having 2 tuples each of which has 2 tensors of shape :obj:`(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`): <ide> Contains precomputed key and value hidden-states of the attention blocks. Can be used to speed up <ide> decoding. 
<ide> def call( <ide> attention_mask=attention_mask, <ide> encoder_hidden_states=encoder_hidden_states, <ide> encoder_attention_mask=encoder_attention_mask, <add> head_mask=head_mask, <add> encoder_head_mask=encoder_head_mask, <ide> inputs_embeds=inputs_embeds, <ide> past_key_values=past_key_values, <ide> use_cache=use_cache, <ide> def call( <ide> all_self_attns = () <ide> present_key_values = () <ide> <add> # check if head_mask has a correct number of layers specified if desired <add> if inputs["head_mask"] is not None: <add> tf.debugging.assert_equal( <add> shape_list(inputs["head_mask"])[0], <add> len(self.layers), <add> message=f"The head_mask should be specified for {len(self.layers)} layers, but it is for {shape_list(inputs['head_mask'])[0]}.", <add> ) <ide> for idx, decoder_layer in enumerate(self.layers): <ide> # add LayerDrop (see https://arxiv.org/abs/1909.11556 for description) <ide> if inputs["output_hidden_states"]: <ide> def call( <ide> attention_mask=combined_attention_mask, <ide> encoder_hidden_states=inputs["encoder_hidden_states"], <ide> encoder_attention_mask=inputs["encoder_attention_mask"], <add> layer_head_mask=inputs["head_mask"][idx] if inputs["head_mask"] is not None else None, <add> encoder_layer_head_mask=inputs["encoder_head_mask"][idx] <add> if inputs["encoder_head_mask"] is not None <add> else None, <ide> past_key_value=past_key_value, <ide> ) <ide> <ide> def call( <ide> attention_mask=None, <ide> decoder_input_ids=None, <ide> decoder_attention_mask=None, <add> head_mask=None, <add> decoder_head_mask=None, <ide> encoder_outputs: Optional[Union[Tuple, TFBaseModelOutput]] = None, <ide> past_key_values=None, <ide> inputs_embeds=None, <ide> def call( <ide> attention_mask=attention_mask, <ide> decoder_input_ids=decoder_input_ids, <ide> decoder_attention_mask=decoder_attention_mask, <add> head_mask=head_mask, <add> decoder_head_mask=decoder_head_mask, <ide> encoder_outputs=encoder_outputs, <ide> past_key_values=past_key_values, <ide> inputs_embeds=inputs_embeds, <ide> def call( <ide> inputs["encoder_outputs"] = self.encoder( <ide> input_ids=inputs["input_ids"], <ide> attention_mask=inputs["attention_mask"], <add> head_mask=inputs["head_mask"], <ide> inputs_embeds=inputs["inputs_embeds"], <ide> output_attentions=inputs["output_attentions"], <ide> output_hidden_states=inputs["output_hidden_states"], <ide> def call( <ide> attention_mask=inputs["decoder_attention_mask"], <ide> encoder_hidden_states=inputs["encoder_outputs"][0], <ide> encoder_attention_mask=inputs["attention_mask"], <add> head_mask=inputs["decoder_head_mask"], <add> encoder_head_mask=inputs["head_mask"], <ide> past_key_values=inputs["past_key_values"], <ide> inputs_embeds=inputs["decoder_inputs_embeds"], <ide> use_cache=inputs["use_cache"], <ide> def call( <ide> attention_mask=None, <ide> decoder_input_ids=None, <ide> decoder_attention_mask=None, <add> head_mask=None, <add> decoder_head_mask=None, <ide> encoder_outputs: Optional[Union[Tuple, TFBaseModelOutput]] = None, <ide> past_key_values=None, <ide> inputs_embeds=None, <ide> def call( <ide> attention_mask=attention_mask, <ide> decoder_input_ids=decoder_input_ids, <ide> decoder_attention_mask=decoder_attention_mask, <add> head_mask=head_mask, <add> decoder_head_mask=decoder_head_mask, <ide> encoder_outputs=encoder_outputs, <ide> past_key_values=past_key_values, <ide> inputs_embeds=inputs_embeds, <ide> def call( <ide> attention_mask=inputs["attention_mask"], <ide> decoder_input_ids=inputs["decoder_input_ids"], <ide> 
decoder_attention_mask=inputs["decoder_attention_mask"], <add> head_mask=inputs["head_mask"], <add> decoder_head_mask=inputs["decoder_head_mask"], <ide> encoder_outputs=inputs["encoder_outputs"], <ide> past_key_values=inputs["past_key_values"], <ide> inputs_embeds=inputs["inputs_embeds"], <ide> def call( <ide> attention_mask=None, <ide> decoder_input_ids=None, <ide> decoder_attention_mask=None, <add> head_mask=None, <add> decoder_head_mask=None, <ide> encoder_outputs: Optional[TFBaseModelOutput] = None, <ide> past_key_values=None, <ide> inputs_embeds=None, <ide> def call( <ide> attention_mask=attention_mask, <ide> decoder_input_ids=decoder_input_ids, <ide> decoder_attention_mask=decoder_attention_mask, <add> head_mask=head_mask, <add> decoder_head_mask=decoder_head_mask, <ide> encoder_outputs=encoder_outputs, <ide> past_key_values=past_key_values, <ide> inputs_embeds=inputs_embeds, <ide> def call( <ide> decoder_input_ids=inputs["decoder_input_ids"], <ide> encoder_outputs=inputs["encoder_outputs"], <ide> decoder_attention_mask=inputs["decoder_attention_mask"], <add> head_mask=inputs["head_mask"], <add> decoder_head_mask=inputs["decoder_head_mask"], <ide> past_key_values=inputs["past_key_values"], <ide> inputs_embeds=inputs["inputs_embeds"], <ide> decoder_inputs_embeds=inputs["decoder_inputs_embeds"], <ide> def serving_output(self, output): <ide> ) <ide> <ide> # Copied from transformers.models.bart.modeling_tf_bart.TFBartForConditionalGeneration.prepare_inputs_for_generation <del> def prepare_inputs_for_generation(self, decoder_input_ids, past, attention_mask, use_cache, **kwargs) -> Dict: <add> def prepare_inputs_for_generation( <add> self, <add> decoder_input_ids, <add> past, <add> attention_mask, <add> head_mask=None, <add> use_cache=None, <add> **kwargs, <add> ) -> Dict: <ide> assert past is not None and len(past) in {1, 2}, f"past has to be an iterable of length 1,2 got {past}" <ide> if len(past) == 1: <ide> assert isinstance(past[0], tf.Tensor), f"`past[0]` has to be of type `tf.Tensor`, but is {type(past[0])}" <ide> def prepare_inputs_for_generation(self, decoder_input_ids, past, attention_mask, <ide> "past_key_values": past_key_values, <ide> "decoder_input_ids": decoder_input_ids, <ide> "attention_mask": attention_mask, <add> "head_mask": head_mask, <ide> "use_cache": use_cache, # change this to avoid caching (presumably for debugging) <ide> } <ide> <ide><path>src/transformers/models/marian/modeling_tf_marian.py <ide> def call( <ide> key_value_states: Optional[tf.Tensor] = None, <ide> past_key_value: Optional[Tuple[Tuple[tf.Tensor]]] = None, <ide> attention_mask: Optional[tf.Tensor] = None, <add> layer_head_mask: Optional[tf.Tensor] = None, <ide> training=False, <ide> ) -> Tuple[tf.Tensor, Optional[tf.Tensor]]: <ide> """Input shape: Batch x Time x Channel""" <ide> def call( <ide> <ide> attn_weights = tf.nn.softmax(attn_weights, axis=-1) <ide> <add> if layer_head_mask is not None: <add> tf.debugging.assert_equal( <add> shape_list(layer_head_mask), <add> [self.num_heads], <add> message=f"Head mask for a single layer should be of size {(self.num_heads)}, but is {shape_list(layer_head_mask)}", <add> ) <add> attn_weights = tf.reshape(layer_head_mask, (1, -1, 1, 1)) * tf.reshape( <add> attn_weights, (bsz, self.num_heads, tgt_len, src_len) <add> ) <add> attn_weights = attn_weights = tf.reshape(attn_weights, (bsz * self.num_heads, tgt_len, src_len)) <add> <ide> attn_probs = self.dropout(attn_weights, training=training) <ide> <ide> attn_output = tf.matmul(attn_probs, value_states) <ide> def 
__init__(self, config: MarianConfig, **kwargs): <ide> self.fc2 = tf.keras.layers.Dense(self.embed_dim, name="fc2") <ide> self.final_layer_norm = tf.keras.layers.LayerNormalization(epsilon=1e-5, name="final_layer_norm") <ide> <del> def call(self, hidden_states: tf.Tensor, attention_mask: tf.Tensor, training=False): <add> def call(self, hidden_states: tf.Tensor, attention_mask: tf.Tensor, layer_head_mask: tf.Tensor, training=False): <ide> """ <ide> Args: <ide> hidden_states (:obj:`tf.Tensor`): input to the layer of shape `(seq_len, batch, embed_dim)` <ide> attention_mask (:obj:`tf.Tensor`): attention mask of size <ide> `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. <add> layer_head_mask (:obj:`tf.Tensor`): mask for attention heads in a given layer of size <add> `(encoder_attention_heads,)` <ide> """ <ide> residual = hidden_states <ide> hidden_states, self_attn_weights, _ = self.self_attn( <del> hidden_states=hidden_states, attention_mask=attention_mask <add> hidden_states=hidden_states, attention_mask=attention_mask, layer_head_mask=layer_head_mask <ide> ) <ide> tf.debugging.assert_equal( <ide> shape_list(hidden_states), <ide> def call( <ide> attention_mask: Optional[tf.Tensor] = None, <ide> encoder_hidden_states: Optional[tf.Tensor] = None, <ide> encoder_attention_mask: Optional[tf.Tensor] = None, <add> layer_head_mask: Optional[tf.Tensor] = None, <add> encoder_layer_head_mask: Optional[tf.Tensor] = None, <ide> past_key_value: Optional[Tuple[tf.Tensor]] = None, <ide> training=False, <ide> ) -> Tuple[tf.Tensor, tf.Tensor, Tuple[Tuple[tf.Tensor]]]: <ide> def call( <ide> encoder_hidden_states (:obj:`tf.Tensor`): cross attention input to the layer of shape `(seq_len, batch, embed_dim)` <ide> encoder_attention_mask (:obj:`tf.Tensor`): encoder attention mask of size <ide> `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. <add> layer_head_mask (:obj:`tf.Tensor`): mask for attention heads in a given layer of size <add> `(decoder_attention_heads,)` <add> encoder_layer_head_mask (:obj:`tf.Tensor`): mask for encoder attention heads in a given layer of size <add> `(encoder_attention_heads,)` <ide> past_key_value (:obj:`Tuple(tf.Tensor)`): cached past key and value projection states <ide> """ <ide> residual = hidden_states <ide> def call( <ide> hidden_states=hidden_states, <ide> past_key_value=self_attn_past_key_value, <ide> attention_mask=attention_mask, <add> layer_head_mask=layer_head_mask, <ide> ) <ide> hidden_states = self.dropout(hidden_states, training=training) <ide> hidden_states = residual + hidden_states <ide> def call( <ide> hidden_states=hidden_states, <ide> key_value_states=encoder_hidden_states, <ide> attention_mask=encoder_attention_mask, <add> layer_head_mask=encoder_layer_head_mask, <ide> past_key_value=cross_attn_past_key_value, <ide> ) <ide> hidden_states = self.dropout(hidden_states, training=training) <ide> def serving(self, inputs): <ide> :obj:`past_key_values`). <ide> decoder_attention_mask (:obj:`tf.Tensor` of shape :obj:`(batch_size, target_sequence_length)`, `optional`): <ide> will be made by default and ignore pad tokens. It is not recommended to set this for most use cases. <add> head_mask (:obj:`tf.Tensor` of shape :obj:`(encoder_layers, encoder_attention_heads)`, `optional`): <add> Mask to nullify selected heads of the attention modules in the encoder. 
Mask values selected in ``[0, 1]``: <add> <add> - 1 indicates the head is **not masked**, <add> - 0 indicates the heas is **masked**. <add> <add> decoder_head_mask (:obj:`tf.Tensor` of shape :obj:`(decoder_layers, decoder_attention_heads)`, `optional`): <add> Mask to nullify selected heads of the attention modules in the decoder. Mask values selected in ``[0, 1]``: <add> <add> - 1 indicates the head is **not masked**, <add> - 0 indicates the head is **masked**. <add> <ide> encoder_outputs (:obj:`tf.FloatTensor`, `optional`): <ide> hidden states at the output of the last layer of the encoder. Used in the cross-attention of the decoder. <ide> of shape :obj:`(batch_size, sequence_length, hidden_size)` is a sequence of <ide> def call( <ide> input_ids=None, <ide> inputs_embeds=None, <ide> attention_mask=None, <add> head_mask=None, <ide> output_attentions=None, <ide> output_hidden_states=None, <ide> return_dict=None, <ide> def call( <ide> - 0 for tokens that are **masked**. <ide> <ide> `What are attention masks? <../glossary.html#attention-mask>`__ <add> head_mask (:obj:`tf.Tensor` of shape :obj:`(encoder_layers, encoder_attention_heads)`, `optional): <add> Mask to nullify selected heads of the attention modules. Mask values selected in ``[0, 1]``: <add> <add> - 1 indicates the head is **not masked**, <add> - 0 indicates the heas is **masked**. <add> <ide> inputs_embeds (:obj:`tf.Tensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`): <ide> Optionally, instead of passing :obj:`input_ids` you can choose to directly pass an embedded <ide> representation. This is useful if you want more control over how to convert :obj:`input_ids` indices <ide> def call( <ide> config=self.config, <ide> input_ids=input_ids, <ide> attention_mask=attention_mask, <add> head_mask=head_mask, <ide> inputs_embeds=inputs_embeds, <ide> output_attentions=output_attentions, <ide> output_hidden_states=output_hidden_states, <ide> def call( <ide> encoder_states = () if inputs["output_hidden_states"] else None <ide> all_attentions = () if inputs["output_attentions"] else None <ide> <add> # check if head_mask has a correct number of layers specified if desired <add> if inputs["head_mask"] is not None: <add> tf.debugging.assert_equal( <add> shape_list(inputs["head_mask"])[0], <add> len(self.layers), <add> message=f"The head_mask should be specified for {len(self.layers)} layers, but it is for {shape_list(inputs['head_mask'])[0]}.", <add> ) <ide> # encoder layers <del> for encoder_layer in self.layers: <add> for idx, encoder_layer in enumerate(self.layers): <ide> <ide> if inputs["output_hidden_states"]: <ide> encoder_states = encoder_states + (hidden_states,) <ide> def call( <ide> if inputs["training"] and (dropout_probability < self.layerdrop): # skip the layer <ide> continue <ide> <del> hidden_states, attn = encoder_layer(hidden_states, attention_mask) <add> hidden_states, attn = encoder_layer( <add> hidden_states, <add> attention_mask, <add> inputs["head_mask"][idx] if inputs["head_mask"] is not None else None, <add> ) <ide> <ide> if inputs["output_attentions"]: <ide> all_attentions += (attn,) <ide> def call( <ide> attention_mask=None, <ide> encoder_hidden_states=None, <ide> encoder_attention_mask=None, <add> head_mask=None, <add> encoder_head_mask=None, <ide> past_key_values=None, <ide> use_cache=None, <ide> output_attentions=None, <ide> def call( <ide> - 0 for tokens that are **masked**. <ide> <ide> `What are attention masks? 
<../glossary.html#attention-mask>`__ <add> head_mask (:obj:`tf.Tensor` of shape :obj:`(decoder_layers, decoder_attention_heads)`, `optional`): <add> Mask to nullify selected heads of the attention modules. Mask values selected in ``[0, 1]``: <add> <add> - 1 indicates the head is **not masked**, <add> - 0 indicates the heas is **masked**. <add> <add> encoder_head_mask (:obj:`tf.Tensor` of shape :obj:`(encoder_layers, encoder_attention_heads)`, `optional`): <add> Mask to nullify selected heads of the attention modules in encoder to avoid performing cross-attention <add> on hidden heads. Mask values selected in ``[0, 1]``: <add> <add> - 1 indicates the head is **not masked**, <add> - 0 indicates the heas is **masked**. <add> <ide> past_key_values (:obj:`Tuple[Tuple[tf.Tensor]]` of length :obj:`config.n_layers` with each tuple having 2 tuples each of which has 2 tensors of shape :obj:`(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`): <ide> Contains precomputed key and value hidden-states of the attention blocks. Can be used to speed up <ide> decoding. <ide> def call( <ide> attention_mask=attention_mask, <ide> encoder_hidden_states=encoder_hidden_states, <ide> encoder_attention_mask=encoder_attention_mask, <add> head_mask=head_mask, <add> encoder_head_mask=encoder_head_mask, <ide> inputs_embeds=inputs_embeds, <ide> past_key_values=past_key_values, <ide> use_cache=use_cache, <ide> def call( <ide> all_hidden_states = () <ide> all_self_attns = () <ide> present_key_values = () <add> <add> # check if head_mask has a correct number of layers specified if desired <add> if inputs["head_mask"] is not None: <add> tf.debugging.assert_equal( <add> shape_list(inputs["head_mask"])[0], <add> len(self.layers), <add> message=f"The head_mask should be specified for {len(self.layers)} layers, but it is for {shape_list(inputs['head_mask'])[0]}.", <add> ) <ide> for idx, decoder_layer in enumerate(self.layers): <ide> # add LayerDrop (see https://arxiv.org/abs/1909.11556 for description) <ide> if inputs["output_hidden_states"]: <ide> def call( <ide> attention_mask=combined_attention_mask, <ide> encoder_hidden_states=inputs["encoder_hidden_states"], <ide> encoder_attention_mask=inputs["encoder_attention_mask"], <add> layer_head_mask=inputs["head_mask"][idx] if inputs["head_mask"] is not None else None, <add> encoder_layer_head_mask=inputs["encoder_head_mask"][idx] <add> if inputs["encoder_head_mask"] is not None <add> else None, <ide> past_key_value=past_key_value, <ide> ) <ide> <ide> def call( <ide> attention_mask=None, <ide> decoder_input_ids=None, <ide> decoder_attention_mask=None, <add> head_mask=None, <add> decoder_head_mask=None, <ide> encoder_outputs: Optional[Union[Tuple, TFBaseModelOutput]] = None, <ide> past_key_values=None, <ide> inputs_embeds=None, <ide> def call( <ide> attention_mask=attention_mask, <ide> decoder_input_ids=decoder_input_ids, <ide> decoder_attention_mask=decoder_attention_mask, <add> head_mask=head_mask, <add> decoder_head_mask=decoder_head_mask, <ide> encoder_outputs=encoder_outputs, <ide> past_key_values=past_key_values, <ide> inputs_embeds=inputs_embeds, <ide> def call( <ide> inputs["encoder_outputs"] = self.encoder( <ide> input_ids=inputs["input_ids"], <ide> attention_mask=inputs["attention_mask"], <add> head_mask=inputs["head_mask"], <ide> inputs_embeds=inputs["inputs_embeds"], <ide> output_attentions=inputs["output_attentions"], <ide> output_hidden_states=inputs["output_hidden_states"], <ide> def call( <ide> attention_mask=inputs["decoder_attention_mask"], <ide> 
encoder_hidden_states=inputs["encoder_outputs"][0], <ide> encoder_attention_mask=inputs["attention_mask"], <add> head_mask=inputs["decoder_head_mask"], <add> encoder_head_mask=inputs["head_mask"], <ide> past_key_values=inputs["past_key_values"], <ide> inputs_embeds=inputs["decoder_inputs_embeds"], <ide> use_cache=inputs["use_cache"], <ide> def call( <ide> attention_mask=None, <ide> decoder_input_ids=None, <ide> decoder_attention_mask=None, <add> head_mask=None, <add> decoder_head_mask=None, <ide> encoder_outputs: Optional[Union[Tuple, TFBaseModelOutput]] = None, <ide> past_key_values=None, <ide> inputs_embeds=None, <ide> def call( <ide> input_ids=input_ids, <ide> attention_mask=attention_mask, <ide> decoder_input_ids=decoder_input_ids, <add> head_mask=head_mask, <add> decoder_head_mask=decoder_head_mask, <ide> decoder_attention_mask=decoder_attention_mask, <ide> encoder_outputs=encoder_outputs, <ide> past_key_values=past_key_values, <ide> def call( <ide> attention_mask=inputs["attention_mask"], <ide> decoder_input_ids=inputs["decoder_input_ids"], <ide> decoder_attention_mask=inputs["decoder_attention_mask"], <add> head_mask=inputs["head_mask"], <add> decoder_head_mask=inputs["decoder_head_mask"], <ide> encoder_outputs=inputs["encoder_outputs"], <ide> past_key_values=inputs["past_key_values"], <ide> inputs_embeds=inputs["inputs_embeds"], <ide> def call( <ide> attention_mask=None, <ide> decoder_input_ids=None, <ide> decoder_attention_mask=None, <add> head_mask=None, <add> decoder_head_mask=None, <ide> encoder_outputs: Optional[TFBaseModelOutput] = None, <ide> past_key_values=None, <ide> inputs_embeds=None, <ide> def call( <ide> attention_mask=attention_mask, <ide> decoder_input_ids=decoder_input_ids, <ide> decoder_attention_mask=decoder_attention_mask, <add> head_mask=head_mask, <add> decoder_head_mask=decoder_head_mask, <ide> encoder_outputs=encoder_outputs, <ide> past_key_values=past_key_values, <ide> inputs_embeds=inputs_embeds, <ide> def call( <ide> decoder_input_ids=inputs["decoder_input_ids"], <ide> encoder_outputs=inputs["encoder_outputs"], <ide> decoder_attention_mask=inputs["decoder_attention_mask"], <add> head_mask=inputs["head_mask"], <add> decoder_head_mask=inputs["decoder_head_mask"], <ide> past_key_values=inputs["past_key_values"], <ide> inputs_embeds=inputs["inputs_embeds"], <ide> decoder_inputs_embeds=inputs["decoder_inputs_embeds"], <ide> def serving_output(self, output): <ide> ) <ide> <ide> # Copied from transformers.models.bart.modeling_tf_bart.TFBartForConditionalGeneration.prepare_inputs_for_generation <del> def prepare_inputs_for_generation(self, decoder_input_ids, past, attention_mask, use_cache, **kwargs) -> Dict: <add> def prepare_inputs_for_generation( <add> self, <add> decoder_input_ids, <add> past, <add> attention_mask, <add> head_mask=None, <add> use_cache=None, <add> **kwargs, <add> ) -> Dict: <ide> assert past is not None and len(past) in {1, 2}, f"past has to be an iterable of length 1,2 got {past}" <ide> if len(past) == 1: <ide> assert isinstance(past[0], tf.Tensor), f"`past[0]` has to be of type `tf.Tensor`, but is {type(past[0])}" <ide> def prepare_inputs_for_generation(self, decoder_input_ids, past, attention_mask, <ide> "past_key_values": past_key_values, <ide> "decoder_input_ids": decoder_input_ids, <ide> "attention_mask": attention_mask, <add> "head_mask": head_mask, <ide> "use_cache": use_cache, # change this to avoid caching (presumably for debugging) <ide> } <ide> <ide><path>src/transformers/models/mbart/modeling_tf_mbart.py <ide> def call( <ide> 
key_value_states: Optional[tf.Tensor] = None, <ide> past_key_value: Optional[Tuple[Tuple[tf.Tensor]]] = None, <ide> attention_mask: Optional[tf.Tensor] = None, <add> layer_head_mask: Optional[tf.Tensor] = None, <ide> training=False, <ide> ) -> Tuple[tf.Tensor, Optional[tf.Tensor]]: <ide> """Input shape: Batch x Time x Channel""" <ide> def call( <ide> <ide> attn_weights = tf.nn.softmax(attn_weights, axis=-1) <ide> <add> if layer_head_mask is not None: <add> tf.debugging.assert_equal( <add> shape_list(layer_head_mask), <add> [self.num_heads], <add> message=f"Head mask for a single layer should be of size {(self.num_heads)}, but is {shape_list(layer_head_mask)}", <add> ) <add> attn_weights = tf.reshape(layer_head_mask, (1, -1, 1, 1)) * tf.reshape( <add> attn_weights, (bsz, self.num_heads, tgt_len, src_len) <add> ) <add> attn_weights = attn_weights = tf.reshape(attn_weights, (bsz * self.num_heads, tgt_len, src_len)) <add> <ide> attn_probs = self.dropout(attn_weights, training=training) <ide> <ide> attn_output = tf.matmul(attn_probs, value_states) <ide> def __init__(self, config: MBartConfig, **kwargs): <ide> self.fc2 = tf.keras.layers.Dense(self.embed_dim, name="fc2") <ide> self.final_layer_norm = tf.keras.layers.LayerNormalization(epsilon=1e-5, name="final_layer_norm") <ide> <del> def call(self, hidden_states: tf.Tensor, attention_mask: tf.Tensor, training=False): <add> def call(self, hidden_states: tf.Tensor, attention_mask: tf.Tensor, layer_head_mask: tf.Tensor, training=False): <ide> """ <ide> Args: <ide> hidden_states (:obj:`tf.Tensor`): input to the layer of shape `(seq_len, batch, embed_dim)` <ide> attention_mask (:obj:`tf.Tensor`): attention mask of size <ide> `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. <add> layer_head_mask (:obj:`tf.Tensor`): mask for attention heads in a given layer of size <add> `(encoder_attention_heads,)` <ide> """ <ide> residual = hidden_states <ide> hidden_states = self.self_attn_layer_norm(hidden_states) <ide> hidden_states, self_attn_weights, _ = self.self_attn( <del> hidden_states=hidden_states, attention_mask=attention_mask <add> hidden_states=hidden_states, attention_mask=attention_mask, layer_head_mask=layer_head_mask <ide> ) <ide> tf.debugging.assert_equal( <ide> shape_list(hidden_states), <ide> def call( <ide> attention_mask: Optional[tf.Tensor] = None, <ide> encoder_hidden_states: Optional[tf.Tensor] = None, <ide> encoder_attention_mask: Optional[tf.Tensor] = None, <add> layer_head_mask: Optional[tf.Tensor] = None, <add> encoder_layer_head_mask: Optional[tf.Tensor] = None, <ide> past_key_value: Optional[Tuple[tf.Tensor]] = None, <ide> training=False, <ide> ) -> Tuple[tf.Tensor, tf.Tensor, Tuple[Tuple[tf.Tensor]]]: <ide> def call( <ide> encoder_hidden_states (:obj:`tf.Tensor`): cross attention input to the layer of shape `(seq_len, batch, embed_dim)` <ide> encoder_attention_mask (:obj:`tf.Tensor`): encoder attention mask of size <ide> `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. 
<add> layer_head_mask (:obj:`tf.Tensor`): mask for attention heads in a given layer of size <add> `(decoder_attention_heads,)` <add> encoder_layer_head_mask (:obj:`tf.Tensor`): mask for encoder attention heads in a given layer of size <add> `(encoder_attention_heads,)` <ide> past_key_value (:obj:`Tuple(tf.Tensor)`): cached past key and value projection states <ide> """ <ide> residual = hidden_states <ide> def call( <ide> hidden_states=hidden_states, <ide> past_key_value=self_attn_past_key_value, <ide> attention_mask=attention_mask, <add> layer_head_mask=layer_head_mask, <ide> ) <ide> hidden_states = self.dropout(hidden_states, training=training) <ide> hidden_states = residual + hidden_states <ide> def call( <ide> hidden_states=hidden_states, <ide> key_value_states=encoder_hidden_states, <ide> attention_mask=encoder_attention_mask, <add> layer_head_mask=encoder_layer_head_mask, <ide> past_key_value=cross_attn_past_key_value, <ide> ) <ide> hidden_states = self.dropout(hidden_states, training=training) <ide> def serving(self, inputs): <ide> the right for denoising pre-training following the paper. <ide> decoder_attention_mask (:obj:`tf.Tensor` of shape :obj:`(batch_size, target_sequence_length)`, `optional`): <ide> will be made by default and ignore pad tokens. It is not recommended to set this for most use cases. <add> head_mask (:obj:`tf.Tensor` of shape :obj:`(encoder_layers, encoder_attention_heads)`, `optional`): <add> Mask to nullify selected heads of the attention modules in the encoder. Mask values selected in ``[0, 1]``: <add> <add> - 1 indicates the head is **not masked**, <add> - 0 indicates the heas is **masked**. <add> <add> decoder_head_mask (:obj:`tf.Tensor` of shape :obj:`(decoder_layers, decoder_attention_heads)`, `optional`): <add> Mask to nullify selected heads of the attention modules in the decoder. Mask values selected in ``[0, 1]``: <add> <add> - 1 indicates the head is **not masked**, <add> - 0 indicates the head is **masked**. <add> <ide> encoder_outputs (:obj:`tf.FloatTensor`, `optional`): <ide> hidden states at the output of the last layer of the encoder. Used in the cross-attention of the decoder. <ide> of shape :obj:`(batch_size, sequence_length, hidden_size)` is a sequence of <ide> def call( <ide> input_ids=None, <ide> inputs_embeds=None, <ide> attention_mask=None, <add> head_mask=None, <ide> output_attentions=None, <ide> output_hidden_states=None, <ide> return_dict=None, <ide> def call( <ide> - 0 for tokens that are **masked**. <ide> <ide> `What are attention masks? <../glossary.html#attention-mask>`__ <add> head_mask (:obj:`tf.Tensor` of shape :obj:`(encoder_layers, encoder_attention_heads)`, `optional): <add> Mask to nullify selected heads of the attention modules. Mask values selected in ``[0, 1]``: <add> <add> - 1 indicates the head is **not masked**, <add> - 0 indicates the heas is **masked**. <add> <ide> inputs_embeds (:obj:`tf.Tensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`): <ide> Optionally, instead of passing :obj:`input_ids` you can choose to directly pass an embedded <ide> representation. 
This is useful if you want more control over how to convert :obj:`input_ids` indices <ide> def call( <ide> config=self.config, <ide> input_ids=input_ids, <ide> attention_mask=attention_mask, <add> head_mask=head_mask, <ide> inputs_embeds=inputs_embeds, <ide> output_attentions=output_attentions, <ide> output_hidden_states=output_hidden_states, <ide> def call( <ide> encoder_states = () if inputs["output_hidden_states"] else None <ide> all_attentions = () if inputs["output_attentions"] else None <ide> <add> # check if head_mask has a correct number of layers specified if desired <add> if inputs["head_mask"] is not None: <add> tf.debugging.assert_equal( <add> shape_list(inputs["head_mask"])[0], <add> len(self.layers), <add> message=f"The head_mask should be specified for {len(self.layers)} layers, but it is for {shape_list(inputs['head_mask'])[0]}.", <add> ) <ide> # encoder layers <del> for encoder_layer in self.layers: <add> for idx, encoder_layer in enumerate(self.layers): <ide> <ide> if inputs["output_hidden_states"]: <ide> encoder_states = encoder_states + (hidden_states,) <ide> def call( <ide> if inputs["training"] and (dropout_probability < self.layerdrop): # skip the layer <ide> continue <ide> <del> hidden_states, attn = encoder_layer(hidden_states, attention_mask) <add> hidden_states, attn = encoder_layer( <add> hidden_states, <add> attention_mask, <add> inputs["head_mask"][idx] if inputs["head_mask"] is not None else None, <add> ) <ide> <ide> if inputs["output_attentions"]: <ide> all_attentions += (attn,) <ide> def call( <ide> attention_mask=None, <ide> encoder_hidden_states=None, <ide> encoder_attention_mask=None, <add> head_mask=None, <add> encoder_head_mask=None, <ide> past_key_values=None, <ide> use_cache=None, <ide> output_attentions=None, <ide> def call( <ide> - 0 for tokens that are **masked**. <ide> <ide> `What are attention masks? <../glossary.html#attention-mask>`__ <add> head_mask (:obj:`tf.Tensor` of shape :obj:`(decoder_layers, decoder_attention_heads)`, `optional`): <add> Mask to nullify selected heads of the attention modules. Mask values selected in ``[0, 1]``: <add> <add> - 1 indicates the head is **not masked**, <add> - 0 indicates the heas is **masked**. <add> <add> encoder_head_mask (:obj:`tf.Tensor` of shape :obj:`(encoder_layers, encoder_attention_heads)`, `optional`): <add> Mask to nullify selected heads of the attention modules in encoder to avoid performing cross-attention <add> on hidden heads. Mask values selected in ``[0, 1]``: <add> <add> - 1 indicates the head is **not masked**, <add> - 0 indicates the heas is **masked**. <add> <ide> past_key_values (:obj:`Tuple[Tuple[tf.Tensor]]` of length :obj:`config.n_layers` with each tuple having 2 tuples each of which has 2 tensors of shape :obj:`(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`): <ide> Contains precomputed key and value hidden-states of the attention blocks. Can be used to speed up <ide> decoding. 
<ide> def call( <ide> attention_mask=attention_mask, <ide> encoder_hidden_states=encoder_hidden_states, <ide> encoder_attention_mask=encoder_attention_mask, <add> head_mask=head_mask, <add> encoder_head_mask=encoder_head_mask, <ide> inputs_embeds=inputs_embeds, <ide> past_key_values=past_key_values, <ide> use_cache=use_cache, <ide> def call( <ide> all_hidden_states = () <ide> all_self_attns = () <ide> present_key_values = () <add> <add> # check if head_mask has a correct number of layers specified if desired <add> if inputs["head_mask"] is not None: <add> tf.debugging.assert_equal( <add> shape_list(inputs["head_mask"])[0], <add> len(self.layers), <add> message=f"The head_mask should be specified for {len(self.layers)} layers, but it is for {shape_list(inputs['head_mask'])[0]}.", <add> ) <ide> for idx, decoder_layer in enumerate(self.layers): <ide> # add LayerDrop (see https://arxiv.org/abs/1909.11556 for description) <ide> if inputs["output_hidden_states"]: <ide> def call( <ide> attention_mask=combined_attention_mask, <ide> encoder_hidden_states=inputs["encoder_hidden_states"], <ide> encoder_attention_mask=inputs["encoder_attention_mask"], <add> layer_head_mask=inputs["head_mask"][idx] if inputs["head_mask"] is not None else None, <add> encoder_layer_head_mask=inputs["encoder_head_mask"][idx] <add> if inputs["encoder_head_mask"] is not None <add> else None, <ide> past_key_value=past_key_value, <ide> ) <ide> <ide> def call( <ide> attention_mask=None, <ide> decoder_input_ids=None, <ide> decoder_attention_mask=None, <add> head_mask=None, <add> decoder_head_mask=None, <ide> encoder_outputs: Optional[Union[Tuple, TFBaseModelOutput]] = None, <ide> past_key_values=None, <ide> inputs_embeds=None, <ide> def call( <ide> attention_mask=attention_mask, <ide> decoder_input_ids=decoder_input_ids, <ide> decoder_attention_mask=decoder_attention_mask, <add> head_mask=head_mask, <add> decoder_head_mask=decoder_head_mask, <ide> encoder_outputs=encoder_outputs, <ide> past_key_values=past_key_values, <ide> inputs_embeds=inputs_embeds, <ide> def call( <ide> inputs["encoder_outputs"] = self.encoder( <ide> input_ids=inputs["input_ids"], <ide> attention_mask=inputs["attention_mask"], <add> head_mask=inputs["head_mask"], <ide> inputs_embeds=inputs["inputs_embeds"], <ide> output_attentions=inputs["output_attentions"], <ide> output_hidden_states=inputs["output_hidden_states"], <ide> def call( <ide> attention_mask=inputs["decoder_attention_mask"], <ide> encoder_hidden_states=inputs["encoder_outputs"][0], <ide> encoder_attention_mask=inputs["attention_mask"], <add> head_mask=inputs["decoder_head_mask"], <add> encoder_head_mask=inputs["head_mask"], <ide> past_key_values=inputs["past_key_values"], <ide> inputs_embeds=inputs["decoder_inputs_embeds"], <ide> use_cache=inputs["use_cache"], <ide> def call( <ide> attention_mask=None, <ide> decoder_input_ids=None, <ide> decoder_attention_mask=None, <add> head_mask=None, <add> decoder_head_mask=None, <ide> encoder_outputs: Optional[Union[Tuple, TFBaseModelOutput]] = None, <ide> past_key_values=None, <ide> inputs_embeds=None, <ide> def call( <ide> attention_mask=attention_mask, <ide> decoder_input_ids=decoder_input_ids, <ide> decoder_attention_mask=decoder_attention_mask, <add> head_mask=head_mask, <add> decoder_head_mask=decoder_head_mask, <ide> encoder_outputs=encoder_outputs, <ide> past_key_values=past_key_values, <ide> inputs_embeds=inputs_embeds, <ide> def call( <ide> attention_mask=inputs["attention_mask"], <ide> decoder_input_ids=inputs["decoder_input_ids"], <ide> 
decoder_attention_mask=inputs["decoder_attention_mask"], <add> head_mask=inputs["head_mask"], <add> decoder_head_mask=inputs["decoder_head_mask"], <ide> encoder_outputs=inputs["encoder_outputs"], <ide> past_key_values=inputs["past_key_values"], <ide> inputs_embeds=inputs["inputs_embeds"], <ide> def call( <ide> attention_mask=None, <ide> decoder_input_ids=None, <ide> decoder_attention_mask=None, <add> head_mask=None, <add> decoder_head_mask=None, <ide> encoder_outputs: Optional[TFBaseModelOutput] = None, <ide> past_key_values=None, <ide> inputs_embeds=None, <ide> def call( <ide> attention_mask=attention_mask, <ide> decoder_input_ids=decoder_input_ids, <ide> decoder_attention_mask=decoder_attention_mask, <add> head_mask=head_mask, <add> decoder_head_mask=decoder_head_mask, <ide> encoder_outputs=encoder_outputs, <ide> past_key_values=past_key_values, <ide> inputs_embeds=inputs_embeds, <ide> def call( <ide> decoder_input_ids=inputs["decoder_input_ids"], <ide> encoder_outputs=inputs["encoder_outputs"], <ide> decoder_attention_mask=inputs["decoder_attention_mask"], <add> head_mask=inputs["head_mask"], <add> decoder_head_mask=inputs["decoder_head_mask"], <ide> past_key_values=inputs["past_key_values"], <ide> inputs_embeds=inputs["inputs_embeds"], <ide> decoder_inputs_embeds=inputs["decoder_inputs_embeds"], <ide> def serving_output(self, output): <ide> ) <ide> <ide> # Copied from transformers.models.bart.modeling_tf_bart.TFBartForConditionalGeneration.prepare_inputs_for_generation <del> def prepare_inputs_for_generation(self, decoder_input_ids, past, attention_mask, use_cache, **kwargs) -> Dict: <add> def prepare_inputs_for_generation( <add> self, <add> decoder_input_ids, <add> past, <add> attention_mask, <add> head_mask=None, <add> use_cache=None, <add> **kwargs, <add> ) -> Dict: <ide> assert past is not None and len(past) in {1, 2}, f"past has to be an iterable of length 1,2 got {past}" <ide> if len(past) == 1: <ide> assert isinstance(past[0], tf.Tensor), f"`past[0]` has to be of type `tf.Tensor`, but is {type(past[0])}" <ide> def prepare_inputs_for_generation(self, decoder_input_ids, past, attention_mask, <ide> "past_key_values": past_key_values, <ide> "decoder_input_ids": decoder_input_ids, <ide> "attention_mask": attention_mask, <add> "head_mask": head_mask, <ide> "use_cache": use_cache, # change this to avoid caching (presumably for debugging) <ide> } <ide> <ide><path>src/transformers/models/pegasus/modeling_tf_pegasus.py <ide> def call( <ide> key_value_states: Optional[tf.Tensor] = None, <ide> past_key_value: Optional[Tuple[Tuple[tf.Tensor]]] = None, <ide> attention_mask: Optional[tf.Tensor] = None, <add> layer_head_mask: Optional[tf.Tensor] = None, <ide> training=False, <ide> ) -> Tuple[tf.Tensor, Optional[tf.Tensor]]: <ide> """Input shape: Batch x Time x Channel""" <ide> def call( <ide> <ide> attn_weights = tf.nn.softmax(attn_weights, axis=-1) <ide> <add> if layer_head_mask is not None: <add> tf.debugging.assert_equal( <add> shape_list(layer_head_mask), <add> [self.num_heads], <add> message=f"Head mask for a single layer should be of size {(self.num_heads)}, but is {shape_list(layer_head_mask)}", <add> ) <add> attn_weights = tf.reshape(layer_head_mask, (1, -1, 1, 1)) * tf.reshape( <add> attn_weights, (bsz, self.num_heads, tgt_len, src_len) <add> ) <add> attn_weights = attn_weights = tf.reshape(attn_weights, (bsz * self.num_heads, tgt_len, src_len)) <add> <ide> attn_probs = self.dropout(attn_weights, training=training) <ide> <ide> attn_output = tf.matmul(attn_probs, value_states) <ide> def 
__init__(self, config: PegasusConfig, **kwargs): <ide> self.fc2 = tf.keras.layers.Dense(self.embed_dim, name="fc2") <ide> self.final_layer_norm = tf.keras.layers.LayerNormalization(epsilon=1e-5, name="final_layer_norm") <ide> <del> def call(self, hidden_states: tf.Tensor, attention_mask: tf.Tensor, training=False): <add> def call(self, hidden_states: tf.Tensor, attention_mask: tf.Tensor, layer_head_mask: tf.Tensor, training=False): <ide> """ <ide> Args: <ide> hidden_states (:obj:`tf.Tensor`): input to the layer of shape `(seq_len, batch, embed_dim)` <ide> attention_mask (:obj:`tf.Tensor`): attention mask of size <ide> `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. <add> layer_head_mask (:obj:`tf.Tensor`): mask for attention heads in a given layer of size <add> `(encoder_attention_heads,)` <ide> """ <ide> residual = hidden_states <ide> hidden_states = self.self_attn_layer_norm(hidden_states) <ide> hidden_states, self_attn_weights, _ = self.self_attn( <del> hidden_states=hidden_states, attention_mask=attention_mask <add> hidden_states=hidden_states, attention_mask=attention_mask, layer_head_mask=layer_head_mask <ide> ) <ide> tf.debugging.assert_equal( <ide> shape_list(hidden_states), <ide> def call( <ide> attention_mask: Optional[tf.Tensor] = None, <ide> encoder_hidden_states: Optional[tf.Tensor] = None, <ide> encoder_attention_mask: Optional[tf.Tensor] = None, <add> layer_head_mask: Optional[tf.Tensor] = None, <add> encoder_layer_head_mask: Optional[tf.Tensor] = None, <ide> past_key_value: Optional[Tuple[tf.Tensor]] = None, <ide> training=False, <ide> ) -> Tuple[tf.Tensor, tf.Tensor, Tuple[Tuple[tf.Tensor]]]: <ide> def call( <ide> encoder_hidden_states (:obj:`tf.Tensor`): cross attention input to the layer of shape `(seq_len, batch, embed_dim)` <ide> encoder_attention_mask (:obj:`tf.Tensor`): encoder attention mask of size <ide> `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. <add> layer_head_mask (:obj:`tf.Tensor`): mask for attention heads in a given layer of size <add> `(decoder_attention_heads,)` <add> encoder_layer_head_mask (:obj:`tf.Tensor`): mask for encoder attention heads in a given layer of size <add> `(encoder_attention_heads,)` <ide> past_key_value (:obj:`Tuple(tf.Tensor)`): cached past key and value projection states <ide> """ <ide> residual = hidden_states <ide> def call( <ide> hidden_states=hidden_states, <ide> past_key_value=self_attn_past_key_value, <ide> attention_mask=attention_mask, <add> layer_head_mask=layer_head_mask, <ide> ) <ide> hidden_states = self.dropout(hidden_states, training=training) <ide> hidden_states = residual + hidden_states <ide> def call( <ide> hidden_states=hidden_states, <ide> key_value_states=encoder_hidden_states, <ide> attention_mask=encoder_attention_mask, <add> layer_head_mask=encoder_layer_head_mask, <ide> past_key_value=cross_attn_past_key_value, <ide> ) <ide> hidden_states = self.dropout(hidden_states, training=training) <ide> def serving(self, inputs): <ide> the right for denoising pre-training following the paper. <ide> decoder_attention_mask (:obj:`tf.Tensor` of shape :obj:`(batch_size, target_sequence_length)`, `optional`): <ide> will be made by default and ignore pad tokens. It is not recommended to set this for most use cases. <add> head_mask (:obj:`tf.Tensor` of shape :obj:`(encoder_layers, encoder_attention_heads)`, `optional`): <add> Mask to nullify selected heads of the attention modules in the encoder. 
Mask values selected in ``[0, 1]``: <add> <add> - 1 indicates the head is **not masked**, <add> - 0 indicates the heas is **masked**. <add> <add> decoder_head_mask (:obj:`tf.Tensor` of shape :obj:`(decoder_layers, decoder_attention_heads)`, `optional`): <add> Mask to nullify selected heads of the attention modules in the decoder. Mask values selected in ``[0, 1]``: <add> <add> - 1 indicates the head is **not masked**, <add> - 0 indicates the head is **masked**. <add> <ide> encoder_outputs (:obj:`tf.FloatTensor`, `optional`): <ide> hidden states at the output of the last layer of the encoder. Used in the cross-attention of the decoder. <ide> of shape :obj:`(batch_size, sequence_length, hidden_size)` is a sequence of <ide> def call( <ide> input_ids=None, <ide> inputs_embeds=None, <ide> attention_mask=None, <add> head_mask=None, <ide> output_attentions=None, <ide> output_hidden_states=None, <ide> return_dict=None, <ide> def call( <ide> - 0 for tokens that are **masked**. <ide> <ide> `What are attention masks? <../glossary.html#attention-mask>`__ <add> head_mask (:obj:`tf.Tensor` of shape :obj:`(encoder_layers, encoder_attention_heads)`, `optional): <add> Mask to nullify selected heads of the attention modules. Mask values selected in ``[0, 1]``: <add> <add> - 1 indicates the head is **not masked**, <add> - 0 indicates the heas is **masked**. <add> <ide> inputs_embeds (:obj:`tf.Tensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`): <ide> Optionally, instead of passing :obj:`input_ids` you can choose to directly pass an embedded <ide> representation. This is useful if you want more control over how to convert :obj:`input_ids` indices <ide> def call( <ide> config=self.config, <ide> input_ids=input_ids, <ide> attention_mask=attention_mask, <add> head_mask=head_mask, <ide> inputs_embeds=inputs_embeds, <ide> output_attentions=output_attentions, <ide> output_hidden_states=output_hidden_states, <ide> def call( <ide> encoder_states = () if inputs["output_hidden_states"] else None <ide> all_attentions = () if inputs["output_attentions"] else None <ide> <add> # check if head_mask has a correct number of layers specified if desired <add> if inputs["head_mask"] is not None: <add> tf.debugging.assert_equal( <add> shape_list(inputs["head_mask"])[0], <add> len(self.layers), <add> message=f"The head_mask should be specified for {len(self.layers)} layers, but it is for {shape_list(inputs['head_mask'])[0]}.", <add> ) <ide> # encoder layers <del> for encoder_layer in self.layers: <add> for idx, encoder_layer in enumerate(self.layers): <ide> <ide> if inputs["output_hidden_states"]: <ide> encoder_states = encoder_states + (hidden_states,) <ide> def call( <ide> if inputs["training"] and (dropout_probability < self.layerdrop): # skip the layer <ide> continue <ide> <del> hidden_states, attn = encoder_layer(hidden_states, attention_mask) <add> hidden_states, attn = encoder_layer( <add> hidden_states, <add> attention_mask, <add> inputs["head_mask"][idx] if inputs["head_mask"] is not None else None, <add> ) <ide> <ide> if inputs["output_attentions"]: <ide> all_attentions += (attn,) <ide> def call( <ide> attention_mask=None, <ide> encoder_hidden_states=None, <ide> encoder_attention_mask=None, <add> head_mask=None, <add> encoder_head_mask=None, <ide> past_key_values=None, <ide> use_cache=None, <ide> output_attentions=None, <ide> def call( <ide> - 0 for tokens that are **masked**. <ide> <ide> `What are attention masks? 
<../glossary.html#attention-mask>`__ <add> head_mask (:obj:`tf.Tensor` of shape :obj:`(decoder_layers, decoder_attention_heads)`, `optional`): <add> Mask to nullify selected heads of the attention modules. Mask values selected in ``[0, 1]``: <add> <add> - 1 indicates the head is **not masked**, <add> - 0 indicates the heas is **masked**. <add> <add> encoder_head_mask (:obj:`tf.Tensor` of shape :obj:`(encoder_layers, encoder_attention_heads)`, `optional`): <add> Mask to nullify selected heads of the attention modules in encoder to avoid performing cross-attention <add> on hidden heads. Mask values selected in ``[0, 1]``: <add> <add> - 1 indicates the head is **not masked**, <add> - 0 indicates the heas is **masked**. <add> <ide> past_key_values (:obj:`Tuple[Tuple[tf.Tensor]]` of length :obj:`config.n_layers` with each tuple having 2 tuples each of which has 2 tensors of shape :obj:`(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`): <ide> Contains precomputed key and value hidden-states of the attention blocks. Can be used to speed up <ide> decoding. <ide> def call( <ide> attention_mask=attention_mask, <ide> encoder_hidden_states=encoder_hidden_states, <ide> encoder_attention_mask=encoder_attention_mask, <add> head_mask=head_mask, <add> encoder_head_mask=encoder_head_mask, <ide> inputs_embeds=inputs_embeds, <ide> past_key_values=past_key_values, <ide> use_cache=use_cache, <ide> def call( <ide> all_hidden_states = () <ide> all_self_attns = () <ide> present_key_values = () <add> <add> # check if head_mask has a correct number of layers specified if desired <add> if inputs["head_mask"] is not None: <add> tf.debugging.assert_equal( <add> shape_list(inputs["head_mask"])[0], <add> len(self.layers), <add> message=f"The head_mask should be specified for {len(self.layers)} layers, but it is for {shape_list(inputs['head_mask'])[0]}.", <add> ) <ide> for idx, decoder_layer in enumerate(self.layers): <ide> # add LayerDrop (see https://arxiv.org/abs/1909.11556 for description) <ide> if inputs["output_hidden_states"]: <ide> def call( <ide> attention_mask=combined_attention_mask, <ide> encoder_hidden_states=inputs["encoder_hidden_states"], <ide> encoder_attention_mask=inputs["encoder_attention_mask"], <add> layer_head_mask=inputs["head_mask"][idx] if inputs["head_mask"] is not None else None, <add> encoder_layer_head_mask=inputs["encoder_head_mask"][idx] <add> if inputs["encoder_head_mask"] is not None <add> else None, <ide> past_key_value=past_key_value, <ide> ) <ide> <ide> def call( <ide> attention_mask=None, <ide> decoder_input_ids=None, <ide> decoder_attention_mask=None, <add> head_mask=None, <add> decoder_head_mask=None, <ide> encoder_outputs: Optional[Union[Tuple, TFBaseModelOutput]] = None, <ide> past_key_values=None, <ide> inputs_embeds=None, <ide> def call( <ide> attention_mask=attention_mask, <ide> decoder_input_ids=decoder_input_ids, <ide> decoder_attention_mask=decoder_attention_mask, <add> head_mask=head_mask, <add> decoder_head_mask=decoder_head_mask, <ide> encoder_outputs=encoder_outputs, <ide> past_key_values=past_key_values, <ide> inputs_embeds=inputs_embeds, <ide> def call( <ide> inputs["encoder_outputs"] = self.encoder( <ide> input_ids=inputs["input_ids"], <ide> attention_mask=inputs["attention_mask"], <add> head_mask=inputs["head_mask"], <ide> inputs_embeds=inputs["inputs_embeds"], <ide> output_attentions=inputs["output_attentions"], <ide> output_hidden_states=inputs["output_hidden_states"], <ide> def call( <ide> attention_mask=inputs["decoder_attention_mask"], <ide> 
encoder_hidden_states=inputs["encoder_outputs"][0], <ide> encoder_attention_mask=inputs["attention_mask"], <add> head_mask=inputs["decoder_head_mask"], <add> encoder_head_mask=inputs["head_mask"], <ide> past_key_values=inputs["past_key_values"], <ide> inputs_embeds=inputs["decoder_inputs_embeds"], <ide> use_cache=inputs["use_cache"], <ide> def call( <ide> attention_mask=None, <ide> decoder_input_ids=None, <ide> decoder_attention_mask=None, <add> head_mask=None, <add> decoder_head_mask=None, <ide> encoder_outputs: Optional[Union[Tuple, TFBaseModelOutput]] = None, <ide> past_key_values=None, <ide> inputs_embeds=None, <ide> def call( <ide> attention_mask=attention_mask, <ide> decoder_input_ids=decoder_input_ids, <ide> decoder_attention_mask=decoder_attention_mask, <add> head_mask=head_mask, <add> decoder_head_mask=decoder_head_mask, <ide> encoder_outputs=encoder_outputs, <ide> past_key_values=past_key_values, <ide> inputs_embeds=inputs_embeds, <ide> def call( <ide> attention_mask=inputs["attention_mask"], <ide> decoder_input_ids=inputs["decoder_input_ids"], <ide> decoder_attention_mask=inputs["decoder_attention_mask"], <add> head_mask=inputs["head_mask"], <add> decoder_head_mask=inputs["decoder_head_mask"], <ide> encoder_outputs=inputs["encoder_outputs"], <ide> past_key_values=inputs["past_key_values"], <ide> inputs_embeds=inputs["inputs_embeds"], <ide> def call( <ide> attention_mask=None, <ide> decoder_input_ids=None, <ide> decoder_attention_mask=None, <add> head_mask=None, <add> decoder_head_mask=None, <ide> encoder_outputs: Optional[TFBaseModelOutput] = None, <ide> past_key_values=None, <ide> inputs_embeds=None, <ide> def call( <ide> attention_mask=attention_mask, <ide> decoder_input_ids=decoder_input_ids, <ide> decoder_attention_mask=decoder_attention_mask, <add> head_mask=head_mask, <add> decoder_head_mask=decoder_head_mask, <ide> encoder_outputs=encoder_outputs, <ide> past_key_values=past_key_values, <ide> inputs_embeds=inputs_embeds, <ide> def call( <ide> decoder_input_ids=inputs["decoder_input_ids"], <ide> encoder_outputs=inputs["encoder_outputs"], <ide> decoder_attention_mask=inputs["decoder_attention_mask"], <add> head_mask=inputs["head_mask"], <add> decoder_head_mask=inputs["decoder_head_mask"], <ide> past_key_values=inputs["past_key_values"], <ide> inputs_embeds=inputs["inputs_embeds"], <ide> decoder_inputs_embeds=inputs["decoder_inputs_embeds"], <ide> def serving_output(self, output): <ide> ) <ide> <ide> # Copied from transformers.models.bart.modeling_tf_bart.TFBartForConditionalGeneration.prepare_inputs_for_generation <del> def prepare_inputs_for_generation(self, decoder_input_ids, past, attention_mask, use_cache, **kwargs) -> Dict: <add> def prepare_inputs_for_generation( <add> self, <add> decoder_input_ids, <add> past, <add> attention_mask, <add> head_mask=None, <add> use_cache=None, <add> **kwargs, <add> ) -> Dict: <ide> assert past is not None and len(past) in {1, 2}, f"past has to be an iterable of length 1,2 got {past}" <ide> if len(past) == 1: <ide> assert isinstance(past[0], tf.Tensor), f"`past[0]` has to be of type `tf.Tensor`, but is {type(past[0])}" <ide> def prepare_inputs_for_generation(self, decoder_input_ids, past, attention_mask, <ide> "past_key_values": past_key_values, <ide> "decoder_input_ids": decoder_input_ids, <ide> "attention_mask": attention_mask, <add> "head_mask": head_mask, <ide> "use_cache": use_cache, # change this to avoid caching (presumably for debugging) <ide> } <ide> <ide><path>tests/test_modeling_tf_albert.py <ide> class 
TFAlbertModelTest(TFModelTesterMixin, unittest.TestCase):
<ide> if is_tf_available()
<ide> else ()
<ide> )
<add> test_head_masking = False
<ide>
<ide> def setUp(self):
<ide> self.model_tester = TFAlbertModelTester(self)
<ide><path>tests/test_modeling_tf_bart.py
<ide> def check_decoder_model_past_large_inputs(self, config, inputs_dict):
<ide>
<ide> input_ids = input_ids[:1, :]
<ide> attention_mask = inputs_dict["attention_mask"][:1, :]
<add> head_mask = inputs_dict["head_mask"]
<ide> self.batch_size = 1
<ide>
<ide> # first forward pass
<del> outputs = model(input_ids, attention_mask=attention_mask, use_cache=True)
<add> outputs = model(input_ids, attention_mask=attention_mask, head_mask=head_mask, use_cache=True)
<ide>
<ide> output, past_key_values = outputs.to_tuple()
<ide> past_key_values = past_key_values[1]
<ide> def prepare_bart_inputs_dict(
<ide> decoder_input_ids,
<ide> attention_mask=None,
<ide> decoder_attention_mask=None,
<add> head_mask=None,
<add> decoder_head_mask=None,
<ide> ):
<ide> if attention_mask is None:
<ide> attention_mask = tf.cast(tf.math.not_equal(input_ids, config.pad_token_id), tf.int8)
<ide> def prepare_bart_inputs_dict(
<ide> ],
<ide> axis=-1,
<ide> )
<add> if head_mask is None:
<add> head_mask = tf.ones((config.encoder_layers, config.encoder_attention_heads))
<add> if decoder_head_mask is None:
<add> decoder_head_mask = tf.ones((config.decoder_layers, config.decoder_attention_heads))
<ide> return {
<ide> "input_ids": input_ids,
<ide> "decoder_input_ids": decoder_input_ids,
<ide> "attention_mask": attention_mask,
<ide> "decoder_attention_mask": decoder_attention_mask,
<add> "head_mask": head_mask,
<add> "decoder_head_mask": decoder_head_mask,
<ide> }
<ide>
<ide>
<ide> class TFBartModelTest(TFModelTesterMixin, unittest.TestCase):
<ide> all_generative_model_classes = (TFBartForConditionalGeneration,) if is_tf_available() else ()
<ide> is_encoder_decoder = True
<ide> test_pruning = False
<add> test_head_masking = True
<ide>
<ide> def setUp(self):
<ide> self.model_tester = TFBartModelTester(self)
<ide><path>tests/test_modeling_tf_bert.py
<ide> class TFBertModelTest(TFModelTesterMixin, unittest.TestCase):
<ide> if is_tf_available()
<ide> else ()
<ide> )
<add> test_head_masking = False
<ide>
<ide> # special case for ForPreTraining model
<ide> def _prepare_for_class(self, inputs_dict, model_class, return_labels=False):
<ide><path>tests/test_modeling_tf_blenderbot.py
<ide> def check_decoder_model_past_large_inputs(self, config, inputs_dict):
<ide>
<ide> input_ids = input_ids[:1, :]
<ide> attention_mask = inputs_dict["attention_mask"][:1, :]
<add> head_mask = inputs_dict["head_mask"]
<ide> self.batch_size = 1
<ide>
<ide> # first forward pass
<del> outputs = model(input_ids, attention_mask=attention_mask, use_cache=True)
<add> outputs = model(input_ids, attention_mask=attention_mask, head_mask=head_mask, use_cache=True)
<ide>
<ide> output, past_key_values = outputs.to_tuple()
<ide> past_key_values = past_key_values[1]
<ide> def prepare_blenderbot_inputs_dict(
<ide> decoder_input_ids,
<ide> attention_mask=None,
<ide> decoder_attention_mask=None,
<add> head_mask=None,
<add> decoder_head_mask=None,
<ide> ):
<ide> if attention_mask is None:
<ide> attention_mask = tf.cast(tf.math.not_equal(input_ids, config.pad_token_id), tf.int8)
<ide> def prepare_blenderbot_inputs_dict(
<ide> ],
<ide> axis=-1,
<ide> )
<add> if head_mask is None:
<add> head_mask = tf.ones((config.encoder_layers, config.encoder_attention_heads))
<add> if decoder_head_mask is None:
<add> decoder_head_mask = 
tf.ones((config.decoder_layers, config.decoder_attention_heads)) <ide> return { <ide> "input_ids": input_ids, <ide> "decoder_input_ids": decoder_input_ids, <ide> "attention_mask": attention_mask, <ide> "decoder_attention_mask": decoder_attention_mask, <add> "head_mask": head_mask, <add> "decoder_head_mask": decoder_head_mask, <ide> } <ide> <ide> <ide> class TFBlenderbotModelTest(TFModelTesterMixin, unittest.TestCase): <ide> all_generative_model_classes = (TFBlenderbotForConditionalGeneration,) if is_tf_available() else () <ide> is_encoder_decoder = True <ide> test_pruning = False <add> test_head_masking = True <ide> <ide> def setUp(self): <ide> self.model_tester = TFBlenderbotModelTester(self) <ide><path>tests/test_modeling_tf_blenderbot_small.py <ide> def check_decoder_model_past_large_inputs(self, config, inputs_dict): <ide> <ide> input_ids = input_ids[:1, :] <ide> attention_mask = inputs_dict["attention_mask"][:1, :] <add> head_mask = inputs_dict["head_mask"] <ide> self.batch_size = 1 <ide> <ide> # first forward pass <del> outputs = model(input_ids, attention_mask=attention_mask, use_cache=True) <add> outputs = model(input_ids, attention_mask=attention_mask, head_mask=head_mask, use_cache=True) <ide> <ide> output, past_key_values = outputs.to_tuple() <ide> past_key_values = past_key_values[1] <ide> def prepare_blenderbot_small_inputs_dict( <ide> decoder_input_ids, <ide> attention_mask=None, <ide> decoder_attention_mask=None, <add> head_mask=None, <add> decoder_head_mask=None, <ide> ): <ide> if attention_mask is None: <ide> attention_mask = tf.cast(tf.math.not_equal(input_ids, config.pad_token_id), tf.int8) <ide> def prepare_blenderbot_small_inputs_dict( <ide> ], <ide> axis=-1, <ide> ) <add> if head_mask is None: <add> head_mask = tf.ones((config.encoder_layers, config.encoder_attention_heads)) <add> if decoder_head_mask is None: <add> decoder_head_mask = tf.ones((config.decoder_layers, config.decoder_attention_heads)) <ide> return { <ide> "input_ids": input_ids, <ide> "decoder_input_ids": decoder_input_ids, <ide> "attention_mask": attention_mask, <ide> "decoder_attention_mask": decoder_attention_mask, <add> "head_mask": head_mask, <add> "decoder_head_mask": decoder_head_mask, <ide> } <ide> <ide> <ide> class TFBlenderbotSmallModelTest(TFModelTesterMixin, unittest.TestCase): <ide> all_generative_model_classes = (TFBlenderbotSmallForConditionalGeneration,) if is_tf_available() else () <ide> is_encoder_decoder = True <ide> test_pruning = False <add> test_head_masking = True <ide> <ide> def setUp(self): <ide> self.model_tester = TFBlenderbotSmallModelTester(self) <ide><path>tests/test_modeling_tf_common.py <ide> def test_pt_tf_model_equivalence(self): <ide> <ide> def test_train_pipeline_custom_model(self): <ide> config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common() <add> # head_mask and decoder_head_mask has different shapes than other input args <add> if "head_mask" in inputs_dict: <add> del inputs_dict["head_mask"] <add> if "decoder_head_mask" in inputs_dict: <add> del inputs_dict["decoder_head_mask"] <ide> tf_main_layer_classes = set( <ide> module_member <ide> for model_class in self.all_model_classes <ide> def check_encoder_attentions_output(outputs): <ide> self.assertEqual(model.config.output_hidden_states, True) <ide> check_encoder_attentions_output(outputs) <ide> <add> def test_headmasking(self): <add> if not self.test_head_masking: <add> return <add> <add> random.Random().seed(42) <add> config, inputs_dict = 
self.model_tester.prepare_config_and_inputs_for_common()
<add> random.Random().seed()
<add>
<add> inputs_dict["output_attentions"] = True
<add> config.output_hidden_states = True
<add> configs_no_init = _config_zero_init(config) # To be sure we have no Nan
<add> for model_class in self.all_model_classes:
<add> model = model_class(config=configs_no_init)
<add>
<add> # Prepare head_mask
<add> def prepare_layer_head_mask(i, attention_heads, num_hidden_layers):
<add> if i == 0:
<add> return tf.concat(
<add> (tf.zeros(1, dtype=tf.float32), tf.ones(attention_heads - 1, dtype=tf.float32)), 0
<add> )
<add> elif i == num_hidden_layers - 1:
<add> return tf.concat(
<add> (tf.zeros(attention_heads - 1, dtype=tf.float32), tf.ones(1, dtype=tf.float32)), 0
<add> )
<add> else:
<add> return tf.ones(attention_heads, dtype=tf.float32)
<add>
<add> head_mask = tf.stack(
<add> [
<add> prepare_layer_head_mask(i, config.num_attention_heads, config.num_hidden_layers)
<add> for i in range(config.num_hidden_layers)
<add> ],
<add> 0,
<add> )
<add>
<add> inputs = self._prepare_for_class(inputs_dict, model_class).copy()
<add> inputs["head_mask"] = head_mask
<add> if model.config.is_encoder_decoder:
<add> signature = inspect.signature(model.call)
<add> arg_names = [*signature.parameters.keys()]
<add> if "decoder_head_mask" in arg_names: # necessary differentiation because of T5 model
<add> inputs["decoder_head_mask"] = head_mask
<add>
<add> outputs = model(**inputs, return_dict=True)
<add>
<add> def check_attentions_validity(attentions):
<add> # Remove Nan
<add> for t in attentions:
<add> self.assertLess(
<add> (tf.math.reduce_sum(tf.cast(tf.math.is_nan(t), tf.float32))).numpy(), (tf.size(t) / 4).numpy()
<add> ) # Check we don't have more than 25% nans (arbitrary)
<add>
<add> attentions = [
<add> tf.where(tf.math.is_nan(t), 0.0, t) for t in attentions
<add> ] # remove them (the test is less complete)
<add>
<add> self.assertAlmostEqual(tf.math.reduce_sum(attentions[0][..., 0, :, :]).numpy(), 0.0)
<add> self.assertNotEqual(tf.math.reduce_sum(attentions[0][..., -1, :, :]).numpy(), 0.0)
<add> if len(attentions) > 2: # encoder-decoder models have only 2 layers in each module
<add> self.assertNotEqual(tf.math.reduce_sum(attentions[1][..., 0, :, :]).numpy(), 0.0)
<add> self.assertAlmostEqual(tf.math.reduce_sum(attentions[-1][..., -2, :, :]).numpy(), 0.0)
<add> self.assertNotEqual(tf.math.reduce_sum(attentions[-1][..., -1, :, :]).numpy(), 0.0)
<add>
<add> if model.config.is_encoder_decoder:
<add> check_attentions_validity(outputs.encoder_attentions)
<add> check_attentions_validity(outputs.decoder_attentions)
<add> else:
<add> check_attentions_validity(outputs.attentions)
<add>
<ide> def test_hidden_states_output(self):
<ide> config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()
<ide>
<ide><path>tests/test_modeling_tf_ctrl.py
<ide> class TFCTRLModelTest(TFModelTesterMixin, unittest.TestCase):
<ide>
<ide> all_model_classes = (TFCTRLModel, TFCTRLLMHeadModel, TFCTRLForSequenceClassification) if is_tf_available() else ()
<ide> all_generative_model_classes = (TFCTRLLMHeadModel,) if is_tf_available() else ()
<add> test_head_masking = False
<ide>
<ide> def setUp(self):
<ide> self.model_tester = TFCTRLModelTester(self)
<ide><path>tests/test_modeling_tf_distilbert.py
<ide> class TFDistilBertModelTest(TFModelTesterMixin, unittest.TestCase):
<ide> if is_tf_available()
<ide> else None
<ide> )
<add> test_head_masking = False
<ide>
<ide> def setUp(self):
<ide> self.model_tester = TFDistilBertModelTester(self)
<ide><path>tests/test_modeling_tf_electra.py <ide> class TFElectraModelTest(TFModelTesterMixin, unittest.TestCase): <ide> if is_tf_available() <ide> else () <ide> ) <add> test_head_masking = False <ide> <ide> def setUp(self): <ide> self.model_tester = TFElectraModelTester(self) <ide><path>tests/test_modeling_tf_flaubert.py <ide> class TFFlaubertModelTest(TFModelTesterMixin, unittest.TestCase): <ide> all_generative_model_classes = ( <ide> (TFFlaubertWithLMHeadModel,) if is_tf_available() else () <ide> ) # TODO (PVP): Check other models whether language generation is also applicable <add> test_head_masking = False <ide> <ide> def setUp(self): <ide> self.model_tester = TFFlaubertModelTester(self) <ide><path>tests/test_modeling_tf_funnel.py <ide> class TFFunnelModelTest(TFModelTesterMixin, unittest.TestCase): <ide> if is_tf_available() <ide> else () <ide> ) <add> test_head_masking = False <ide> <ide> def setUp(self): <ide> self.model_tester = TFFunnelModelTester(self) <ide> class TFFunnelBaseModelTest(TFModelTesterMixin, unittest.TestCase): <ide> all_model_classes = ( <ide> (TFFunnelBaseModel, TFFunnelForMultipleChoice, TFFunnelForSequenceClassification) if is_tf_available() else () <ide> ) <add> test_head_masking = False <ide> <ide> def setUp(self): <ide> self.model_tester = TFFunnelModelTester(self, base=True) <ide><path>tests/test_modeling_tf_gpt2.py <ide> class TFGPT2ModelTest(TFModelTesterMixin, unittest.TestCase): <ide> else () <ide> ) <ide> all_generative_model_classes = (TFGPT2LMHeadModel,) if is_tf_available() else () <add> test_head_masking = False <ide> <ide> def setUp(self): <ide> self.model_tester = TFGPT2ModelTester(self) <ide><path>tests/test_modeling_tf_led.py <ide> class TFLEDModelTest(TFModelTesterMixin, unittest.TestCase): <ide> all_generative_model_classes = (TFLEDForConditionalGeneration,) if is_tf_available() else () <ide> is_encoder_decoder = True <ide> test_pruning = False <add> test_head_masking = False <ide> <ide> def setUp(self): <ide> self.model_tester = TFLEDModelTester(self) <ide><path>tests/test_modeling_tf_longformer.py <ide> class TFLongformerModelTest(TFModelTesterMixin, unittest.TestCase): <ide> if is_tf_available() <ide> else () <ide> ) <add> test_head_masking = False <ide> <ide> def setUp(self): <ide> self.model_tester = TFLongformerModelTester(self) <ide><path>tests/test_modeling_tf_lxmert.py <ide> def create_and_check_lxmert_for_pretraining( <ide> class TFLxmertModelTest(TFModelTesterMixin, unittest.TestCase): <ide> <ide> all_model_classes = (TFLxmertModel, TFLxmertForPreTraining) if is_tf_available() else () <add> test_head_masking = False <ide> <ide> def setUp(self): <ide> self.model_tester = TFLxmertModelTester(self) <ide><path>tests/test_modeling_tf_marian.py <ide> def check_decoder_model_past_large_inputs(self, config, inputs_dict): <ide> <ide> input_ids = input_ids[:1, :] <ide> attention_mask = inputs_dict["attention_mask"][:1, :] <add> head_mask = inputs_dict["head_mask"] <ide> self.batch_size = 1 <ide> <ide> # first forward pass <del> outputs = model(input_ids, attention_mask=attention_mask, use_cache=True) <add> outputs = model(input_ids, attention_mask=attention_mask, head_mask=head_mask, use_cache=True) <ide> <ide> output, past_key_values = outputs.to_tuple() <ide> past_key_values = past_key_values[1] <ide> def prepare_marian_inputs_dict( <ide> decoder_input_ids, <ide> attention_mask=None, <ide> decoder_attention_mask=None, <add> head_mask=None, <add> decoder_head_mask=None, <ide> ): <ide> if attention_mask is None: <ide> attention_mask = 
tf.cast(tf.math.not_equal(input_ids, config.pad_token_id), tf.int8) <ide> def prepare_marian_inputs_dict( <ide> ], <ide> axis=-1, <ide> ) <add> if head_mask is None: <add> head_mask = tf.ones((config.encoder_layers, config.encoder_attention_heads)) <add> if decoder_head_mask is None: <add> decoder_head_mask = tf.ones((config.decoder_layers, config.decoder_attention_heads)) <ide> return { <ide> "input_ids": input_ids, <ide> "decoder_input_ids": decoder_input_ids, <ide> "attention_mask": attention_mask, <ide> "decoder_attention_mask": decoder_attention_mask, <add> "head_mask": head_mask, <add> "decoder_head_mask": decoder_head_mask, <ide> } <ide> <ide> <ide> class TFMarianModelTest(TFModelTesterMixin, unittest.TestCase): <ide> all_generative_model_classes = (TFMarianMTModel,) if is_tf_available() else () <ide> is_encoder_decoder = True <ide> test_pruning = False <add> test_head_masking = True <ide> <ide> def setUp(self): <ide> self.model_tester = TFMarianModelTester(self) <ide><path>tests/test_modeling_tf_mbart.py <ide> def check_decoder_model_past_large_inputs(self, config, inputs_dict): <ide> <ide> input_ids = input_ids[:1, :] <ide> attention_mask = inputs_dict["attention_mask"][:1, :] <add> head_mask = inputs_dict["head_mask"] <ide> self.batch_size = 1 <ide> <ide> # first forward pass <del> outputs = model(input_ids, attention_mask=attention_mask, use_cache=True) <add> outputs = model(input_ids, attention_mask=attention_mask, head_mask=head_mask, use_cache=True) <ide> <ide> output, past_key_values = outputs.to_tuple() <ide> past_key_values = past_key_values[1] <ide> def prepare_mbart_inputs_dict( <ide> decoder_input_ids, <ide> attention_mask=None, <ide> decoder_attention_mask=None, <add> head_mask=None, <add> decoder_head_mask=None, <ide> ): <ide> if attention_mask is None: <ide> attention_mask = tf.cast(tf.math.not_equal(input_ids, config.pad_token_id), tf.int8) <ide> def prepare_mbart_inputs_dict( <ide> ], <ide> axis=-1, <ide> ) <add> if head_mask is None: <add> head_mask = tf.ones((config.encoder_layers, config.encoder_attention_heads)) <add> if decoder_head_mask is None: <add> decoder_head_mask = tf.ones((config.decoder_layers, config.decoder_attention_heads)) <ide> return { <ide> "input_ids": input_ids, <ide> "decoder_input_ids": decoder_input_ids, <ide> "attention_mask": attention_mask, <ide> "decoder_attention_mask": decoder_attention_mask, <add> "head_mask": head_mask, <add> "decoder_head_mask": head_mask, <ide> } <ide> <ide> <ide> class TFMBartModelTest(TFModelTesterMixin, unittest.TestCase): <ide> all_generative_model_classes = (TFMBartForConditionalGeneration,) if is_tf_available() else () <ide> is_encoder_decoder = True <ide> test_pruning = False <add> test_head_masking = True <ide> <ide> def setUp(self): <ide> self.model_tester = TFMBartModelTester(self) <ide><path>tests/test_modeling_tf_mobilebert.py <ide> class TFMobileBertModelTest(TFModelTesterMixin, unittest.TestCase): <ide> if is_tf_available() <ide> else () <ide> ) <add> test_head_masking = False <ide> <ide> class TFMobileBertModelTester(object): <ide> def __init__( <ide><path>tests/test_modeling_tf_mpnet.py <ide> class TFMPNetModelTest(TFModelTesterMixin, unittest.TestCase): <ide> if is_tf_available() <ide> else () <ide> ) <add> test_head_masking = False <ide> <ide> def setUp(self): <ide> self.model_tester = TFMPNetModelTester(self) <ide><path>tests/test_modeling_tf_openai.py <ide> class TFOpenAIGPTModelTest(TFModelTesterMixin, unittest.TestCase): <ide> all_generative_model_classes = ( <ide> (TFOpenAIGPTLMHeadModel,) 
if is_tf_available() else () <ide> ) # TODO (PVP): Add Double HeadsModel when generate() function is changed accordingly <add> test_head_masking = False <ide> <ide> def setUp(self): <ide> self.model_tester = TFOpenAIGPTModelTester(self) <ide><path>tests/test_modeling_tf_pegasus.py <ide> def check_decoder_model_past_large_inputs(self, config, inputs_dict): <ide> <ide> input_ids = input_ids[:1, :] <ide> attention_mask = inputs_dict["attention_mask"][:1, :] <add> head_mask = inputs_dict["head_mask"] <ide> self.batch_size = 1 <ide> <ide> # first forward pass <del> outputs = model(input_ids, attention_mask=attention_mask, use_cache=True) <add> outputs = model(input_ids, attention_mask=attention_mask, head_mask=head_mask, use_cache=True) <ide> <ide> output, past_key_values = outputs.to_tuple() <ide> past_key_values = past_key_values[1] <ide> def prepare_pegasus_inputs_dict( <ide> decoder_input_ids, <ide> attention_mask=None, <ide> decoder_attention_mask=None, <add> head_mask=None, <add> decoder_head_mask=None, <ide> ): <ide> if attention_mask is None: <ide> attention_mask = tf.cast(tf.math.not_equal(input_ids, config.pad_token_id), tf.int8) <ide> def prepare_pegasus_inputs_dict( <ide> ], <ide> axis=-1, <ide> ) <add> if head_mask is None: <add> head_mask = tf.ones((config.encoder_layers, config.encoder_attention_heads)) <add> if decoder_head_mask is None: <add> decoder_head_mask = tf.ones((config.decoder_layers, config.decoder_attention_heads)) <ide> return { <ide> "input_ids": input_ids, <ide> "decoder_input_ids": decoder_input_ids, <ide> "attention_mask": attention_mask, <ide> "decoder_attention_mask": decoder_attention_mask, <add> "head_mask": head_mask, <add> "decoder_head_mask": decoder_head_mask, <ide> } <ide> <ide> <ide> class TFPegasusModelTest(TFModelTesterMixin, unittest.TestCase): <ide> all_generative_model_classes = (TFPegasusForConditionalGeneration,) if is_tf_available() else () <ide> is_encoder_decoder = True <ide> test_pruning = False <add> test_head_masking = True <ide> <ide> def setUp(self): <ide> self.model_tester = TFPegasusModelTester(self) <ide><path>tests/test_modeling_tf_roberta.py <ide> class TFRobertaModelTest(TFModelTesterMixin, unittest.TestCase): <ide> if is_tf_available() <ide> else () <ide> ) <add> test_head_masking = False <ide> <ide> def setUp(self): <ide> self.model_tester = TFRobertaModelTester(self) <ide><path>tests/test_modeling_tf_t5.py <ide> class TFT5ModelTest(TFModelTesterMixin, unittest.TestCase): <ide> is_encoder_decoder = True <ide> all_model_classes = (TFT5Model, TFT5ForConditionalGeneration) if is_tf_available() else () <ide> all_generative_model_classes = (TFT5ForConditionalGeneration,) if is_tf_available() else () <add> test_head_masking = False <ide> <ide> def setUp(self): <ide> self.model_tester = TFT5ModelTester(self) <ide> def prepare_config_and_inputs_for_common(self): <ide> class TFT5EncoderOnlyModelTest(TFModelTesterMixin, unittest.TestCase): <ide> is_encoder_decoder = False <ide> all_model_classes = (TFT5EncoderModel,) if is_tf_available() else () <add> test_head_masking = False <ide> <ide> def setUp(self): <ide> self.model_tester = TFT5EncoderOnlyModelTester(self) <ide><path>tests/test_modeling_tf_transfo_xl.py <ide> class TFTransfoXLModelTest(TFModelTesterMixin, unittest.TestCase): <ide> all_generative_model_classes = () if is_tf_available() else () <ide> # TODO: add this test when TFTransfoXLLMHead has a linear output layer implemented <ide> test_resize_embeddings = False <add> test_head_masking = False <ide> <ide> def setUp(self): <ide> 
self.model_tester = TFTransfoXLModelTester(self) <ide><path>tests/test_modeling_tf_xlm.py <ide> class TFXLMModelTest(TFModelTesterMixin, unittest.TestCase): <ide> all_generative_model_classes = ( <ide> (TFXLMWithLMHeadModel,) if is_tf_available() else () <ide> ) # TODO (PVP): Check other models whether language generation is also applicable <add> test_head_masking = False <ide> <ide> def setUp(self): <ide> self.model_tester = TFXLMModelTester(self) <ide><path>tests/test_modeling_tf_xlnet.py <ide> class TFXLNetModelTest(TFModelTesterMixin, unittest.TestCase): <ide> all_generative_model_classes = ( <ide> (TFXLNetLMHeadModel,) if is_tf_available() else () <ide> ) # TODO (PVP): Check other models whether language generation is also applicable <add> test_head_masking = False <ide> <ide> def setUp(self): <ide> self.model_tester = TFXLNetModelTester(self)
32
Text
Text
clarify release process for new releasers
2f169ad58aefe7c9406fd2062da33172f87e3792
<ide><path>doc/contributing/releases.md <ide> failed build if you start again! <ide> Jenkins collects the artifacts from the builds, allowing you to download and <ide> install the new build. Make sure that the build appears correct. Check the <ide> version numbers, and perform some basic checks to confirm that all is well with <del>the build before moving forward. <add>the build before moving forward. Use the following list as a baseline: <add> <add>* `process.version` is as expected <add>* `process.release` is as expected <add>* `process.versions` is as expected (for example, `openssl` or `llhttp` version <add> must be in the expected updated version) <add>* npm version (check it matches what we expect) <add>* Run the test suite against the built binaries (optional) <add> <add>```console <add>./tools/test.py --shell ~/Downloads/node-v18.5.0-linux-x64/bin/node <add>``` <add> <add><sup>There may be test issues if the branch used to test does not match the Node.js binary.</sup> <ide> <ide> ### 11. Tag and sign the release commit <ide> <ide> include the release code name. <ide> The tag **must** be signed using the GPG key that's listed for you on the <ide> project README. <ide> <add>**Note**: Don't push the tag to remote at this point. <add> <ide> ### 12. Set up for the next release <ide> <ide> On release proposal branch, edit `src/node_version.h` again and:
1
PHP
PHP
fix deprecation warnings for event class
370c687456b27cd92469973c0f5a3edcce6330f0
<ide><path>src/Event/Event.php <ide> public function __get($attribute) <ide> { <ide> $method = 'get' . ucfirst($attribute); <ide> deprecationWarning( <del> "Event::${$attribute} is deprecated. " . <add> "Event::\${$attribute} is deprecated. " . <ide> "Use Event::{$method}() instead." <ide> ); <ide> if ($attribute === 'name' || $attribute === 'subject') { <ide> public function __set($attribute, $value) <ide> { <ide> $method = 'set' . ucfirst($attribute); <ide> deprecationWarning( <del> "Event::${$attribute} is deprecated. " . <add> "Event::\${$attribute} is deprecated. " . <ide> "Use Event::{$method}() instead." <ide> ); <ide> if ($attribute === 'data') {
1
Ruby
Ruby
fix typo in i18n_railtie.rb
70e3d1ea9d22c23f3d60fd1b5afad2a288a14221
<ide><path>activesupport/lib/active_support/i18n_railtie.rb <ide> def self.init_fallbacks(fallbacks) <ide> ActiveSupport::Deprecation.warn(<<-MSG.squish) <ide> Using I18n fallbacks with an empty `defaults` sets the defaults to <ide> include the `default_locale`. This behavior will change in Rails 6.1. <del> If you desire the default local to be included in the defaults, please <add> If you desire the default locale to be included in the defaults, please <ide> explicitly configure it with `config.i18n.fallbacks.defaults = <ide> [I18n.default_locale]` or `config.i18n.fallbacks = [I18n.default_locale, <ide> {...}]`
1
Text
Text
update language list and file link
edde1b7bf0217e76e962ec653ddaebc673292827
<ide><path>docs/how-to-work-on-localized-client-webapp.md <ide> Some of these files are translated on our translation platform (Crowdin), some a <ide> <ide> ## Testing the client app in a world language <ide> <del>You can test the client app in any language available in the [list of languages here](https://github.com/freeCodeCamp/freeCodeCamp/blob/6b4a6a02568b809fc216ea8566ff5df446d1da4e/config/i18n/all-langs.js#L5). <add>You can test the client app in any language available in the [list of languages here](https://github.com/freeCodeCamp/freeCodeCamp/blob/main/config/i18n/all-langs.ts). <ide> <ide> ```js <del> const availableLangs = { <del> client: ['english', 'espanol', 'chinese'], <del> ... <del> }; <add> export const availableLangs = { <add> client: [ <add> 'english', <add> 'espanol', <add> 'chinese', <add> 'chinese-traditional', <add> 'italian', <add> 'portuguese', <add> 'ukrainian', <add> 'japanese', <add> 'german' <add> ], <add> ... <add>}; <ide> ``` <ide> <ide> If you are testing a new language, create a folder with the language name as the title next to the other languages and copy the JSON files from another language into your new folder.
1
Text
Text
fix a tiny typo
1ee55a3b38ad2bcf4dea5d2dcc4f5371c84588c2
<ide><path>laravel/documentation/controllers.md <ide> <ide> Controllers are classes that are responsible for accepting user input and managing interactions between models, libraries, and views. Typically, they will ask a model for data, and then return a view that presents that data to the user. <ide> <del>The usage of controllers is the most common method of implementing application logic in modern web-development. However, Laravel also empowers developers to implement their application logic within routing declarations. This is explored in detail in the [routing document](/docs/routing). New users are encourage to start with controllers. There is nothing that route-based application logic can do that controllers can't. <add>The usage of controllers is the most common method of implementing application logic in modern web-development. However, Laravel also empowers developers to implement their application logic within routing declarations. This is explored in detail in the [routing document](/docs/routing). New users are encouraged to start with controllers. There is nothing that route-based application logic can do that controllers can't. <ide> <ide> Controller classes should be stored in **application/controllers** and should extend the Base\_Controller class. A Home\_Controller class is included with Laravel. <ide>
1
Javascript
Javascript
fix path resolution for states
4c3107d23844580eb81147c84af3a8fc21c48664
<ide><path>packages/ember-states/lib/state.js <del>var get = Ember.get, set = Ember.set; <add>var get = Ember.get, set = Ember.set, getPath = Ember.getPath; <ide> <ide> Ember.State = Ember.Object.extend({ <ide> isState: true, <ide> parentState: null, <ide> start: null, <add> name: null, <add> path: Ember.computed(function() { <add> var parentPath = getPath(this, 'parentState.path'), <add> path = get(this, 'name'); <add> <add> if (parentPath) { <add> path = parentPath + '.' + path; <add> } <add> <add> return path; <add> }).property().cacheable(), <ide> <ide> init: function() { <ide> var states = get(this, 'states'), foundStates; <add> var name; <ide> <ide> // As a convenience, loop over the properties <ide> // of this state and look for any that are other <ide> Ember.State = Ember.Object.extend({ <ide> if (!states) { <ide> states = {}; <ide> <del> for (var name in this) { <add> for (name in this) { <ide> if (name === "constructor") { continue; } <ide> this.setupChild(states, name, this[name]); <ide> } <ide> <ide> set(this, 'states', states); <ide> } else { <del> for (var name in states) { <add> for (name in states) { <ide> this.setupChild(states, name, states[name]); <ide> } <ide> } <ide> Ember.State = Ember.Object.extend({ <ide> if (!value) { return false; } <ide> <ide> if (Ember.State.detect(value)) { <del> value = value.create(); <add> value = value.create({ <add> name: name <add> }); <add> } else if (value.isState) { <add> set(value, 'name', name); <ide> } <ide> <ide> if (value.isState) { <ide> set(value, 'parentState', this); <del> set(value, 'name', (get(this, 'name') || '') + '.' + name); <ide> states[name] = value; <ide> } <ide> }, <ide><path>packages/ember-states/lib/state_manager.js <ide> Ember.StateManager = Ember.State.extend( <ide> if (parentState) { <ide> this.sendRecursively(event, parentState, context); <ide> } else if (get(this, 'errorOnUnhandledEvent')) { <del> throw new Ember.Error(this.toString() + " could not respond to event " + event + "."); <add> throw new Ember.Error(this.toString() + " could not respond to event " + event + " in state " + getPath(this, 'currentState.name') + "."); <ide> } <ide> } <ide> }, <ide><path>packages/ember-states/tests/state_test.js <ide> test("a state finds properties that are state classes and instantiates them", fu <ide> equal(get(states.state1, 'isState1'), true, "instantiated first state"); <ide> equal(get(states.state2, 'isState2'), true, "instantiated second state"); <ide> }); <add> <add>test("states set up proper names on their children", function() { <add> var manager = Ember.StateManager.create({ <add> states: { <add> first: Ember.State.extend({ <add> insideFirst: Ember.State.extend({ <add> <add> }) <add> }) <add> } <add> }); <add> <add> manager.goToState('first'); <add> equal(getPath(manager, 'currentState.path'), 'first'); <add> <add> manager.goToState('first.insideFirst'); <add> equal(getPath(manager, 'currentState.path'), 'first.insideFirst'); <add>}); <add> <add>test("states with child instances set up proper names on their children", function() { <add> var manager = Ember.StateManager.create({ <add> states: { <add> first: Ember.State.create({ <add> insideFirst: Ember.State.create({ <add> <add> }) <add> }) <add> } <add> }); <add> <add> manager.goToState('first'); <add> equal(getPath(manager, 'currentState.path'), 'first'); <add> <add> manager.goToState('first.insideFirst'); <add> equal(getPath(manager, 'currentState.path'), 'first.insideFirst'); <add>});
3
Javascript
Javascript
remove debug messages from module loading
a0f2b8a0c5da9de9d7f52ea0312b1a6ffc53277c
<ide><path>src/main.js <ide> node.path = new function () { <ide> this.target = target; <ide> <ide> this.load = function (base_directory, callback) { <del> node.debug("sub.load from <" + base_directory + "> " + this.toString()); <add> //node.debug("sub.load from <" + base_directory + "> " + this.toString()); <ide> findScript(base_directory, name, function (filename) { <ide> if (filename === null) { <ide> stderr.puts("Cannot find a script matching: " + name); <ide> node.path = new function () { <ide> var compiled = node.compile(source, filename); <ide> <ide> if (module.__on_load) { <del> node.debug("<"+ filename+"> has onload! this is bad"); <add> //node.debug("<"+ filename+"> has onload! this is bad"); <ide> } <ide> <ide> module.__subs = []; <ide> node.path = new function () { <ide> <ide> var scaffold = new Scaffold(content, filename, target); <ide> <del> node.debug("after scaffold <" + filename + ">"); <add> //node.debug("after scaffold <" + filename + ">"); <ide> <ide> function finish() { <del> node.debug("finish 1 load <" + filename + ">"); <add> //node.debug("finish 1 load <" + filename + ">"); <ide> if (scaffold.on_load instanceof Function) { <del> node.debug("foo bar <" + filename + ">"); <add> //node.debug("foo bar <" + filename + ">"); <ide> scaffold.on_load(); <ide> } <del> node.debug("finish 2 load <" + filename + ">"); <add> //node.debug("finish 2 load <" + filename + ">"); <ide> <ide> if (callback instanceof Function) <ide> callback(); <ide> node.path = new function () { <ide> var sub = scaffold.subs[i]; <ide> sub.load(node.path.dirname(filename), function () { <ide> ncomplete += 1; <del> node.debug("<" + filename + "> ncomplete = " + ncomplete.toString() + " scaffold.subs.length = " + scaffold.subs.length.toString()); <add> //node.debug("<" + filename + "> ncomplete = " + ncomplete.toString() + " scaffold.subs.length = " + scaffold.subs.length.toString()); <ide> if (ncomplete === scaffold.subs.length) <ide> finish(); <ide> }); <ide><path>test/test-test.js <ide> include("mjsunit"); <ide> var a = require("fixtures/a"); <ide> <ide> function on_load () { <del> stderr.puts("hello world"); <ide> assertFalse(false, "testing the test program."); <ide> <ide> assertInstanceof(a.A, Function);
2
PHP
PHP
apply seteventmanager() in tests
2172466db1d6d82f417f81e2f942f2a68e0dde4b
<ide><path>tests/TestCase/Controller/ComponentRegistryTest.php <ide> public function testLoadWithEnableFalse() <ide> $mock->expects($this->never()) <ide> ->method('attach'); <ide> <del> $this->Components->getController()->eventManager($mock); <add> $this->Components->getController()->setEventManager($mock); <ide> <ide> $result = $this->Components->load('Cookie', ['enabled' => false]); <ide> $this->assertInstanceOf('Cake\Controller\Component\CookieComponent', $result); <ide><path>tests/TestCase/Controller/ComponentTest.php <ide> public function testInnerComponentsAreNotEnabled() <ide> { <ide> $mock = $this->getMockBuilder(EventManager::class)->getMock(); <ide> $controller = new Controller(); <del> $controller->eventManager($mock); <add> $controller->setEventManager($mock); <ide> <ide> $mock->expects($this->once()) <ide> ->method('on') <ide> public function testEventsInnerComponent() <ide> ->with($this->isInstanceOf(AppleComponent::class)); <ide> <ide> $controller = new Controller(); <del> $controller->eventManager($eventManager); <add> $controller->setEventManager($eventManager); <ide> <ide> $Collection = new ComponentRegistry($controller); <ide> <ide> public function testNoEventsInnerComponent() <ide> $eventManager->expects($this->never())->method('on'); <ide> <ide> $controller = new Controller(); <del> $controller->eventManager($eventManager); <add> $controller->setEventManager($eventManager); <ide> <ide> $Collection = new ComponentRegistry($controller); <ide> <ide><path>tests/TestCase/Event/EventDispatcherTraitTest.php <ide> public function testSettingEventManager() <ide> { <ide> $eventManager = new EventManager(); <ide> <del> $this->subject->eventManager($eventManager); <add> $this->subject->setEventManager($eventManager); <ide> <ide> $this->assertSame($eventManager, $this->subject->getEventManager()); <ide> } <ide><path>tests/TestCase/View/HelperTest.php <ide> public function testThatHelperHelpersAreNotAttached() <ide> Plugin::loadAll(); <ide> <ide> $events = $this->getMockBuilder('\Cake\Event\EventManager')->getMock(); <del> $this->View->eventManager($events); <add> $this->View->setEventManager($events); <ide> <ide> $events->expects($this->never()) <ide> ->method('attach'); <ide><path>tests/TestCase/View/ViewTest.php <ide> public function testHelperCallbackTriggering() <ide> $View->templatePath($this->PostsController->name); <ide> <ide> $manager = $this->getMockBuilder('Cake\Event\EventManager')->getMock(); <del> $View->eventManager($manager); <add> $View->setEventManager($manager); <ide> <ide> $manager->expects($this->at(0))->method('dispatch') <ide> ->with(
5
PHP
PHP
remove unused dependencies
01e1ed09682f22034df90fe4b60e9fe739e304b6
<ide><path>Cake/View/Helper/PaginatorHelper.php <ide> */ <ide> namespace Cake\View\Helper; <ide> <del>use Cake\Core\App; <del>use Cake\Error; <del>use Cake\Utility\Inflector; <ide> use Cake\View\Helper; <ide> use Cake\View\StringTemplate; <ide> use Cake\View\View;
1
PHP
PHP
add tests for match()
655bbe9b6e473b28e6853dacb1b1f205678c36d5
<ide><path>src/Routing/ScopedRouteCollection.php <ide> public function parse($url) { <ide> list($url, $queryParameters) = explode('?', $url, 2); <ide> parse_str($queryParameters, $queryParameters); <ide> } <del> $out = array(); <add> $out = []; <ide> for ($i = 0, $len = count($this->_routes); $i < $len; $i++) { <ide> $r = $this->_routes[$i]->parse($url); <del> if ($r !== false && $queryParameters) { <del> $r['?'] = $queryParameters; <del> return $r; <add> if ($r === false) { <add> continue; <ide> } <del> if ($r !== false) { <add> if ($queryParameters) { <add> $r['?'] = $queryParameters; <ide> return $r; <ide> } <add> return $r; <ide> } <ide> return $out; <ide> } <ide> public function parse($url) { <ide> * @param array $url The url to match. <ide> * @param array $context The request context to use. Contains _base, _port, <ide> * _host, and _scheme keys. <del> * @return void <add> * @return string|false Either a string on match, or false on failure. <ide> */ <ide> public function match($url, $context) { <ide> foreach ($this->_getNames($url) as $name) { <ide> public function match($url, $context) { <ide> } <ide> } <ide> } <del> return '/'; <add> return false; <ide> } <ide> <ide> /** <ide><path>tests/TestCase/Routing/ScopedRouteCollectionTest.php <ide> public function testParse() { <ide> $this->assertEquals($expected, $result); <ide> } <ide> <add>/** <add> * Test matching routes. <add> * <add> * @return void <add> */ <add> public function testMatch() { <add> $context = [ <add> '_base' => '/', <add> '_scheme' => 'http', <add> '_host' => 'example.org', <add> ]; <add> $routes = new ScopedRouteCollection('/b'); <add> $routes->connect('/', ['controller' => 'Articles']); <add> $routes->connect('/:id', ['controller' => 'Articles', 'action' => 'view']); <add> <add> $result = $routes->match(['plugin' => null, 'controller' => 'Articles', 'action' => 'index'], $context); <add> $this->assertEquals('b', $result); <add> <add> $result = $routes->match( <add> ['id' => 'thing', 'plugin' => null, 'controller' => 'Articles', 'action' => 'view'], <add> $context); <add> $this->assertEquals('b/thing', $result); <add> <add> $result = $routes->match(['plugin' => null, 'controller' => 'Articles', 'action' => 'add'], $context); <add> $this->assertFalse($result, 'No matches'); <add> } <add> <ide> }
2
Ruby
Ruby
add bottle_hash method
04804c6db5ad4912efb946313930abee97d7fcf8
<ide><path>Library/Homebrew/formula.rb <ide> def to_hash <ide> "revision" => stable.specs[:revision], <ide> } <ide> <del> if bottle_defined? <del> bottle_spec = stable.bottle_specification <del> bottle_info = { <del> "rebuild" => bottle_spec.rebuild, <del> "cellar" => (cellar = bottle_spec.cellar).is_a?(Symbol) ? cellar.inspect : cellar, <del> "prefix" => bottle_spec.prefix, <del> "root_url" => bottle_spec.root_url, <del> } <del> bottle_info["files"] = {} <del> bottle_spec.collector.each_key do |os| <del> bottle_url = "#{bottle_spec.root_url}/#{Bottle::Filename.create(self, os, bottle_spec.rebuild).bintray}" <del> checksum = bottle_spec.collector[os][:checksum] <del> bottle_info["files"][os] = { <del> "url" => bottle_url, <del> "sha256" => checksum.hexdigest, <del> } <del> end <del> hsh["bottle"]["stable"] = bottle_info <del> end <add> hsh["bottle"]["stable"] = bottle_hash if bottle_defined? <ide> end <ide> <ide> hsh["options"] = options.map do |opt| <ide> def to_hash <ide> hsh <ide> end <ide> <add> # Returns the bottle information for a formula <add> def bottle_hash <add> bottle_spec = stable.bottle_specification <add> hash = { <add> "rebuild" => bottle_spec.rebuild, <add> "cellar" => (cellar = bottle_spec.cellar).is_a?(Symbol) ? cellar.inspect : cellar, <add> "prefix" => bottle_spec.prefix, <add> "root_url" => bottle_spec.root_url, <add> "files" => {}, <add> } <add> bottle_spec.collector.each_key do |os| <add> bottle_url = "#{bottle_spec.root_url}/#{Bottle::Filename.create(self, os, bottle_spec.rebuild).bintray}" <add> checksum = bottle_spec.collector[os][:checksum] <add> hash["files"][os] = { <add> "url" => bottle_url, <add> "sha256" => checksum.hexdigest, <add> } <add> end <add> hash <add> end <add> <ide> # @private <ide> def fetch(verify_download_integrity: true) <ide> active_spec.fetch(verify_download_integrity: verify_download_integrity)
1
PHP
PHP
improve api docs for event
4853264e31b8a28127349ce66b6df71fcb5e92c3
<ide><path>src/Event/Event.php <ide> public function getName(): string <ide> /** <ide> * Returns the subject of this event <ide> * <add> * If the event has no subject an exception will be raised. <add> * <ide> * @return object <add> * @throws \Cake\Core\Exception\Exception; <ide> * @psalm-return TSubject <ide> * @psalm-suppress LessSpecificImplementedReturnType <ide> */
1
Python
Python
fix wrong variable in test_build.py exception
3248aa7d59afbb4c649ffd40e95b17e63de77c94
<ide><path>numpy/linalg/tests/test_build.py <ide> def __init__(self): <ide> except OSError: <ide> raise RuntimeError("command %s cannot be run" % self.cmd) <ide> <del> def get_dependencies(self, file): <del> p = Popen(self.cmd + [file], stdout=PIPE, stderr=PIPE) <add> def get_dependencies(self, lfile): <add> p = Popen(self.cmd + [lfile], stdout=PIPE, stderr=PIPE) <ide> stdout, stderr = p.communicate() <ide> if not (p.returncode == 0): <del> raise RuntimeError("Failed to check dependencies for %s" % libfile) <add> raise RuntimeError("failed dependencies check for %s" % lfile) <ide> <ide> return stdout <ide> <del> def grep_dependencies(self, file, deps): <del> stdout = self.get_dependencies(file) <add> def grep_dependencies(self, lfile, deps): <add> stdout = self.get_dependencies(lfile) <ide> <ide> rdeps = dict([(dep, re.compile(dep)) for dep in deps]) <ide> founds = []
1
Java
Java
make new bridge default in oss
c89c25f27d115c1928104b06dbe6eabf005995a6
<ide><path>ReactAndroid/src/androidTest/java/com/facebook/react/testing/ReactAppTestActivity.java <ide> public void loadApp( <ide> ReactInstanceManager.Builder builder = <ide> ReactTestHelper.getReactTestFactory().getReactInstanceManagerBuilder() <ide> .setApplication(getApplication()) <del> .setUseOldBridge(true) <ide> .setBundleAssetName(bundleName) <ide> // By not setting a JS module name, we force the bundle to be always loaded from <ide> // assets, not the devserver, even if dev mode is enabled (such as when testing redboxes). <ide><path>ReactAndroid/src/main/java/com/facebook/react/ReactNativeHost.java <ide> public void clear() { <ide> <ide> protected ReactInstanceManager createReactInstanceManager() { <ide> ReactInstanceManager.Builder builder = ReactInstanceManager.builder() <del> .setUseOldBridge(true) <ide> .setApplication(mApplication) <ide> .setJSMainModuleName(getJSMainModuleName()) <ide> .setUseDeveloperSupport(getUseDeveloperSupport())
2
Python
Python
fix mypy errors in airflow utils
efd365274a548e7dd859ca1823da6a7c417a34f1
<ide><path>airflow/utils/dag_cycle_tester.py <ide> # under the License. <ide> """DAG Cycle tester""" <ide> from collections import defaultdict, deque <del>from typing import TYPE_CHECKING <add>from typing import TYPE_CHECKING, Deque, Dict <ide> <ide> from airflow.exceptions import AirflowDagCycleException <ide> <ide> if TYPE_CHECKING: <del> from airflow.models import DAG <add> from airflow.models.dag import DAG <ide> <ide> CYCLE_NEW = 0 <ide> CYCLE_IN_PROGRESS = 1 <ide> CYCLE_DONE = 2 <ide> <ide> <del>def test_cycle(dag: "DAG") -> None: <add>def test_cycle(dag: 'DAG') -> None: <ide> """ <ide> A wrapper function of `check_cycle` for backward compatibility purpose. <ide> New code should use `check_cycle` instead since this function name `test_cycle` starts with 'test_' and <ide> def test_cycle(dag: "DAG") -> None: <ide> return check_cycle(dag) <ide> <ide> <del>def check_cycle(dag: "DAG") -> None: <add>def check_cycle(dag: 'DAG') -> None: <ide> """Check to see if there are any cycles in the DAG. <ide> <ide> :raises AirflowDagCycleException: If cycle is found in the DAG. <ide> """ <ide> # default of int is 0 which corresponds to CYCLE_NEW <del> visited = defaultdict(int) <del> path_stack = deque() <add> visited: Dict[str, int] = defaultdict(int) <add> path_stack: Deque[str] = deque() <ide> task_dict = dag.task_dict <ide> <ide> def _check_adjacent_tasks(task_id, current_task): <ide><path>airflow/utils/db.py <ide> import sys <ide> import time <ide> from tempfile import gettempdir <del>from typing import Any, Iterable, List <add>from typing import Any, Callable, Iterable, List, Tuple <ide> <ide> from sqlalchemy import Table, exc, func, inspect, or_, text <ide> from sqlalchemy.orm.session import Session <ide> def _format_airflow_moved_table_name(source_table, version): <ide> <ide> <ide> @provide_session <del>def merge_conn(conn, session=None): <add>def merge_conn(conn, session: Session = NEW_SESSION): <ide> """Add new Connection.""" <ide> if not session.query(Connection).filter(Connection.conn_id == conn.conn_id).first(): <ide> session.add(conn) <ide> session.commit() <ide> <ide> <ide> @provide_session <del>def add_default_pool_if_not_exists(session=None): <add>def add_default_pool_if_not_exists(session: Session = NEW_SESSION): <ide> """Add default pool if it does not exist.""" <ide> if not Pool.get_pool(Pool.DEFAULT_POOL_NAME, session=session): <ide> default_pool = Pool( <ide> def add_default_pool_if_not_exists(session=None): <ide> <ide> <ide> @provide_session <del>def create_default_connections(session=None): <add>def create_default_connections(session: Session = NEW_SESSION): <ide> """Create default Airflow connections.""" <ide> merge_conn( <ide> Connection( <ide> def create_default_connections(session=None): <ide> <ide> <ide> @provide_session <del>def initdb(session=None): <add>def initdb(session: Session = NEW_SESSION): <ide> """Initialize Airflow database.""" <ide> upgradedb(session=session) <ide> <ide> def _format_dangling_error(source_table, target_table, invalid_count, reason): <ide> ) <ide> <ide> <del>def _move_dangling_run_data_to_new_table(session, source_table: "Table", target_table_name: str): <add>def _move_dangling_run_data_to_new_table(session: Session, source_table: "Table", target_table_name: str): <ide> where_clause = "where dag_id is null or run_id is null or execution_date is null" <ide> _move_dangling_table(session, source_table, target_table_name, where_clause) <ide> <ide> def check_task_tables_without_matching_dagruns(session: Session) -> Iterable[str <ide> 
models_to_dagrun: List[Any] = [TaskInstance, TaskReschedule] <ide> for model in models_to_dagrun + [DagRun]: <ide> try: <del> metadata.reflect(only=[model.__tablename__], extend_existing=True, resolve_fks=False) <add> metadata.reflect( <add> only=[model.__tablename__], extend_existing=True, resolve_fks=False # type: ignore <add> ) <ide> except exc.InvalidRequestError: <ide> # Table doesn't exist, but try the other ones in case the user is upgrading from an _old_ DB <ide> # version <ide> def check_task_tables_without_matching_dagruns(session: Session) -> Iterable[str <ide> for model in models_to_dagrun: <ide> # We can't use the model here since it may differ from the db state due to <ide> # this function is run prior to migration. Use the reflected table instead. <del> source_table = metadata.tables.get(model.__tablename__) <add> source_table = metadata.tables.get(model.__tablename__) # type: ignore <ide> if source_table is None: <ide> continue <ide> <ide> def _check_migration_errors(session: Session = NEW_SESSION) -> Iterable[str]: <ide> :session: session of the sqlalchemy <ide> :rtype: list[str] <ide> """ <del> for check_fn in ( <add> check_functions: Tuple[Callable[..., Iterable[str]], ...] = ( <ide> check_conn_id_duplicates, <ide> check_conn_type_null, <ide> check_run_id_null, <ide> check_task_tables_without_matching_dagruns, <del> ): <add> ) <add> for check_fn in check_functions: <ide> yield from check_fn(session) <ide> # Ensure there is no "active" transaction. Seems odd, but without this MSSQL can hang <ide> session.commit() <ide> <ide> <ide> @provide_session <del>def upgradedb(session=None): <add>def upgradedb(session: Session = NEW_SESSION): <ide> """Upgrade the database.""" <ide> # alembic adds significant import time, so we import it lazily <add> if not settings.SQL_ALCHEMY_CONN: <add> raise RuntimeError("The settings.SQL_ALCHEMY_CONN not set. This is critical assertion.") <ide> from alembic import command <ide> <ide> config = _get_alembic_config() <ide> def upgradedb(session=None): <ide> <ide> <ide> @provide_session <del>def resetdb(session=None): <add>def resetdb(session: Session = NEW_SESSION): <ide> """Clear out the database""" <add> if not settings.engine: <add> raise RuntimeError("The settings.engine must be set. This is a critical assertion") <ide> log.info("Dropping tables that exist") <ide> <ide> connection = settings.engine.connect() <ide> def drop_flask_models(connection): <ide> <ide> <ide> @provide_session <del>def check(session=None): <add>def check(session: Session = NEW_SESSION): <ide> """ <ide> Checks if the database works. <ide> <ide> def __str__(self): <ide> <ide> <ide> @contextlib.contextmanager <del>def create_global_lock(session, lock: DBLocks, lock_timeout=1800): <add>def create_global_lock(session: Session, lock: DBLocks, lock_timeout=1800): <ide> """Contextmanager that will create and teardown a global db lock.""" <ide> conn = session.get_bind().connect() <ide> dialect = conn.dialect <ide><path>airflow/utils/dot_renderer.py <ide> # specific language governing permissions and limitations <ide> # under the License. 
<ide> """Renderer DAG (tasks and dependencies) to the graphviz object.""" <del>from typing import Dict, List, Optional <add>from typing import Any, Dict, List, Optional <ide> <ide> import graphviz <ide> <add>from airflow import AirflowException <ide> from airflow.models import TaskInstance <ide> from airflow.models.baseoperator import BaseOperator <ide> from airflow.models.dag import DAG <ide> def _refine_color(color: str): <ide> return color <ide> <ide> <del>def _draw_task(task: BaseOperator, parent_graph: graphviz.Digraph, states_by_task_id: Dict[str, str]) -> None: <add>def _draw_task( <add> task: BaseOperator, parent_graph: graphviz.Digraph, states_by_task_id: Optional[Dict[Any, Any]] <add>) -> None: <ide> """Draw a single task on the given parent_graph""" <ide> if states_by_task_id: <ide> state = states_by_task_id.get(task.task_id, State.NONE) <ide> def _draw_task(task: BaseOperator, parent_graph: graphviz.Digraph, states_by_tas <ide> <ide> <ide> def _draw_task_group( <del> task_group: TaskGroup, parent_graph: graphviz.Digraph, states_by_task_id: Dict[str, str] <add> task_group: TaskGroup, parent_graph: graphviz.Digraph, states_by_task_id: Optional[Dict[str, str]] <ide> ) -> None: <ide> """Draw the given task_group and its children on the given parent_graph""" <ide> # Draw joins <ide> def _draw_task_group( <ide> ) <ide> <ide> # Draw children <del> for child in sorted(task_group.children.values(), key=lambda t: t.label): <add> for child in sorted(task_group.children.values(), key=lambda t: t.label if t.label else ""): <ide> _draw_nodes(child, parent_graph, states_by_task_id) <ide> <ide> <del>def _draw_nodes(node: TaskMixin, parent_graph: graphviz.Digraph, states_by_task_id: Dict[str, str]) -> None: <add>def _draw_nodes( <add> node: TaskMixin, parent_graph: graphviz.Digraph, states_by_task_id: Optional[Dict[Any, Any]] <add>) -> None: <ide> """Draw the node and its children on the given parent_graph recursively.""" <ide> if isinstance(node, BaseOperator): <ide> _draw_task(node, parent_graph, states_by_task_id) <ide> else: <add> if not isinstance(node, TaskGroup): <add> raise AirflowException(f"The node {node} should be TaskGroup and is not") <ide> # Draw TaskGroup <ide> if node.is_root: <ide> # No need to draw background for root TaskGroup. <ide><path>airflow/utils/edgemodifier.py <ide> # specific language governing permissions and limitations <ide> # under the License. <ide> <del>from typing import Sequence, Union <add>from typing import List, Optional, Sequence, Union <ide> <ide> from airflow.models.taskmixin import TaskMixin <ide> <ide> class EdgeModifier(TaskMixin): <ide> is the representation of the information for one specific edge. <ide> """ <ide> <del> def __init__(self, label: str = None): <add> def __init__(self, label: Optional[str] = None): <add> from airflow.models.baseoperator import BaseOperator <add> <ide> self.label = label <del> self._upstream = [] <del> self._downstream = [] <add> self._upstream: List[BaseOperator] = [] <add> self._downstream: List[BaseOperator] = [] <ide> <ide> @property <ide> def roots(self): <ide> def set_upstream(self, task_or_task_list: Union[TaskMixin, Sequence[TaskMixin]], <ide> Providing this also provides << via TaskMixin. 
<ide> """ <ide> # Ensure we have a list, even if it's just one item <del> if not isinstance(task_or_task_list, list): <add> if isinstance(task_or_task_list, TaskMixin): <ide> task_or_task_list = [task_or_task_list] <ide> # Unfurl it into actual operators <ide> operators = [] <ide> def set_downstream(self, task_or_task_list: Union[TaskMixin, Sequence[TaskMixin] <ide> Providing this also provides >> via TaskMixin. <ide> """ <ide> # Ensure we have a list, even if it's just one item <del> if not isinstance(task_or_task_list, list): <add> if isinstance(task_or_task_list, TaskMixin): <ide> task_or_task_list = [task_or_task_list] <ide> # Unfurl it into actual operators <ide> operators = [] <ide><path>airflow/utils/email.py <ide> def send_email_smtp( <ide> <ide> <ide> def build_mime_message( <del> mail_from: str, <add> mail_from: Optional[str], <ide> to: Union[str, Iterable[str]], <ide> subject: str, <ide> html_content: str, <ide> def send_mime_email( <ide> try: <ide> from airflow.hooks.base import BaseHook <ide> <del> conn = BaseHook.get_connection(conn_id) <del> smtp_user = conn.login <del> smtp_password = conn.password <add> airflow_conn = BaseHook.get_connection(conn_id) <add> smtp_user = airflow_conn.login <add> smtp_password = airflow_conn.password <ide> except AirflowException: <ide> pass <ide> if smtp_user is None or smtp_password is None: <ide><path>airflow/utils/entry_points.py <ide> try: <ide> import importlib_metadata <ide> except ImportError: <del> from importlib import metadata as importlib_metadata <add> from importlib import metadata as importlib_metadata # type: ignore <ide> <ide> <ide> def entry_points_with_dist(group: str): <ide><path>airflow/utils/json.py <ide> try: <ide> import numpy as np <ide> except ImportError: <del> np = None <add> np = None # type: ignore <ide> <ide> try: <ide> from kubernetes.client import models as k8s <ide><path>airflow/utils/log/logging_mixin.py <ide> import logging <ide> import re <ide> import sys <add>from io import IOBase <ide> from logging import Handler, Logger, StreamHandler <ide> <ide> # 7-bit C1 ANSI escape sequences <ide> def supports_external_link(self) -> bool: <ide> """Return whether handler is able to support external links.""" <ide> <ide> <del># TODO: Formally inherit from io.IOBase <del>class StreamLogWriter: <add>class StreamLogWriter(IOBase): <ide> """Allows to redirect stdout and stderr to logger""" <ide> <ide> encoding: None = None <ide><path>airflow/utils/log/secrets_masker.py <ide> import collections <ide> import logging <ide> import re <del>from typing import TYPE_CHECKING, Iterable, Optional, Set, TypeVar, Union <add>from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Optional, Set, Tuple, Union <ide> <ide> from airflow.compat.functools import cache, cached_property <ide> <ide> if TYPE_CHECKING: <ide> from airflow.typing_compat import RePatternType <ide> <del> RedactableItem = TypeVar('RedactableItem') <add> RedactableItem = Union[str, Dict[Any, Any], Tuple[Any, ...], List[Any]] <ide> <ide> <ide> log = logging.getLogger(__name__) <ide> def should_hide_value_for_key(name): <ide> return False <ide> <ide> <del>def mask_secret(secret: Union[str, dict, Iterable], name: str = None) -> None: <add>def mask_secret(secret: Union[str, dict, Iterable], name: Optional[str] = None) -> None: <ide> """ <ide> Mask a secret from appearing in the task logs. 
<ide> <ide> def mask_secret(secret: Union[str, dict, Iterable], name: str = None) -> None: <ide> _secrets_masker().add_mask(secret, name) <ide> <ide> <del>def redact(value: "RedactableItem", name: str = None) -> "RedactableItem": <add>def redact(value: "RedactableItem", name: Optional[str] = None) -> "RedactableItem": <ide> """Redact any secrets found in ``value``.""" <ide> return _secrets_masker().redact(value, name) <ide> <ide> def redact(self, item: "RedactableItem", name: Optional[str] = None) -> "Redacta <ide> """ <ide> return self._redact(item, name, depth=0) <ide> <del> def add_mask(self, secret: Union[str, dict, Iterable], name: str = None): <add> def add_mask(self, secret: Union[str, dict, Iterable], name: Optional[str] = None): <ide> """Add a new secret to be masked to this filter instance.""" <ide> from airflow.configuration import conf <ide> <ide><path>airflow/utils/session.py <ide> @contextlib.contextmanager <ide> def create_session() -> Iterator[settings.SASession]: <ide> """Contextmanager that will create and teardown a session.""" <add> if not settings.Session: <add> raise RuntimeError("Session must be set before!") <ide> session = settings.Session() <ide> try: <ide> yield session <ide><path>airflow/utils/task_group.py <ide> def __init__( <ide> raise AirflowException("TaskGroup must have a parent_group except for the root TaskGroup") <ide> self.used_group_ids = self._parent_group.used_group_ids <ide> <del> self._group_id = group_id <ide> # if given group_id already used assign suffix by incrementing largest used suffix integer <ide> # Example : task_group ==> task_group__1 -> task_group__2 -> task_group__3 <del> if group_id in self.used_group_ids: <del> if not add_suffix_on_collision: <del> raise DuplicateTaskIdFound(f"group_id '{self.group_id}' has already been added to the DAG") <del> base = re.split(r'__\d+$', group_id)[0] <del> suffixes = sorted( <del> int(re.split(r'^.+__', used_group_id)[1]) <del> for used_group_id in self.used_group_ids <del> if used_group_id is not None and re.match(rf'^{base}__\d+$', used_group_id) <del> ) <del> if not suffixes: <del> self._group_id += '__1' <del> else: <del> self._group_id = f'{base}__{suffixes[-1] + 1}' <add> self._group_id = group_id <add> self._check_for_group_id_collisions(add_suffix_on_collision) <ide> <ide> self.used_group_ids.add(self.group_id) <ide> self.used_group_ids.add(self.downstream_join_id) <ide> def __init__( <ide> self.upstream_task_ids: Set[Optional[str]] = set() <ide> self.downstream_task_ids: Set[Optional[str]] = set() <ide> <add> def _check_for_group_id_collisions(self, add_suffix_on_collision: bool): <add> if self._group_id is None: <add> return <add> # if given group_id already used assign suffix by incrementing largest used suffix integer <add> # Example : task_group ==> task_group__1 -> task_group__2 -> task_group__3 <add> if self._group_id in self.used_group_ids: <add> if not add_suffix_on_collision: <add> raise DuplicateTaskIdFound(f"group_id '{self._group_id}' has already been added to the DAG") <add> base = re.split(r'__\d+$', self._group_id)[0] <add> suffixes = sorted( <add> int(re.split(r'^.+__', used_group_id)[1]) <add> for used_group_id in self.used_group_ids <add> if used_group_id is not None and re.match(rf'^{base}__\d+$', used_group_id) <add> ) <add> if not suffixes: <add> self._group_id += '__1' <add> else: <add> self._group_id = f'{base}__{suffixes[-1] + 1}' <add> <ide> @classmethod <ide> def create_root(cls, dag: "DAG") -> "TaskGroup": <ide> """Create a root TaskGroup with no group_id or 
parent.""" <ide><path>airflow/utils/timezone.py <ide> def coerce_datetime(v: Optional[dt.datetime]) -> Optional[DateTime]: <ide> """Convert whatever is passed in to an timezone-aware ``pendulum.DateTime``.""" <ide> if v is None: <ide> return None <del> if v.tzinfo is None: <del> v = make_aware(v) <ide> if isinstance(v, DateTime): <del> return v <del> return pendulum.instance(v) <add> return v if v.tzinfo else make_aware(v) <add> # Only dt.datetime is left here <add> return pendulum.instance(v if v.tzinfo else make_aware(v)) <ide><path>airflow/utils/weekday.py <ide> class WeekDay(enum.IntEnum): <ide> SUNDAY = 7 <ide> <ide> @classmethod <del> def get_weekday_number(cls, week_day_str): <add> def get_weekday_number(cls, week_day_str: str): <ide> """ <ide> Return the ISO Week Day Number for a Week Day <ide> <ide> def convert(cls, day: Union[str, 'WeekDay']) -> int: <ide> return cls.get_weekday_number(week_day_str=day) <ide> <ide> @classmethod <del> def validate_week_day(cls, week_day: Union[str, 'WeekDay', Set[str], List[str]]): <add> def validate_week_day( <add> cls, week_day: Union[str, 'WeekDay', Set[str], Set['WeekDay'], List[str], List['WeekDay']] <add> ): <ide> """Validate each item of iterable and create a set to ease compare of values""" <ide> if not isinstance(week_day, Iterable): <ide> if isinstance(week_day, WeekDay): <ide><path>tests/utils/test_timezone.py <ide> import pytest <ide> <ide> from airflow.utils import timezone <add>from airflow.utils.timezone import coerce_datetime <ide> <ide> CET = pendulum.tz.timezone("Europe/Paris") <ide> EAT = pendulum.tz.timezone('Africa/Nairobi') # Africa/Nairobi <ide> def test_make_aware(self): <ide> ) <ide> with pytest.raises(ValueError): <ide> timezone.make_aware(datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT), EAT) <add> <add> <add>@pytest.mark.parametrize( <add> 'input_datetime, output_datetime', <add> [ <add> pytest.param(None, None, id='None datetime'), <add> pytest.param( <add> pendulum.DateTime(2021, 11, 1), <add> pendulum.DateTime(2021, 11, 1, tzinfo=UTC), <add> id="Non aware pendulum Datetime", <add> ), <add> pytest.param( <add> pendulum.DateTime(2021, 11, 1, tzinfo=CET), <add> pendulum.DateTime(2021, 11, 1, tzinfo=CET), <add> id="Aware pendulum Datetime", <add> ), <add> pytest.param( <add> datetime.datetime(2021, 11, 1), <add> pendulum.DateTime(2021, 11, 1, tzinfo=UTC), <add> id="Non aware datetime", <add> ), <add> pytest.param( <add> datetime.datetime(2021, 11, 1, tzinfo=CET), <add> pendulum.DateTime(2021, 11, 1, tzinfo=CET), <add> id="Aware datetime", <add> ), <add> ], <add>) <add>def test_coerce_datetime(input_datetime, output_datetime): <add> assert output_datetime == coerce_datetime(input_datetime)
14
Javascript
Javascript
add tab id to tooltip
351d5f662a77b27f4ca2a79982aed8da109c9696
<ide><path>src/devtools/views/TabBar.js <ide> export default function TabBar({ <ide> <ide> if (title) { <ide> button = ( <del> <Tooltip className={tooltipStyles.Tooltip} label={title}> <add> <Tooltip key={id} className={tooltipStyles.Tooltip} label={title}> <ide> {button} <ide> </Tooltip> <ide> );
1
Text
Text
fix header on migration page
0a6e0c43d9442926691cd7a842dcf55fc555c505
<ide><path>docs/migration.md <ide> weight=79 <ide> +++ <ide> <![end-metadata]--> <ide> <del># Migrate to Engine 1.10 <add># Migrate to Engine 1.10 <ide> <ide> Starting from version 1.10 of Docker Engine, we completely change the way image <ide> data is addressed on disk. Previously, every image and layer used a randomly
1
Text
Text
fix a broken link
04d0d384357331877c2cb78132be066233ea7046
<ide><path>docs/Installation.md <ide> Uninstallation is documented in the [FAQ](FAQ.md). <ide> <a name="1"><sup>1</sup></a> Not all formulae have CPU or OS requirements, but <ide> you can assume you will have trouble if you don’t conform. Also, you can find <ide> PowerPC and Tiger branches from other users in the fork network. See <del>[Interesting Taps & Forks](Interesting-Taps-&-Forks.md). <add>[Interesting Taps and Forks](Interesting-Taps-and-Forks.md). <ide> <ide> <a name="2"><sup>2</sup></a> 10.10 or higher is recommended. 10.5–10.9 are <ide> supported on a best-effort basis. For 10.4 and 10.5, see
1
Ruby
Ruby
add tests for formula path shortcut 3
063cbe7acdb0af0a4cd9bd35f29f89bc0d638d4a
<ide><path>Library/Homebrew/rubocops/lines_cop.rb <ide> def audit_formula(_node, _class_node, _parent_class_node, body_node) <ide> problem "\"\#\{share}#{match[1]}\" should be \"\#{#{match[2]}}\"" <ide> end <ide> <del> formula_path_strings(body_node, :share) do |p| <del> if match = regex_match_group(p, %r{/(bin|include|libexec|lib|sbin|share|Frameworks)}i) <del> problem "\"\#\{prefix}#{match[1]}\" should be \"\#{#{match[1].downcase}}\"" <add> formula_path_strings(body_node, :prefix) do |p| <add> if match = regex_match_group(p, %r{(/share/(info|man))$}) <add> problem "\"\#\{prefix}#{match[1]}\" should be \"\#{#{match[2]}}\"" <ide> end <del> if match = regex_match_group(p, %r{((/share/man/|\#\{man\}/)(man[1-8]))}) <add> if match = regex_match_group(p, %r{((/share/man/)(man[1-8]))}) <ide> problem "\"\#\{prefix}#{match[1]}\" should be \"\#{#{match[3]}}\"" <ide> end <del> if match = regex_match_group(p, %r{(/share/(info|man))}) <del> problem "\"\#\{prefix}#{match[1]}\" should be \"\#{#{match[2]}}\"" <add> if match = regex_match_group(p, %r{(/(bin|include|libexec|lib|sbin|share|Frameworks))}i) <add> problem "\"\#\{prefix}#{match[1]}\" should be \"\#{#{match[2].downcase}}\"" <ide> end <ide> end <ide> <ide> def modifier?(node) <ide> EOS <ide> <ide> def_node_search :formula_path_strings, <<-EOS.undent <del> (dstr (begin (send nil %1)) $(str _ )) <add> {(dstr (begin (send nil %1)) $(str _ )) <add> (dstr _ (begin (send nil %1)) $(str _ ))} <ide> EOS <ide> <ide> def_node_matcher :negation?, '(send ... :!)' <ide><path>Library/Homebrew/test/rubocops/lines_cop_spec.rb <ide> def install <ide> expect_offense(expected, actual) <ide> end <ide> end <add> <add> it "with formula path shortcut long form 1" do <add> source = <<-EOS.undent <add> class Foo < Formula <add> desc "foo" <add> url 'http://example.com/foo-1.0.tgz' <add> def install <add> mv "\#{prefix}/libexec", share <add> end <add> end <add> EOS <add> <add> expected_offenses = [{ message: "\"\#\{prefix}/libexec\" should be \"\#{libexec}\"", <add> severity: :convention, <add> line: 5, <add> column: 18, <add> source: source }] <add> <add> inspect_source(cop, source) <add> <add> expected_offenses.zip(cop.offenses).each do |expected, actual| <add> expect_offense(expected, actual) <add> end <add> end <add> <add> it "with formula path shortcut long form 2" do <add> source = <<-EOS.undent <add> class Foo < Formula <add> desc "foo" <add> url 'http://example.com/foo-1.0.tgz' <add> def install <add> system "./configure", "--INFODIR=\#{prefix}/share/info" <add> end <add> end <add> EOS <add> <add> expected_offenses = [{ message: "\"\#\{prefix}/share/info\" should be \"\#{info}\"", <add> severity: :convention, <add> line: 5, <add> column: 47, <add> source: source }] <add> <add> inspect_source(cop, source) <add> <add> expected_offenses.zip(cop.offenses).each do |expected, actual| <add> expect_offense(expected, actual) <add> end <add> end <add> it "with formula path shortcut long form 3" do <add> source = <<-EOS.undent <add> class Foo < Formula <add> desc "foo" <add> url 'http://example.com/foo-1.0.tgz' <add> def install <add> system "./configure", "--MANDIR=\#{prefix}/share/man/man8" <add> end <add> end <add> EOS <add> <add> expected_offenses = [{ message: "\"\#\{prefix}/share/man/man8\" should be \"\#{man8}\"", <add> severity: :convention, <add> line: 5, <add> column: 46, <add> source: source }] <add> <add> inspect_source(cop, source) <add> <add> expected_offenses.zip(cop.offenses).each do |expected, actual| <add> expect_offense(expected, actual) <add> end <add> 
end <add> <ide> end <ide> def expect_offense(expected, actual) <ide> expect(actual.message).to eq(expected[:message])
2
Java
Java
rearrange order of managechildren
5f027ec64d6764fbbb9813fabb373194dec79db7
<ide><path>ReactAndroid/src/main/java/com/facebook/react/uimanager/NativeViewHierarchyManager.java <ide> public synchronized void manageChildren( <ide> } <ide> } <ide> <add> if (tagsToDelete != null) { <add> for (int i = 0; i < tagsToDelete.length; i++) { <add> int tagToDelete = tagsToDelete[i]; <add> final int indexToDelete = indicesToDelete[i]; <add> final View viewToDestroy = mTagsToViews.get(tagToDelete); <add> if (viewToDestroy == null) { <add> throw new IllegalViewOperationException( <add> "Trying to destroy unknown view tag: " <add> + tagToDelete + "\n detail: " + <add> constructManageChildrenErrorMessage( <add> viewToManage, <add> viewManager, <add> indicesToRemove, <add> viewsToAdd, <add> tagsToDelete)); <add> } <add> <add> if (mLayoutAnimationEnabled && <add> mLayoutAnimator.shouldAnimateLayout(viewToDestroy)) { <add> int updatedCount = pendingIndicesToDelete.get(indexToDelete, 0) + 1; <add> pendingIndicesToDelete.put(indexToDelete, updatedCount); <add> mLayoutAnimator.deleteView( <add> viewToDestroy, <add> new LayoutAnimationListener() { <add> @Override <add> public void onAnimationEnd() { <add> viewManager.removeView(viewToManage, viewToDestroy); <add> dropView(viewToDestroy); <add> <add> int count = pendingIndicesToDelete.get(indexToDelete, 0); <add> pendingIndicesToDelete.put(indexToDelete, Math.max(0, count - 1)); <add> } <add> }); <add> } else { <add> dropView(viewToDestroy); <add> } <add> } <add> } <add> <ide> if (viewsToAdd != null) { <ide> for (int i = 0; i < viewsToAdd.length; i++) { <ide> ViewAtIndex viewAtIndex = viewsToAdd[i]; <ide> public synchronized void manageChildren( <ide> viewManager.addView(viewToManage, viewToAdd, normalizedIndexToAdd); <ide> } <ide> } <del> <del> if (tagsToDelete != null) { <del> for (int i = 0; i < tagsToDelete.length; i++) { <del> int tagToDelete = tagsToDelete[i]; <del> final int indexToDelete = indicesToDelete[i]; <del> final View viewToDestroy = mTagsToViews.get(tagToDelete); <del> if (viewToDestroy == null) { <del> throw new IllegalViewOperationException( <del> "Trying to destroy unknown view tag: " <del> + tagToDelete + "\n detail: " + <del> constructManageChildrenErrorMessage( <del> viewToManage, <del> viewManager, <del> indicesToRemove, <del> viewsToAdd, <del> tagsToDelete)); <del> } <del> <del> if (mLayoutAnimationEnabled && <del> mLayoutAnimator.shouldAnimateLayout(viewToDestroy)) { <del> int updatedCount = pendingIndicesToDelete.get(indexToDelete, 0) + 1; <del> pendingIndicesToDelete.put(indexToDelete, updatedCount); <del> mLayoutAnimator.deleteView( <del> viewToDestroy, <del> new LayoutAnimationListener() { <del> @Override <del> public void onAnimationEnd() { <del> viewManager.removeView(viewToManage, viewToDestroy); <del> dropView(viewToDestroy); <del> <del> int count = pendingIndicesToDelete.get(indexToDelete, 0); <del> pendingIndicesToDelete.put(indexToDelete, Math.max(0, count - 1)); <del> } <del> }); <del> } else { <del> dropView(viewToDestroy); <del> } <del> } <del> } <ide> } <ide> <ide> private boolean arrayContains(@Nullable int[] array, int ele) {
1
Javascript
Javascript
improve input validation
fc7b0dda85c006e5830a0e34645d769e20b894d2
<ide><path>lib/child_process.js <ide> function _convertCustomFds(options) { <ide> function normalizeSpawnArguments(file /*, args, options*/) { <ide> var args, options; <ide> <add> if (typeof file !== 'string' || file.length === 0) <add> throw new TypeError('"file" argument must be a non-empty string'); <add> <ide> if (Array.isArray(arguments[1])) { <ide> args = arguments[1].slice(0); <ide> options = arguments[2]; <ide> function normalizeSpawnArguments(file /*, args, options*/) { <ide> else if (options === null || typeof options !== 'object') <ide> throw new TypeError('"options" argument must be an object'); <ide> <add> // Validate the cwd, if present. <add> if (options.cwd != null && <add> typeof options.cwd !== 'string') { <add> throw new TypeError('"cwd" must be a string'); <add> } <add> <add> // Validate detached, if present. <add> if (options.detached != null && <add> typeof options.detached !== 'boolean') { <add> throw new TypeError('"detached" must be a boolean'); <add> } <add> <add> // Validate the uid, if present. <add> if (options.uid != null && !Number.isInteger(options.uid)) { <add> throw new TypeError('"uid" must be an integer'); <add> } <add> <add> // Validate the gid, if present. <add> if (options.gid != null && !Number.isInteger(options.gid)) { <add> throw new TypeError('"gid" must be an integer'); <add> } <add> <add> // Validate the shell, if present. <add> if (options.shell != null && <add> typeof options.shell !== 'boolean' && <add> typeof options.shell !== 'string') { <add> throw new TypeError('"shell" must be a boolean or string'); <add> } <add> <add> // Validate argv0, if present. <add> if (options.argv0 != null && <add> typeof options.argv0 !== 'string') { <add> throw new TypeError('"argv0" must be a string'); <add> } <add> <add> // Validate windowsVerbatimArguments, if present. <add> if (options.windowsVerbatimArguments != null && <add> typeof options.windowsVerbatimArguments !== 'boolean') { <add> throw new TypeError('"windowsVerbatimArguments" must be a boolean'); <add> } <add> <ide> // Make a shallow copy so we don't clobber the user's options object. <ide> options = Object.assign({}, options); <ide> <ide> function spawnSync(/*file, args, options*/) { <ide> <ide> debug('spawnSync', opts.args, options); <ide> <add> // Validate the timeout, if present. <add> if (options.timeout != null && <add> !(Number.isInteger(options.timeout) && options.timeout >= 0)) { <add> throw new TypeError('"timeout" must be an unsigned integer'); <add> } <add> <add> // Validate maxBuffer, if present. <add> if (options.maxBuffer != null && <add> !(Number.isInteger(options.maxBuffer) && options.maxBuffer >= 0)) { <add> throw new TypeError('"maxBuffer" must be an unsigned integer'); <add> } <add> <ide> options.file = opts.file; <ide> options.args = opts.args; <ide> options.envPairs = opts.envPairs; <ide> <del> if (options.killSignal) <add> // Validate the kill signal, if present. <add> if (typeof options.killSignal === 'string' || <add> typeof options.killSignal === 'number') { <ide> options.killSignal = lookupSignal(options.killSignal); <ide> <add> if (options.killSignal === 0) <add> throw new RangeError('"killSignal" cannot be 0'); <add> } else if (options.killSignal != null) { <add> throw new TypeError('"killSignal" must be a string or number'); <add> } <add> <ide> options.stdio = _validateStdio(options.stdio || 'pipe', true).stdio; <ide> <ide> if (options.input) { <ide><path>test/parallel/test-child-process-spawn-typeerror.js <ide> const cmd = common.isWindows ? 
'rundll32' : 'ls'; <ide> const invalidcmd = 'hopefully_you_dont_have_this_on_your_machine'; <ide> const invalidArgsMsg = /Incorrect value of args option/; <ide> const invalidOptionsMsg = /"options" argument must be an object/; <add>const invalidFileMsg = <add> /^TypeError: "file" argument must be a non-empty string$/; <ide> const empty = common.fixturesDir + '/empty.js'; <ide> <ide> assert.throws(function() { <ide> assert.doesNotThrow(function() { <ide> // verify that invalid argument combinations throw <ide> assert.throws(function() { <ide> spawn(); <del>}, /Bad argument/); <add>}, invalidFileMsg); <add> <add>assert.throws(function() { <add> spawn(''); <add>}, invalidFileMsg); <add> <add>assert.throws(function() { <add> const file = { toString() { throw new Error('foo'); } }; <add> spawn(file); <add>}, invalidFileMsg); <ide> <ide> assert.throws(function() { <ide> spawn(cmd, null); <ide><path>test/parallel/test-child-process-spawnsync-validation-errors.js <add>'use strict'; <add>const common = require('../common'); <add>const assert = require('assert'); <add>const spawnSync = require('child_process').spawnSync; <add>const noop = function() {}; <add> <add>function pass(option, value) { <add> // Run the command with the specified option. Since it's not a real command, <add> // spawnSync() should run successfully but return an ENOENT error. <add> const child = spawnSync('not_a_real_command', { [option]: value }); <add> <add> assert.strictEqual(child.error.code, 'ENOENT'); <add>} <add> <add>function fail(option, value, message) { <add> assert.throws(() => { <add> spawnSync('not_a_real_command', { [option]: value }); <add> }, message); <add>} <add> <add>{ <add> // Validate the cwd option <add> const err = /^TypeError: "cwd" must be a string$/; <add> <add> pass('cwd', undefined); <add> pass('cwd', null); <add> pass('cwd', __dirname); <add> fail('cwd', 0, err); <add> fail('cwd', 1, err); <add> fail('cwd', true, err); <add> fail('cwd', false, err); <add> fail('cwd', [], err); <add> fail('cwd', {}, err); <add> fail('cwd', noop, err); <add>} <add> <add>{ <add> // Validate the detached option <add> const err = /^TypeError: "detached" must be a boolean$/; <add> <add> pass('detached', undefined); <add> pass('detached', null); <add> pass('detached', true); <add> pass('detached', false); <add> fail('detached', 0, err); <add> fail('detached', 1, err); <add> fail('detached', __dirname, err); <add> fail('detached', [], err); <add> fail('detached', {}, err); <add> fail('detached', noop, err); <add>} <add> <add>if (!common.isWindows) { <add> { <add> // Validate the uid option <add> if (process.getuid() !== 0) { <add> const err = /^TypeError: "uid" must be an integer$/; <add> <add> pass('uid', undefined); <add> pass('uid', null); <add> pass('uid', process.getuid()); <add> fail('uid', __dirname, err); <add> fail('uid', true, err); <add> fail('uid', false, err); <add> fail('uid', [], err); <add> fail('uid', {}, err); <add> fail('uid', noop, err); <add> fail('uid', NaN, err); <add> fail('uid', Infinity, err); <add> fail('uid', 3.1, err); <add> fail('uid', -3.1, err); <add> } <add> } <add> <add> { <add> // Validate the gid option <add> if (process.getgid() !== 0) { <add> const err = /^TypeError: "gid" must be an integer$/; <add> <add> pass('gid', undefined); <add> pass('gid', null); <add> pass('gid', process.getgid()); <add> fail('gid', __dirname, err); <add> fail('gid', true, err); <add> fail('gid', false, err); <add> fail('gid', [], err); <add> fail('gid', {}, err); <add> fail('gid', noop, err); <add> 
fail('gid', NaN, err); <add> fail('gid', Infinity, err); <add> fail('gid', 3.1, err); <add> fail('gid', -3.1, err); <add> } <add> } <add>} <add> <add>{ <add> // Validate the shell option <add> const err = /^TypeError: "shell" must be a boolean or string$/; <add> <add> pass('shell', undefined); <add> pass('shell', null); <add> pass('shell', false); <add> fail('shell', 0, err); <add> fail('shell', 1, err); <add> fail('shell', [], err); <add> fail('shell', {}, err); <add> fail('shell', noop, err); <add>} <add> <add>{ <add> // Validate the argv0 option <add> const err = /^TypeError: "argv0" must be a string$/; <add> <add> pass('argv0', undefined); <add> pass('argv0', null); <add> pass('argv0', 'myArgv0'); <add> fail('argv0', 0, err); <add> fail('argv0', 1, err); <add> fail('argv0', true, err); <add> fail('argv0', false, err); <add> fail('argv0', [], err); <add> fail('argv0', {}, err); <add> fail('argv0', noop, err); <add>} <add> <add>{ <add> // Validate the windowsVerbatimArguments option <add> const err = /^TypeError: "windowsVerbatimArguments" must be a boolean$/; <add> <add> pass('windowsVerbatimArguments', undefined); <add> pass('windowsVerbatimArguments', null); <add> pass('windowsVerbatimArguments', true); <add> pass('windowsVerbatimArguments', false); <add> fail('windowsVerbatimArguments', 0, err); <add> fail('windowsVerbatimArguments', 1, err); <add> fail('windowsVerbatimArguments', __dirname, err); <add> fail('windowsVerbatimArguments', [], err); <add> fail('windowsVerbatimArguments', {}, err); <add> fail('windowsVerbatimArguments', noop, err); <add>} <add> <add>{ <add> // Validate the timeout option <add> const err = /^TypeError: "timeout" must be an unsigned integer$/; <add> <add> pass('timeout', undefined); <add> pass('timeout', null); <add> pass('timeout', 1); <add> pass('timeout', 0); <add> fail('timeout', -1, err); <add> fail('timeout', true, err); <add> fail('timeout', false, err); <add> fail('timeout', __dirname, err); <add> fail('timeout', [], err); <add> fail('timeout', {}, err); <add> fail('timeout', noop, err); <add> fail('timeout', NaN, err); <add> fail('timeout', Infinity, err); <add> fail('timeout', 3.1, err); <add> fail('timeout', -3.1, err); <add>} <add> <add>{ <add> // Validate the maxBuffer option <add> const err = /^TypeError: "maxBuffer" must be an unsigned integer$/; <add> <add> pass('maxBuffer', undefined); <add> pass('maxBuffer', null); <add> pass('maxBuffer', 1); <add> pass('maxBuffer', 0); <add> fail('maxBuffer', 3.14, err); <add> fail('maxBuffer', -1, err); <add> fail('maxBuffer', NaN, err); <add> fail('maxBuffer', Infinity, err); <add> fail('maxBuffer', true, err); <add> fail('maxBuffer', false, err); <add> fail('maxBuffer', __dirname, err); <add> fail('maxBuffer', [], err); <add> fail('maxBuffer', {}, err); <add> fail('maxBuffer', noop, err); <add>} <add> <add>{ <add> // Validate the killSignal option <add> const typeErr = /^TypeError: "killSignal" must be a string or number$/; <add> const rangeErr = /^RangeError: "killSignal" cannot be 0$/; <add> const unknownSignalErr = /^Error: Unknown signal:/; <add> <add> pass('killSignal', undefined); <add> pass('killSignal', null); <add> pass('killSignal', 'SIGKILL'); <add> pass('killSignal', 500); <add> fail('killSignal', 0, rangeErr); <add> fail('killSignal', 'SIGNOTAVALIDSIGNALNAME', unknownSignalErr); <add> fail('killSignal', true, typeErr); <add> fail('killSignal', false, typeErr); <add> fail('killSignal', [], typeErr); <add> fail('killSignal', {}, typeErr); <add> fail('killSignal', noop, typeErr); <add>}
3
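Editor's note: the validation added in the child_process commit above turns silently mishandled option types into early exceptions. The following is a minimal TypeScript sketch of the observable behaviour, not part of the record itself; it assumes a Node.js build that already contains this change, and the command name is deliberately fake so a successful validation pass only produces an ENOENT error.

```typescript
import { spawnSync } from "child_process";
import * as assert from "assert";

// A non-string cwd is now rejected up front instead of being handed to the
// underlying process spawner. The cast forces the invalid type past TypeScript.
assert.throws(
  () => spawnSync("not_a_real_command", [], { cwd: 42 as any }),
  /"cwd" must be a string/
);

// An empty command name fails before any options are inspected at all.
assert.throws(() => spawnSync(""), /"file" argument must be a non-empty string/);

// A killSignal of 0 is singled out with a RangeError rather than a TypeError.
assert.throws(
  () => spawnSync("not_a_real_command", [], { killSignal: 0 }),
  /"killSignal" cannot be 0/
);

// Well-typed options still pass validation; on a machine without such a
// command the only failure left is the usual ENOENT from the spawn itself.
const result = spawnSync("not_a_real_command", [], { timeout: 1000 });
assert.strictEqual((result.error as NodeJS.ErrnoException | undefined)?.code, "ENOENT");
```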
Javascript
Javascript
fix resolution of outer props with react.memo()
0c7189d92370717ce8c48cf4846b3a101056e772
<ide><path>packages/react-reconciler/src/ReactFiberBeginWork.js <ide> function updateMemoComponent( <ide> ): null | Fiber { <ide> if (current === null) { <ide> let type = Component.type; <del> if (isSimpleFunctionComponent(type) && Component.compare === null) { <add> if ( <add> isSimpleFunctionComponent(type) && <add> Component.compare === null && <add> // SimpleMemoComponent codepath doesn't resolve outer props either. <add> Component.defaultProps === undefined <add> ) { <ide> // If this is a plain function component without default props, <ide> // and with only the default shallow comparison, we upgrade it <ide> // to a SimpleMemoComponent to allow fast path updates. <ide> function beginWork( <ide> case MemoComponent: { <ide> const type = workInProgress.type; <ide> const unresolvedProps = workInProgress.pendingProps; <del> const resolvedProps = resolveDefaultProps(type.type, unresolvedProps); <add> // Resolve outer props first, then resolve inner props. <add> let resolvedProps = resolveDefaultProps(type, unresolvedProps); <add> resolvedProps = resolveDefaultProps(type.type, resolvedProps); <ide> return updateMemoComponent( <ide> current, <ide> workInProgress, <ide><path>packages/react-reconciler/src/__tests__/ReactLazy-test.internal.js <ide> describe('ReactLazy', () => { <ide> expect(root).toMatchRenderedOutput('FooBar'); <ide> expect(ref.current).not.toBe(null); <ide> }); <add> <add> // Regression test for #14310 <add> it('supports defaultProps defined on the memo() return value', async () => { <add> const Add = React.memo(props => { <add> return props.inner + props.outer; <add> }); <add> Add.defaultProps = { <add> inner: 2, <add> }; <add> const LazyAdd = lazy(() => fakeImport(Add)); <add> const root = ReactTestRenderer.create( <add> <Suspense fallback={<Text text="Loading..." />}> <add> <LazyAdd outer={2} /> <add> </Suspense>, <add> { <add> unstable_isConcurrent: true, <add> }, <add> ); <add> expect(root).toFlushAndYield(['Loading...']); <add> expect(root).toMatchRenderedOutput(null); <add> <add> // Mount <add> await Promise.resolve(); <add> root.unstable_flushAll(); <add> expect(root).toMatchRenderedOutput('4'); <add> <add> // Update (shallowly equal) <add> root.update( <add> <Suspense fallback={<Text text="Loading..." />}> <add> <LazyAdd outer={2} /> <add> </Suspense>, <add> ); <add> root.unstable_flushAll(); <add> expect(root).toMatchRenderedOutput('4'); <add> <add> // Update <add> root.update( <add> <Suspense fallback={<Text text="Loading..." />}> <add> <LazyAdd outer={3} /> <add> </Suspense>, <add> ); <add> root.unstable_flushAll(); <add> expect(root).toMatchRenderedOutput('5'); <add> <add> // Update (shallowly equal) <add> root.update( <add> <Suspense fallback={<Text text="Loading..." />}> <add> <LazyAdd outer={3} /> <add> </Suspense>, <add> ); <add> root.unstable_flushAll(); <add> expect(root).toMatchRenderedOutput('5'); <add> <add> // Update (explicit props) <add> root.update( <add> <Suspense fallback={<Text text="Loading..." />}> <add> <LazyAdd outer={1} inner={1} /> <add> </Suspense>, <add> ); <add> root.unstable_flushAll(); <add> expect(root).toMatchRenderedOutput('2'); <add> <add> // Update (explicit props, shallowly equal) <add> root.update( <add> <Suspense fallback={<Text text="Loading..." />}> <add> <LazyAdd outer={1} inner={1} /> <add> </Suspense>, <add> ); <add> root.unstable_flushAll(); <add> expect(root).toMatchRenderedOutput('2'); <add> <add> // Update <add> root.update( <add> <Suspense fallback={<Text text="Loading..." 
/>}> <add> <LazyAdd outer={1} /> <add> </Suspense>, <add> ); <add> root.unstable_flushAll(); <add> expect(root).toMatchRenderedOutput('3'); <add> }); <add> <add> it('merges defaultProps in the correct order', async () => { <add> let Add = React.memo(props => { <add> return props.inner + props.outer; <add> }); <add> Add.defaultProps = { <add> inner: 100, <add> }; <add> Add = React.memo(Add); <add> Add.defaultProps = { <add> inner: 2, <add> outer: 0, <add> }; <add> const LazyAdd = lazy(() => fakeImport(Add)); <add> const root = ReactTestRenderer.create( <add> <Suspense fallback={<Text text="Loading..." />}> <add> <LazyAdd outer={2} /> <add> </Suspense>, <add> { <add> unstable_isConcurrent: true, <add> }, <add> ); <add> expect(root).toFlushAndYield(['Loading...']); <add> expect(root).toMatchRenderedOutput(null); <add> <add> // Mount <add> await Promise.resolve(); <add> root.unstable_flushAll(); <add> expect(root).toMatchRenderedOutput('4'); <add> <add> // Update <add> root.update( <add> <Suspense fallback={<Text text="Loading..." />}> <add> <LazyAdd outer={3} /> <add> </Suspense>, <add> ); <add> root.unstable_flushAll(); <add> expect(root).toMatchRenderedOutput('5'); <add> <add> // Update <add> root.update( <add> <Suspense fallback={<Text text="Loading..." />}> <add> <LazyAdd /> <add> </Suspense>, <add> ); <add> root.unstable_flushAll(); <add> expect(root).toMatchRenderedOutput('2'); <add> }); <ide> });
2
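Editor's note: the React.memo() fix above matters mainly when the memoized component reaches the reconciler through React.lazy, because createElement never gets a chance to apply the wrapper's defaultProps in that path. Below is a condensed TSX sketch of the scenario the regression test exercises; it is illustrative only and assumes a React build containing this fix. `fakeImport` is a stand-in for a dynamic import() and is not a React API.

```tsx
import * as React from "react";
const { Suspense, lazy } = React;

// Inner function component, with defaults attached to the memo() wrapper itself.
const Add = React.memo((props: { inner?: number; outer?: number }) => (
  <span>{(props.inner ?? 0) + (props.outer ?? 0)}</span>
));
// The typings don't declare defaultProps on the memo wrapper, hence the cast.
(Add as any).defaultProps = { inner: 2 };

// Stand-in for a dynamic import() resolving to a module with a default export.
const fakeImport = <T,>(value: T) => Promise.resolve({ default: value });
const LazyAdd = lazy(() => fakeImport<any>(Add));

// With the fix, <LazyAdd outer={2} /> renders "4": the outer (wrapper)
// defaultProps are resolved before the inner component's. Without the fix
// they were skipped and props.inner stayed undefined here.
export const App = () => (
  <Suspense fallback={<span>Loading...</span>}>
    <LazyAdd outer={2} />
  </Suspense>
);
```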
Ruby
Ruby
fix variable in pythonpath
431f23db32e29827252d1bbdc809ec72039b3749
<ide><path>Library/Homebrew/requirements/python_requirement.rb <ide> def pour_bottle? <ide> end <ide> <ide> if python_binary == "python" <del> ENV["PYTHONPATH"] = "#{HOMEBREW_PREFIX}/lib/python#{version}/site-packages" <add> ENV["PYTHONPATH"] = "#{HOMEBREW_PREFIX}/lib/python#{short_version}/site-packages" <ide> end <ide> end <ide>
1
PHP
PHP
make an eloquent collection
0fdd6241f236508d24b3c09d11d054056b45254f
<ide><path>src/Illuminate/Database/Eloquent/FactoryBuilder.php <ide> public function make(array $attributes = array()) <ide> $results[] = $this->makeInstance($attributes); <ide> } <ide> <del> return collect($results); <add> return new Collection($results); <ide> } <ide> } <ide>
1
Ruby
Ruby
fix sparkle detection if content type is missing
71759035dd82b40b7ad8a4a1f30e917b4306e6c8
<ide><path>Library/Homebrew/livecheck/strategy/sparkle.rb <ide> def self.match?(url) <ide> xml = url.end_with?(".xml") <ide> xml ||= begin <ide> headers = Strategy.page_headers(url) <del> content_type = headers["content-type"]&.split(";", 2)&.first <del> ["application/xml", "text/xml"].include?(content_type) <add> content_type = headers["content-type"] <add> content_type.blank? || content_type.include?("xml") <ide> end <ide> return false unless xml <ide>
1
Python
Python
fix missing seedsequence import
8a0c54ac0442dd1bf7d60f3ea5e2c578ae71959f
<ide><path>numpy/random/tests/test_direct.py <ide> assert_raises) <ide> import pytest <ide> <del>from numpy.random import (Generator, MT19937, PCG64, Philox, RandomState) <add>from numpy.random import ( <add> Generator, MT19937, PCG64, Philox, RandomState, SeedSequence <add>) <ide> from numpy.random.common import interface <ide> <ide> try:
1
Mixed
Go
add wildcard support to copy/add
acd40d50799a117c4e7c318abdbc8c14b9eb1632
<ide><path>builder/internals.go <ide> func (b *Builder) commit(id string, autoCmd []string, comment string) error { <ide> type copyInfo struct { <ide> origPath string <ide> destPath string <del> hashPath string <add> hash string <ide> decompress bool <ide> tmpDir string <ide> } <ide> func (b *Builder) runContextCommand(args []string, allowRemote bool, allowDecomp <ide> <ide> dest := args[len(args)-1] // last one is always the dest <ide> <del> if len(args) > 2 && dest[len(dest)-1] != '/' { <del> return fmt.Errorf("When using %s with more than one source file, the destination must be a directory and end with a /", cmdName) <del> } <del> <del> copyInfos := make([]copyInfo, len(args)-1) <del> hasHash := false <del> srcPaths := "" <del> origPaths := "" <add> copyInfos := []*copyInfo{} <ide> <ide> b.Config.Image = b.image <ide> <ide> func (b *Builder) runContextCommand(args []string, allowRemote bool, allowDecomp <ide> // Loop through each src file and calculate the info we need to <ide> // do the copy (e.g. hash value if cached). Don't actually do <ide> // the copy until we've looked at all src files <del> for i, orig := range args[0 : len(args)-1] { <del> ci := &copyInfos[i] <del> ci.origPath = orig <del> ci.destPath = dest <del> ci.decompress = true <del> <del> err := calcCopyInfo(b, cmdName, ci, allowRemote, allowDecompression) <add> for _, orig := range args[0 : len(args)-1] { <add> err := calcCopyInfo(b, cmdName, &copyInfos, orig, dest, allowRemote, allowDecompression) <ide> if err != nil { <ide> return err <ide> } <add> } <ide> <del> origPaths += " " + ci.origPath // will have leading space <del> if ci.hashPath == "" { <del> srcPaths += " " + ci.origPath // note leading space <del> } else { <del> srcPaths += " " + ci.hashPath // note leading space <del> hasHash = true <add> if len(copyInfos) == 0 { <add> return fmt.Errorf("No source files were specified") <add> } <add> <add> if len(copyInfos) > 1 && !strings.HasSuffix(dest, "/") { <add> return fmt.Errorf("When using %s with more than one source file, the destination must be a directory and end with a /", cmdName) <add> } <add> <add> // For backwards compat, if there's just one CI then use it as the <add> // cache look-up string, otherwise hash 'em all into one <add> var srcHash string <add> var origPaths string <add> <add> if len(copyInfos) == 1 { <add> srcHash = copyInfos[0].hash <add> origPaths = copyInfos[0].origPath <add> } else { <add> var hashs []string <add> var origs []string <add> for _, ci := range copyInfos { <add> hashs = append(hashs, ci.hash) <add> origs = append(origs, ci.origPath) <ide> } <add> hasher := sha256.New() <add> hasher.Write([]byte(strings.Join(hashs, ","))) <add> srcHash = "multi:" + hex.EncodeToString(hasher.Sum(nil)) <add> origPaths = strings.Join(origs, " ") <ide> } <ide> <ide> cmd := b.Config.Cmd <del> b.Config.Cmd = []string{"/bin/sh", "-c", fmt.Sprintf("#(nop) %s%s in %s", cmdName, srcPaths, dest)} <add> b.Config.Cmd = []string{"/bin/sh", "-c", fmt.Sprintf("#(nop) %s %s in %s", cmdName, srcHash, dest)} <ide> defer func(cmd []string) { b.Config.Cmd = cmd }(cmd) <ide> <ide> hit, err := b.probeCache() <ide> if err != nil { <ide> return err <ide> } <ide> // If we do not have at least one hash, never use the cache <del> if hit && hasHash { <add> if hit && b.UtilizeCache { <ide> return nil <ide> } <ide> <ide> func (b *Builder) runContextCommand(args []string, allowRemote bool, allowDecomp <ide> } <ide> } <ide> <del> if err := b.commit(container.ID, cmd, fmt.Sprintf("%s%s in %s", cmdName, origPaths, dest)); err 
!= nil { <add> if err := b.commit(container.ID, cmd, fmt.Sprintf("%s %s in %s", cmdName, origPaths, dest)); err != nil { <ide> return err <ide> } <ide> return nil <ide> } <ide> <del>func calcCopyInfo(b *Builder, cmdName string, ci *copyInfo, allowRemote bool, allowDecompression bool) error { <del> var ( <del> remoteHash string <del> isRemote bool <del> ) <add>func calcCopyInfo(b *Builder, cmdName string, cInfos *[]*copyInfo, origPath string, destPath string, allowRemote bool, allowDecompression bool) error { <add> <add> if origPath != "" && origPath[0] == '/' && len(origPath) > 1 { <add> origPath = origPath[1:] <add> } <add> origPath = strings.TrimPrefix(origPath, "./") <ide> <del> saveOrig := ci.origPath <del> isRemote = utils.IsURL(ci.origPath) <add> // In the remote/URL case, download it and gen its hashcode <add> if utils.IsURL(origPath) { <add> if !allowRemote { <add> return fmt.Errorf("Source can't be a URL for %s", cmdName) <add> } <add> <add> ci := copyInfo{} <add> ci.origPath = origPath <add> ci.hash = origPath // default to this but can change <add> ci.destPath = destPath <add> ci.decompress = false <add> *cInfos = append(*cInfos, &ci) <ide> <del> if isRemote && !allowRemote { <del> return fmt.Errorf("Source can't be an URL for %s", cmdName) <del> } else if isRemote { <ide> // Initiate the download <ide> resp, err := utils.Download(ci.origPath) <ide> if err != nil { <ide> func calcCopyInfo(b *Builder, cmdName string, ci *copyInfo, allowRemote bool, al <ide> <ide> ci.origPath = path.Join(filepath.Base(tmpDirName), filepath.Base(tmpFileName)) <ide> <del> // Process the checksum <del> r, err := archive.Tar(tmpFileName, archive.Uncompressed) <del> if err != nil { <del> return err <del> } <del> tarSum, err := tarsum.NewTarSum(r, true, tarsum.Version0) <del> if err != nil { <del> return err <del> } <del> if _, err := io.Copy(ioutil.Discard, tarSum); err != nil { <del> return err <del> } <del> remoteHash = tarSum.Sum(nil) <del> r.Close() <del> <ide> // If the destination is a directory, figure out the filename. 
<ide> if strings.HasSuffix(ci.destPath, "/") { <del> u, err := url.Parse(saveOrig) <add> u, err := url.Parse(origPath) <ide> if err != nil { <ide> return err <ide> } <ide> func calcCopyInfo(b *Builder, cmdName string, ci *copyInfo, allowRemote bool, al <ide> } <ide> ci.destPath = ci.destPath + filename <ide> } <add> <add> // Calc the checksum, only if we're using the cache <add> if b.UtilizeCache { <add> r, err := archive.Tar(tmpFileName, archive.Uncompressed) <add> if err != nil { <add> return err <add> } <add> tarSum, err := tarsum.NewTarSum(r, true, tarsum.Version0) <add> if err != nil { <add> return err <add> } <add> if _, err := io.Copy(ioutil.Discard, tarSum); err != nil { <add> return err <add> } <add> ci.hash = tarSum.Sum(nil) <add> r.Close() <add> } <add> <add> return nil <add> } <add> <add> // Deal with wildcards <add> if ContainsWildcards(origPath) { <add> for _, fileInfo := range b.context.GetSums() { <add> if fileInfo.Name() == "" { <add> continue <add> } <add> match, _ := path.Match(origPath, fileInfo.Name()) <add> if !match { <add> continue <add> } <add> <add> calcCopyInfo(b, cmdName, cInfos, fileInfo.Name(), destPath, allowRemote, allowDecompression) <add> } <add> return nil <ide> } <ide> <del> if err := b.checkPathForAddition(ci.origPath); err != nil { <add> // Must be a dir or a file <add> <add> if err := b.checkPathForAddition(origPath); err != nil { <ide> return err <ide> } <add> fi, _ := os.Stat(path.Join(b.contextPath, origPath)) <ide> <del> // Hash path and check the cache <del> if b.UtilizeCache { <del> var ( <del> sums = b.context.GetSums() <del> ) <add> ci := copyInfo{} <add> ci.origPath = origPath <add> ci.hash = origPath <add> ci.destPath = destPath <add> ci.decompress = allowDecompression <add> *cInfos = append(*cInfos, &ci) <ide> <del> if remoteHash != "" { <del> ci.hashPath = remoteHash <del> } else if fi, err := os.Stat(path.Join(b.contextPath, ci.origPath)); err != nil { <del> return err <del> } else if fi.IsDir() { <del> var subfiles []string <del> absOrigPath := path.Join(b.contextPath, ci.origPath) <del> <del> // Add a trailing / to make sure we only <del> // pick up nested files under the dir and <del> // not sibling files of the dir that just <del> // happen to start with the same chars <del> if !strings.HasSuffix(absOrigPath, "/") { <del> absOrigPath += "/" <del> } <del> for _, fileInfo := range sums { <del> absFile := path.Join(b.contextPath, fileInfo.Name()) <del> if strings.HasPrefix(absFile, absOrigPath) { <del> subfiles = append(subfiles, fileInfo.Sum()) <del> } <del> } <del> sort.Strings(subfiles) <del> hasher := sha256.New() <del> hasher.Write([]byte(strings.Join(subfiles, ","))) <del> ci.hashPath = "dir:" + hex.EncodeToString(hasher.Sum(nil)) <del> } else { <del> if ci.origPath[0] == '/' && len(ci.origPath) > 1 { <del> ci.origPath = ci.origPath[1:] <del> } <del> ci.origPath = strings.TrimPrefix(ci.origPath, "./") <del> // This will match on the first file in sums of the archive <del> if fis := sums.GetFile(ci.origPath); fis != nil { <del> ci.hashPath = "file:" + fis.Sum() <del> } <add> // If not using cache don't need to do anything else. 
<add> // If we are using a cache then calc the hash for the src file/dir <add> if !b.UtilizeCache { <add> return nil <add> } <add> <add> // Deal with the single file case <add> if !fi.IsDir() { <add> // This will match first file in sums of the archive <add> fis := b.context.GetSums().GetFile(ci.origPath) <add> if fis != nil { <add> ci.hash = "file:" + fis.Sum() <ide> } <add> return nil <add> } <add> <add> // Must be a dir <add> var subfiles []string <add> absOrigPath := path.Join(b.contextPath, ci.origPath) <ide> <add> // Add a trailing / to make sure we only pick up nested files under <add> // the dir and not sibling files of the dir that just happen to <add> // start with the same chars <add> if !strings.HasSuffix(absOrigPath, "/") { <add> absOrigPath += "/" <ide> } <ide> <del> if !allowDecompression || isRemote { <del> ci.decompress = false <add> // Need path w/o / too to find matching dir w/o trailing / <add> absOrigPathNoSlash := absOrigPath[:len(absOrigPath)-1] <add> <add> for _, fileInfo := range b.context.GetSums() { <add> absFile := path.Join(b.contextPath, fileInfo.Name()) <add> if strings.HasPrefix(absFile, absOrigPath) || absFile == absOrigPathNoSlash { <add> subfiles = append(subfiles, fileInfo.Sum()) <add> } <ide> } <add> sort.Strings(subfiles) <add> hasher := sha256.New() <add> hasher.Write([]byte(strings.Join(subfiles, ","))) <add> ci.hash = "dir:" + hex.EncodeToString(hasher.Sum(nil)) <add> <ide> return nil <ide> } <ide> <add>func ContainsWildcards(name string) bool { <add> for i := 0; i < len(name); i++ { <add> ch := name[i] <add> if ch == '\\' { <add> i++ <add> } else if ch == '*' || ch == '?' || ch == '[' { <add> return true <add> } <add> } <add> return false <add>} <add> <ide> func (b *Builder) pullImage(name string) (*imagepkg.Image, error) { <ide> remote, tag := parsers.ParseRepositoryTag(name) <ide> if tag == "" { <ide><path>docs/sources/reference/builder.md <ide> The `ADD` instruction copies new files,directories or remote file URLs to <ide> the filesystem of the container from `<src>` and add them to the at <ide> path `<dest>`. <ide> <del>Multiple <src> resource may be specified but if they are files or <add>Multiple `<src>` resource may be specified but if they are files or <ide> directories then they must be relative to the source directory that is <ide> being built (the context of the build). <ide> <del>`<dest>` is the absolute path to which the source will be copied inside the <add>Each `<src>` may contain wildcards and matching will be done using Go's <add>[filepath.Match](http://golang.org/pkg/path/filepath#Match) rules. <add>For most command line uses this should act as expected, for example: <add> <add> ADD hom* /mydir/ # adds all files starting with "hom" <add> ADD hom?.txt /mydir/ # ? is replaced with any single character <add> <add>The `<dest>` is the absolute path to which the source will be copied inside the <ide> destination container. <ide> <ide> All new files and directories are created with a UID and GID of 0. <ide> The copy obeys the following rules: <ide> will be considered a directory and the contents of `<src>` will be written <ide> at `<dest>/base(<src>)`. <ide> <del>- If multiple `<src>` resources are specified then `<dest>` must be a <del> directory, and it must end with a slash `/`. <add>- If multiple `<src>` resources are specified, either directly or due to the <add> use of a wildcard, then `<dest>` must be a directory, and it must end with <add> a slash `/`. 
<ide> <ide> - If `<dest>` does not end with a trailing slash, it will be considered a <ide> regular file and the contents of `<src>` will be written at `<dest>`. <ide> The `COPY` instruction copies new files,directories or remote file URLs to <ide> the filesystem of the container from `<src>` and add them to the at <ide> path `<dest>`. <ide> <del>Multiple <src> resource may be specified but if they are files or <add>Multiple `<src>` resource may be specified but if they are files or <ide> directories then they must be relative to the source directory that is being <ide> built (the context of the build). <ide> <del>`<dest>` is the absolute path to which the source will be copied inside the <add>Each `<src>` may contain wildcards and matching will be done using Go's <add>[filepath.Match](http://golang.org/pkg/path/filepath#Match) rules. <add>For most command line uses this should act as expected, for example: <add> <add> COPY hom* /mydir/ # adds all files starting with "hom" <add> COPY hom?.txt /mydir/ # ? is replaced with any single character <add> <add>The `<dest>` is the absolute path to which the source will be copied inside the <ide> destination container. <ide> <ide> All new files and directories are created with a UID and GID of 0. <ide> The copy obeys the following rules: <ide> will be considered a directory and the contents of `<src>` will be written <ide> at `<dest>/base(<src>)`. <ide> <del>- If multiple `<src>` resources are specified then `<dest>` must be a <del> directory, and it must end with a slash `/`. <add>- If multiple `<src>` resources are specified, either directly or due to the <add> use of a wildcard, then `<dest>` must be a directory, and it must end with <add> a slash `/`. <ide> <ide> - If `<dest>` does not end with a trailing slash, it will be considered a <ide> regular file and the contents of `<src>` will be written at `<dest>`. 
<ide><path>integration-cli/docker_cli_build_test.go <ide> func TestBuildAddMultipleFilesToFile(t *testing.T) { <ide> logDone("build - multiple add files to file") <ide> } <ide> <add>func TestBuildAddMultipleFilesToFileWild(t *testing.T) { <add> name := "testaddmultiplefilestofilewild" <add> defer deleteImages(name) <add> ctx, err := fakeContext(`FROM scratch <add> ADD file*.txt test <add> `, <add> map[string]string{ <add> "file1.txt": "test1", <add> "file2.txt": "test1", <add> }) <add> defer ctx.Close() <add> if err != nil { <add> t.Fatal(err) <add> } <add> <add> expected := "When using ADD with more than one source file, the destination must be a directory and end with a /" <add> if _, err := buildImageFromContext(name, ctx, true); err == nil || !strings.Contains(err.Error(), expected) { <add> t.Fatalf("Wrong error: (should contain \"%s\") got:\n%v", expected, err) <add> } <add> <add> logDone("build - multiple add files to file wild") <add>} <add> <ide> func TestBuildCopyMultipleFilesToFile(t *testing.T) { <ide> name := "testcopymultiplefilestofile" <ide> defer deleteImages(name) <ide> func TestBuildCopyMultipleFilesToFile(t *testing.T) { <ide> logDone("build - multiple copy files to file") <ide> } <ide> <add>func TestBuildCopyWildcard(t *testing.T) { <add> name := "testcopywildcard" <add> defer deleteImages(name) <add> server, err := fakeStorage(map[string]string{ <add> "robots.txt": "hello", <add> "index.html": "world", <add> }) <add> if err != nil { <add> t.Fatal(err) <add> } <add> defer server.Close() <add> ctx, err := fakeContext(fmt.Sprintf(`FROM busybox <add> COPY file*.txt /tmp/ <add> RUN ls /tmp/file1.txt /tmp/file2.txt <add> RUN mkdir /tmp1 <add> COPY dir* /tmp1/ <add> RUN ls /tmp1/dirt /tmp1/nested_file /tmp1/nested_dir/nest_nest_file <add> RUN mkdir /tmp2 <add> ADD dir/*dir %s/robots.txt /tmp2/ <add> RUN ls /tmp2/nest_nest_file /tmp2/robots.txt <add> `, server.URL), <add> map[string]string{ <add> "file1.txt": "test1", <add> "file2.txt": "test2", <add> "dir/nested_file": "nested file", <add> "dir/nested_dir/nest_nest_file": "2 times nested", <add> "dirt": "dirty", <add> }) <add> defer ctx.Close() <add> if err != nil { <add> t.Fatal(err) <add> } <add> <add> id1, err := buildImageFromContext(name, ctx, true) <add> if err != nil { <add> t.Fatal(err) <add> } <add> <add> // Now make sure we use a cache the 2nd time <add> id2, err := buildImageFromContext(name, ctx, true) <add> if err != nil { <add> t.Fatal(err) <add> } <add> <add> if id1 != id2 { <add> t.Fatal(fmt.Errorf("Didn't use the cache")) <add> } <add> <add> logDone("build - copy wild card") <add>} <add> <add>func TestBuildCopyWildcardNoFind(t *testing.T) { <add> name := "testcopywildcardnofind" <add> defer deleteImages(name) <add> ctx, err := fakeContext(`FROM busybox <add> COPY file*.txt /tmp/ <add> `, nil) <add> defer ctx.Close() <add> if err != nil { <add> t.Fatal(err) <add> } <add> <add> _, err = buildImageFromContext(name, ctx, true) <add> if err == nil { <add> t.Fatal(fmt.Errorf("Should have failed to find a file")) <add> } <add> if !strings.Contains(err.Error(), "No source files were specified") { <add> t.Fatalf("Wrong error %v, must be about no source files", err) <add> } <add> <add> logDone("build - copy wild card no find") <add>} <add> <add>func TestBuildCopyWildcardCache(t *testing.T) { <add> name := "testcopywildcardcache" <add> defer deleteImages(name) <add> server, err := fakeStorage(map[string]string{ <add> "robots.txt": "hello", <add> "index.html": "world", <add> }) <add> if err != nil { <add> t.Fatal(err) 
<add> } <add> defer server.Close() <add> ctx, err := fakeContext(`FROM busybox <add> COPY file1.txt /tmp/ <add> `, <add> map[string]string{ <add> "file1.txt": "test1", <add> }) <add> defer ctx.Close() <add> if err != nil { <add> t.Fatal(err) <add> } <add> <add> if err != nil { <add> t.Fatal(err) <add> } <add> id1, err := buildImageFromContext(name, ctx, true) <add> if err != nil { <add> t.Fatal(err) <add> } <add> <add> // Now make sure we use a cache the 2nd time even with wild card <add> ctx2, err := fakeContext(`FROM busybox <add> COPY file*.txt /tmp/ <add> `, <add> map[string]string{ <add> "file1.txt": "test1", <add> }) <add> defer ctx2.Close() <add> if err != nil { <add> t.Fatal(err) <add> } <add> <add> if err != nil { <add> t.Fatal(err) <add> } <add> id2, err := buildImageFromContext(name, ctx2, true) <add> if err != nil { <add> t.Fatal(err) <add> } <add> <add> if id1 != id2 { <add> t.Fatal(fmt.Errorf("Didn't use the cache")) <add> } <add> <add> logDone("build - copy wild card cache") <add>} <add> <ide> func TestBuildAddSingleFileToNonExistDir(t *testing.T) { <ide> name := "testaddsinglefiletononexistdir" <ide> defer deleteImages(name)
3
Javascript
Javascript
fix merge error
aa06e18df113d33c238a03289361d5dc6a6a1ff6
<ide><path>test/Stats.test.js <ide> describe("Stats", () => { <ide> 336, <ide> ], <ide> "emitted": true, <add> "info": Object { <add> "size": 111, <add> }, <ide> "name": "chunkB.js", <ide> "size": 111, <ide> }, <ide> describe("Stats", () => { <ide> 938, <ide> ], <ide> "emitted": true, <add> "info": Object { <add> "size": 211, <add> }, <ide> "name": "entryA.js", <ide> "size": 211, <ide> }, <ide> describe("Stats", () => { <ide> 513, <ide> ], <ide> "emitted": true, <add> "info": Object { <add> "size": 2085, <add> }, <ide> "name": "entryB.js", <ide> "size": 2085, <ide> },
1
Java
Java
move instance methods before static methods
de3e07b3203581242c701f86ffc49cd010d44fca
<ide><path>spring-web-reactive/src/main/java/org/springframework/web/reactive/function/Configuration.java <ide> */ <ide> public interface Configuration { <ide> <add> // Instance methods <add> <add> /** <add> * Supply a {@linkplain Stream stream} of {@link HttpMessageReader}s to be used for request <add> * body conversion. <add> * @return the stream of message readers <add> */ <add> Supplier<Stream<HttpMessageReader<?>>> messageReaders(); <add> <add> /** <add> * Supply a {@linkplain Stream stream} of {@link HttpMessageWriter}s to be used for response <add> * body conversion. <add> * @return the stream of message writers <add> */ <add> Supplier<Stream<HttpMessageWriter<?>>> messageWriters(); <add> <add> /** <add> * Supply a {@linkplain Stream stream} of {@link ViewResolver}s to be used for view name <add> * resolution. <add> * @return the stream of view resolvers <add> */ <add> Supplier<Stream<ViewResolver>> viewResolvers(); <add> <ide> // Static methods <ide> <ide> /** <ide> static Builder applicationContext(ApplicationContext applicationContext) { <ide> return builder; <ide> } <ide> <del> // Instance methods <del> <del> /** <del> * Supply a {@linkplain Stream stream} of {@link HttpMessageReader}s to be used for request <del> * body conversion. <del> * @return the stream of message readers <del> */ <del> Supplier<Stream<HttpMessageReader<?>>> messageReaders(); <del> <del> /** <del> * Supply a {@linkplain Stream stream} of {@link HttpMessageWriter}s to be used for response <del> * body conversion. <del> * @return the stream of message writers <del> */ <del> Supplier<Stream<HttpMessageWriter<?>>> messageWriters(); <del> <del> /** <del> * Supply a {@linkplain Stream stream} of {@link ViewResolver}s to be used for view name <del> * resolution. <del> * @return the stream of view resolvers <del> */ <del> Supplier<Stream<ViewResolver>> viewResolvers(); <del> <ide> <ide> /** <ide> * A mutable builder for a {@link Configuration}. <ide><path>spring-web-reactive/src/main/java/org/springframework/web/reactive/function/Request.java <ide> default Optional<String> pathVariable(String name) { <ide> */ <ide> Map<String, String> pathVariables(); <ide> <add> <ide> /** <ide> * Represents the headers of the HTTP request. <ide> * @see Request#headers() <ide><path>spring-web-reactive/src/main/java/org/springframework/web/reactive/function/Response.java <ide> */ <ide> public interface Response<T> { <ide> <add> // Instance methods <add> <add> /** <add> * Return the status code of this response. <add> */ <add> HttpStatus statusCode(); <add> <add> /** <add> * Return the headers of this response. <add> */ <add> HttpHeaders headers(); <add> <add> /** <add> * Return the body of this response. <add> */ <add> T body(); <add> <add> /** <add> * Writes this response to the given web exchange. <add> * <add> * @param exchange the web exchange to write to <add> * @return {@code Mono<Void>} to indicate when request handling is complete <add> */ <add> Mono<Void> writeTo(ServerWebExchange exchange, Configuration configuration); <add> <ide> // Static builder methods <ide> <ide> /** <ide> static BodyBuilder unprocessableEntity() { <ide> return status(HttpStatus.UNPROCESSABLE_ENTITY); <ide> } <ide> <del> // Instance methods <del> <del> /** <del> * Return the status code of this response. <del> */ <del> HttpStatus statusCode(); <del> <del> /** <del> * Return the headers of this response. <del> */ <del> HttpHeaders headers(); <del> <del> /** <del> * Return the body of this response. 
<del> */ <del> T body(); <del> <del> /** <del> * Writes this response to the given web exchange. <del> * <del> * @param exchange the web exchange to write to <del> * @return {@code Mono<Void>} to indicate when request handling is complete <del> */ <del> Mono<Void> writeTo(ServerWebExchange exchange, Configuration configuration); <del> <ide> <ide> /** <ide> * Defines a builder that adds headers to the response.
3
PHP
PHP
expand help in view task
345c4e60a5cb629f6b17c37a0ef16f1f715355d5
<ide><path>src/Console/Command/Task/ViewTask.php <ide> public function getTemplate($action) { <ide> public function getOptionParser() { <ide> $parser = parent::getOptionParser(); <ide> <del> $parser->description( <del> __d('cake_console', 'Bake views for a controller, using built-in or custom templates.') <del> )->addArgument('controller', [ <add> $parser->description([ <add> __d('cake_console', 'Bake views for a controller, using built-in or custom templates.'), <add> __d('cake_console', 'You can bake all the views for a controller using: '), <add> '', <add> __d('cake_console', ' Console/cake bake view Tasks'), <add> '', <add> __d('cake_console', 'You can bake a single view using:'), <add> '', <add> __d('cake_console', ' Console/cake bake view Tasks index'), <add> '', <add> __d('cake_console', 'You can bake a single view for all controllers using:'), <add> '', <add> __d('cake_console', ' Console/cake bake view all index') <add> ])->addArgument('controller', [ <ide> 'help' => __d('cake_console', 'Name of the controller views to bake. Can be Plugin.name as a shortcut for plugin baking.') <ide> ])->addArgument('action', [ <ide> 'help' => __d('cake_console', "Will bake a single action's file. core templates are (index, add, edit, view)")
1