content_type: stringclasses (8 values)
main_lang: stringclasses (7 values)
message: stringlengths (1 to 50)
sha: stringlengths (40 to 40)
patch: stringlengths (52 to 962k)
file_count: int64 (1 to 300)
Ruby
Ruby
fix hashwithindifferentaccess#to_hash behaviour
df24b8790f22384a068fece7042f04ffd2fcb33e
<ide><path>activesupport/lib/active_support/hash_with_indifferent_access.rb <ide> def to_options!; self end <ide> <ide> # Convert to a regular hash with string keys. <ide> def to_hash <del> Hash.new(default).merge!(self) <add> _new_hash= {} <add> each do |key, value| <add> _new_hash[convert_key(key)] = convert_value(value,true) <add> end <add> Hash.new(default).merge!(_new_hash) <ide> end <ide> <ide> protected <ide> def convert_key(key) <ide> key.kind_of?(Symbol) ? key.to_s : key <ide> end <ide> <del> def convert_value(value) <add> def convert_value(value, _convert_for_to_hash = false) <ide> if value.is_a? Hash <del> value.nested_under_indifferent_access <add> _convert_for_to_hash ? value.to_hash : value.nested_under_indifferent_access <ide> elsif value.is_a?(Array) <ide> value = value.dup if value.frozen? <ide> value.map! { |e| convert_value(e) } <ide><path>activesupport/test/core_ext/hash_ext_test.rb <ide> def test_indifferent_to_hash <ide> roundtrip = mixed_with_default.with_indifferent_access.to_hash <ide> assert_equal @strings, roundtrip <ide> assert_equal '1234', roundtrip.default <add> new_to_hash = @nested_mixed.with_indifferent_access.to_hash <add> assert_not new_to_hash.instance_of?(HashWithIndifferentAccess) <add> assert_not new_to_hash["a"].instance_of?(HashWithIndifferentAccess) <add> assert_not new_to_hash["a"]["b"].instance_of?(HashWithIndifferentAccess) <ide> end <ide> <ide> def test_lookup_returns_the_same_object_that_is_stored_in_hash_indifferent_access
2
Text
Text
add ibm qiskit references
fd7da5ff8f7dabb73a3786a0951518de18388d71
<ide><path>quantum/README.md <ide> Started at https://github.com/TheAlgorithms/Python/issues/1831 <ide> * Google: https://research.google/teams/applied-science/quantum <ide> * IBM: https://qiskit.org and https://github.com/Qiskit <ide> * Rigetti: https://rigetti.com and https://github.com/rigetti <add> <add>## IBM Qiskit <add>- Start using by installing `pip install qiskit`, refer the [docs](https://qiskit.org/documentation/install.html) for more info. <add>- Tutorials & References <add> - https://github.com/Qiskit/qiskit-tutorials <add> - https://quantum-computing.ibm.com/docs/iql/first-circuit <add> - https://medium.com/qiskit/how-to-program-a-quantum-computer-982a9329ed02
1
PHP
PHP
fix non-model children support
a69db4280d39d09cf3494462d7e1d26206a86d49
<ide><path>src/Illuminate/Database/Eloquent/Model.php <ide> public function resolveSoftDeletableRouteBinding($value, $field = null) <ide> /** <ide> * Retrieve the model for a bound value. <ide> * <del> * @param Model|Relation $query <add> * @param Model|Relation $query <ide> * @param mixed $value <ide> * @param string|null $field <ide> * @return Model|Illuminate\Database\Eloquent\Relations\Relation <ide> protected function resolveChildRouteBindingQuery($childType, $value, $field) <ide> <ide> if ($relationship instanceof HasManyThrough || <ide> $relationship instanceof BelongsToMany) { <del> return $relationship->getRelated()->resolveRouteBindingQuery( <del> $relationship, $value, $relationship->getRelated()->getTable().'.'.$field); <add> $field = $relationship->getRelated()->getTable().'.'.$field; <ide> } <ide> <del> return $relationship->getRelated()->resolveRouteBindingQuery($relationship, $value, $field); <add> if ($relationship instanceof Model) { <add> return $relationship->getRelated()->resolveRouteBindingQuery($relationship, $value, $field); <add> } <add> <add> return $relationship->where($field, $value); <ide> } <ide> <ide> /**
1
Javascript
Javascript
fix subsequent enroll calls not working
734eb17e5d19145c4c5a696aa94e81c9655272f5
<ide><path>lib/timers.js <ide> exports.unenroll = util.deprecate(unenroll, <ide> // This function does not start the timer, see `active()`. <ide> // Using existing objects as timers slightly reduces object overhead. <ide> function enroll(item, msecs) { <del> item._idleTimeout = validateTimerDuration(msecs); <add> msecs = validateTimerDuration(msecs); <ide> <ide> // if this item was already in a list somewhere <ide> // then we should unenroll it from that <ide> if (item._idleNext) unenroll(item); <ide> <ide> L.init(item); <add> item._idleTimeout = msecs; <ide> } <ide> <ide> exports.enroll = util.deprecate(enroll, <ide><path>test/parallel/test-timers-enroll-second-time.js <add>'use strict'; <add> <add>const common = require('../common'); <add> <add>const assert = require('assert'); <add>const timers = require('timers'); <add> <add>const enrollObj = { <add> _onTimeout: common.mustCall(), <add>}; <add> <add>timers.enroll(enrollObj, 1); <add>assert.strictEqual(enrollObj._idleTimeout, 1); <add>timers.enroll(enrollObj, 10); <add>assert.strictEqual(enrollObj._idleTimeout, 10); <add>timers.active(enrollObj);
2
Javascript
Javascript
fix anchor links
23bf35d5ef34039fd181b3f25b973a4d30f1720d
<ide><path>web/viewer.js <ide> var PDFView = { <ide> }, <ide> <ide> getDestinationHash: function pdfViewGetDestinationHash(dest) { <add> // We add the full url for the extension so the anchor links don't come up <add> // as resource:// urls and so open in new tab/window works. <add> var url = PDFJS.isFirefoxExtension ? this.url.split('#')[0] : ''; <ide> if (typeof dest === 'string') <del> return '#' + escape(dest); <add> return url + '#' + escape(dest); <ide> if (dest instanceof Array) { <ide> var destRef = dest[0]; // see navigateTo method for dest format <ide> var pageNumber = destRef instanceof Object ? <ide> this.pagesRefMap[destRef.num + ' ' + destRef.gen + ' R'] : <ide> (destRef + 1); <ide> if (pageNumber) { <del> var pdfOpenParams = '#page=' + pageNumber; <add> var pdfOpenParams = url + '#page=' + pageNumber; <ide> var destKind = dest[1]; <ide> if ('name' in destKind && destKind.name == 'XYZ') { <ide> var scale = (dest[4] || this.currentScale);
1
Go
Go
fix unmountdevice for non-existing device
304e33a2fe2d006cc1063c8060c6e22c9ade3d59
<ide><path>graphdriver/devmapper/deviceset.go <ide> func (devices *DeviceSet) UnmountDevice(hash string, mode UnmountMode) error { <ide> defer devices.Unlock() <ide> <ide> info := devices.Devices[hash] <add> if info == nil { <add> return fmt.Errorf("UnmountDevice: no such device %s\n", hash) <add> } <ide> <ide> if mode == UnmountFloat { <ide> if info.floating {
1
Text
Text
fix repetitive word in the tutorial
3fd6db144594983cbfac4502874ba853d51439be
<ide><path>docs/tutorial/1-serialization.md <ide> Quit out of the shell... <ide> <ide> In another terminal window, we can test the server. <ide> <del>We can test our API using using [curl][curl] or [httpie][httpie]. Httpie is a user friendly http client that's written in Python. Let's install that. <add>We can test our API using [curl][curl] or [httpie][httpie]. Httpie is a user friendly http client that's written in Python. Let's install that. <ide> <ide> You can install httpie using pip: <ide>
1
PHP
PHP
trim comment bloat from route class
cf8e5e2f801f01e2f29947cae85e29e553302f2e
<ide><path>system/route.php <ide> public function call() <ide> { <ide> $response = null; <ide> <del> // ------------------------------------------------------------ <del> // If the route value is just a function, all we have to do <del> // is execute the function! There are no filters to call. <del> // ------------------------------------------------------------ <ide> if (is_callable($this->callback)) <ide> { <ide> $response = call_user_func_array($this->callback, $this->parameters); <ide> } <del> // ------------------------------------------------------------ <del> // If the route value is an array, we'll need to check it for <del> // any filters that may be attached. <del> // ------------------------------------------------------------ <add> // If the route value is an array, we'll need to check it for any filters that may be attached. <ide> elseif (is_array($this->callback)) <ide> { <ide> $response = isset($this->callback['before']) ? Route\Filter::call($this->callback['before'], array(), true) : null; <ide> <del> // ------------------------------------------------------------ <del> // We verify that the before filters did not return a response <del> // Before filters can override the request cycle to make things <del> // like authentication convenient to implement. <del> // ------------------------------------------------------------ <add> // Verify that the before filters did not return a response. Before filters can override <add> // the request cycle to make things like authentication more convenient. <ide> if (is_null($response) and isset($this->callback['do'])) <ide> { <ide> $response = call_user_func_array($this->callback['do'], $this->parameters);
1
Go
Go
enable 5 build api tests
8bc1c9c08a09b7f47d8429645a061cb29fb8118e
<ide><path>integration-cli/docker_api_build_test.go <ide> import ( <ide> <ide> func (s *DockerSuite) TestBuildApiDockerFileRemote(c *check.C) { <ide> testRequires(c, NotUserNamespace) <del> testRequires(c, DaemonIsLinux) <ide> server, err := fakeStorage(map[string]string{ <ide> "testD": `FROM busybox <ide> COPY * /tmp/ <ide> RUN find /tmp/`, <ide> } <ide> <ide> func (s *DockerSuite) TestBuildApiRemoteTarballContext(c *check.C) { <del> testRequires(c, DaemonIsLinux) <ide> buffer := new(bytes.Buffer) <ide> tw := tar.NewWriter(buffer) <ide> defer tw.Close() <ide> func (s *DockerSuite) TestBuildApiRemoteTarballContext(c *check.C) { <ide> } <ide> <ide> func (s *DockerSuite) TestBuildApiRemoteTarballContextWithCustomDockerfile(c *check.C) { <del> testRequires(c, DaemonIsLinux) <ide> buffer := new(bytes.Buffer) <ide> tw := tar.NewWriter(buffer) <ide> defer tw.Close() <ide> RUN echo 'right' <ide> } <ide> <ide> func (s *DockerSuite) TestBuildApiLowerDockerfile(c *check.C) { <del> testRequires(c, DaemonIsLinux) <ide> git, err := newFakeGit("repo", map[string]string{ <ide> "dockerfile": `FROM busybox <ide> RUN echo from dockerfile`, <ide> RUN echo from dockerfile`, <ide> } <ide> <ide> func (s *DockerSuite) TestBuildApiBuildGitWithF(c *check.C) { <del> testRequires(c, DaemonIsLinux) <ide> git, err := newFakeGit("repo", map[string]string{ <ide> "baz": `FROM busybox <ide> RUN echo from baz`,
1
Ruby
Ruby
pull another unwieldy condition into a method
4adf8f047ab30c55605e2276ab9c102aec0e71e5
<ide><path>Library/Homebrew/macos.rb <ide> def locate tool <ide> end <ide> <ide> def dev_tools_path <del> @dev_tools_path ||= \ <del> if File.exist? MacOS::CLT::STANDALONE_PKG_PATH and <del> File.exist? "#{MacOS::CLT::STANDALONE_PKG_PATH}/usr/bin/cc" and <del> File.exist? "#{MacOS::CLT::STANDALONE_PKG_PATH}/usr/bin/make" <add> @dev_tools_path ||= if tools_in_prefix? CLT::STANDALONE_PKG_PATH <ide> # In 10.9 the CLT moved from /usr into /Library/Developer/CommandLineTools. <del> Pathname.new "#{MacOS::CLT::STANDALONE_PKG_PATH}/usr/bin" <del> elsif File.exist? "/usr/bin/cc" and File.exist? "/usr/bin/make" <add> Pathname.new "#{CLT::STANDALONE_PKG_PATH}/usr/bin" <add> elsif tools_in_prefix? "/" <ide> # probably a safe enough assumption (the unix way) <ide> Pathname.new "/usr/bin" <del> # Note that the exit status of system "xcrun foo" isn't always accurate <ide> elsif not Xcode.bad_xcode_select_path? and not `/usr/bin/xcrun -find make 2>/dev/null`.empty? <add> # Note that the exit status of system "xcrun foo" isn't always accurate <ide> # Wherever "make" is there are the dev tools. <ide> Pathname.new(`/usr/bin/xcrun -find make`.chomp).dirname <ide> elsif File.exist? "#{Xcode.prefix}/usr/bin/make" <ide> def dev_tools_path <ide> end <ide> end <ide> <add> def tools_in_prefix?(prefix) <add> File.directory?(prefix) && <add> %w{cc make}.all? { |tool| File.executable? "#{prefix}/usr/bin/#{tool}" } <add> end <add> <ide> def xctoolchain_path <ide> # As of Xcode 4.3, some tools are located in the "xctoolchain" directory <ide> @xctoolchain_path ||= begin
1
Text
Text
fix typo in configuring rails guide. [ci-skip]
3a8f514ff3dcba2df48a33cf33c0d69c2651240e
<ide><path>guides/source/configuring.md <ide> The default value depends on the `config.load_defaults` target version: <ide> <ide> #### `config.active_record.yaml_column_permitted_classes` <ide> <del>Defaults to `[Symbol]`. Allows applications to include additional permitted classes to `safe_load()` on the `ActiveStorage::Coders::YamlColumn`. <add>Defaults to `[Symbol]`. Allows applications to include additional permitted classes to `safe_load()` on the `ActiveRecord::Coders::YAMLColumn`. <ide> <ide> #### `config.active_record.use_yaml_unsafe_load` <ide> <del>Defaults to `false`. Allows applications to opt into using `unsafe_load` on the `ActiveStorage::Coders::YamlColumn`. <add>Defaults to `false`. Allows applications to opt into using `unsafe_load` on the `ActiveRecord::Coders::YAMLColumn`. <ide> <ide> #### `ActiveRecord::ConnectionAdapters::Mysql2Adapter.emulate_booleans` <ide>
1
Ruby
Ruby
remove the direct url writer from resource
0f01e9ff43c1576bc91741732ca961ee3d7e0545
<ide><path>Library/Homebrew/patch.rb <ide> def inspect <ide> class LegacyPatch < ExternalPatch <ide> def initialize(strip, url) <ide> super(strip) <del> resource.url = url <add> resource.url(url) <ide> resource.download_strategy = CurlDownloadStrategy <ide> end <ide> <ide><path>Library/Homebrew/resource.rb <ide> class Resource <ide> include FileUtils <ide> <ide> attr_reader :checksum, :mirrors, :specs, :using <del> attr_writer :url, :checksum, :version <add> attr_writer :checksum, :version <ide> attr_accessor :download_strategy <ide> <ide> # Formula name must be set after the DSL, as we have no access to the <ide><path>Library/Homebrew/software_spec.rb <ide> def initialize(formula, spec) <ide> checksum, tag = spec.checksum_for(bottle_tag) <ide> <ide> filename = Filename.create(formula, tag, spec.revision) <del> @resource.url = build_url(spec.root_url, filename) <add> @resource.url(build_url(spec.root_url, filename)) <ide> @resource.download_strategy = CurlBottleDownloadStrategy <ide> @resource.version = formula.pkg_version <ide> @resource.checksum = checksum
3
Python
Python
remove useless statement in dense
ac2a7254c2c29b4c13fb97740f2aaaf46a0282dc
<ide><path>keras/layers/core.py <ide> def __init__(self, units, <ide> <ide> def build(self, input_shape): <ide> assert len(input_shape) >= 2 <del> <ide> input_dim = input_shape[-1] <del> # TODO: check last dim in input_dim <del> self.input_spec = [InputSpec(dtype=K.floatx(), <del> ndim='2+')] <ide> <ide> self.kernel = self.add_weight((input_dim, self.units), <ide> initializer=self.kernel_initializer,
1
Javascript
Javascript
remove scope reference when form is destroyed
01f50e1a7b2bff7070616494774ec493f8133204
<ide><path>src/ng/directive/form.js <ide> var formDirectiveFactory = function(isNgForm) { <ide> parentFormCtrl.$$renameControl(controller, alias); <ide> }); <ide> } <del> if (parentFormCtrl !== nullFormCtrl) { <del> formElement.on('$destroy', function() { <del> parentFormCtrl.$removeControl(controller); <del> if (alias) { <del> setter(scope, alias, undefined, alias); <del> } <del> extend(controller, nullFormCtrl); //stop propagating child destruction handlers upwards <del> }); <del> } <add> formElement.on('$destroy', function() { <add> parentFormCtrl.$removeControl(controller); <add> if (alias) { <add> setter(scope, alias, undefined, alias); <add> } <add> extend(controller, nullFormCtrl); //stop propagating child destruction handlers upwards <add> }); <ide> } <ide> }; <ide> } <ide><path>test/ng/directive/formSpec.js <ide> describe('form', function() { <ide> expect(form.alias).toBeUndefined(); <ide> }); <ide> <add> it('should remove scope reference when form with no parent form is removed from the DOM', function() { <add> var formController; <add> scope.ctrl = {}; <add> doc = $compile( <add> '<div><form name="ctrl.myForm" ng-if="formPresent">' + <add> '<input name="alias" ng-model="value" />' + <add> '</form></div>')(scope); <add> <add> scope.$digest(); <add> expect(scope.ctrl.myForm).toBeUndefined(); <add> <add> scope.$apply('formPresent = true'); <add> expect(scope.ctrl.myForm).toBeDefined(); <add> <add> formController = doc.find('form').controller('form'); <add> expect(scope.ctrl.myForm).toBe(formController); <add> <add> scope.$apply('formPresent = false'); <add> expect(scope.ctrl.myForm).toBeUndefined(); <add> }); <ide> <ide> it('should use ngForm value as form name', function() { <ide> doc = $compile(
2
Python
Python
adapt has_labels test when no labels were found
c08a1e26ab1858c7c51d123577b0de6501779f9b
<ide><path>src/transformers/trainer.py <ide> def prediction_step( <ide> Tuple[Optional[torch.Tensor], Optional[torch.Tensor], Optional[torch.Tensor]]: A tuple with the loss, <ide> logits and labels (each being optional). <ide> """ <del> has_labels = all(inputs.get(k) is not None for k in self.label_names) <add> has_labels = False if len(self.label_names) == 0 else all(inputs.get(k) is not None for k in self.label_names) <ide> inputs = self._prepare_inputs(inputs) <ide> if ignore_keys is None: <ide> if hasattr(self.model, "config"):
1
Go
Go
handle concurrent creation of default gw network
7086da757a37b4ed730b4af8f868eadff9080fb8
<ide><path>libnetwork/default_gateway.go <ide> const ( <ide> gwEPlen = 12 <ide> ) <ide> <add>var procGwNetwork = make(chan (bool), 1) <add> <ide> /* <ide> libnetwork creates a bridge network "docker_gw_bridge" for provding <ide> default gateway for the containers if none of the container's endpoints <ide> func (sb *sandbox) setupDefaultGW(srcEp *endpoint) error { <ide> return nil <ide> } <ide> <add> // Look for default gw network. In case of error (includes not found), <add> // retry and create it if needed in a serialized execution. <ide> n, err := c.NetworkByName(libnGWNetwork) <ide> if err != nil { <del> if _, ok := err.(types.NotFoundError); !ok { <del> return err <del> } <del> n, err = c.createGWNetwork() <del> if err != nil { <add> if n, err = c.defaultGwNetwork(); err != nil { <ide> return err <ide> } <ide> } <ide> func (sb *sandbox) getEPwithoutGateway() *endpoint { <ide> } <ide> return nil <ide> } <add> <add>// Looks for the default gw network and creates it if not there. <add>// Parallel executions are serialized. <add>func (c *controller) defaultGwNetwork() (Network, error) { <add> procGwNetwork <- true <add> defer func() { <-procGwNetwork }() <add> <add> n, err := c.NetworkByName(libnGWNetwork) <add> if err != nil { <add> if _, ok := err.(types.NotFoundError); ok { <add> n, err = c.createGWNetwork() <add> } <add> } <add> return n, err <add>}
1
Python
Python
set version to v3.0.0.dev8
664a3603b0313b650b3b43e2897f381f1e3598df
<ide><path>spacy/about.py <ide> # fmt: off <ide> __title__ = "spacy" <del>__version__ = "3.0.0.dev7" <add>__version__ = "3.0.0.dev8" <ide> __release__ = True <ide> __download_url__ = "https://github.com/explosion/spacy-models/releases/download" <ide> __compatibility__ = "https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json"
1
Ruby
Ruby
remove unused require
6497096b85c037f75c4f09e0af03b7abecf84bee
<ide><path>activemodel/lib/active_model/serialization.rb <ide> # frozen_string_literal: true <ide> <del>require "active_support/core_ext/hash/except" <del>require "active_support/core_ext/hash/slice" <del> <ide> module ActiveModel <ide> # == Active \Model \Serialization <ide> #
1
Ruby
Ruby
add gotcha to rdoc of collection_check_boxes
58a75ffbfe7ee622a4a8551b2bfbbe01b396135e
<ide><path>actionview/lib/action_view/helpers/form_options_helper.rb <ide> def collection_radio_buttons(object, method, collection, value_method, text_meth <ide> # collection_check_boxes(:post, :author_ids, Author.all, :id, :name_with_initial) do |b| <ide> # b.label(:"data-value" => b.value) { b.check_box + b.text } <ide> # end <add> # <add> # ==== Gotcha <add> # <add> # When no selection is made for a collection of checkboxes most <add> # web browsers will not send any value. <add> # <add> # For example, if we have a +User+ model with +category_ids+ field and we <add> # have the following code in our update action: <add> # <add> # @user.update(params[:user]) <add> # <add> # If no +category_ids+ are selected then we can safely assume this field <add> # will not be updated. <add> # <add> # This is possible thanks to a hidden field generated by the helper method <add> # for every collection of checkboxes. <add> # This hidden field is given the same field name as the checkboxes with a <add> # blank value. <add> # <add> # In the rare case you don't want this hidden field, you can pass the <add> # <tt>include_hidden: false</tt> option to the helper method. <ide> def collection_check_boxes(object, method, collection, value_method, text_method, options = {}, html_options = {}, &block) <ide> Tags::CollectionCheckBoxes.new(object, method, self, collection, value_method, text_method, options, html_options).render(&block) <ide> end
1
Python
Python
follow the same convention as the rest of the file
93f92635b69c16a79ae546e060522eb437dc1c25
<ide><path>django/db/backends/postgresql_psycopg2/base.py <ide> try: <ide> import psycopg2 as Database <ide> import psycopg2.extensions <del> import psycopg2.Error <ide> except ImportError, e: <ide> from django.core.exceptions import ImproperlyConfigured <ide> raise ImproperlyConfigured("Error loading psycopg2 module: %s" % e) <ide> def close(self): <ide> try: <ide> self.connection.close() <ide> self.connection = None <del> except psycopg2.Error: <add> except Database.Error: <ide> # In some cases (database restart, network connection lost etc...) <ide> # the connection to the database is lost without giving Django a <ide> # notification. If we don't set self.connection to None, the error
1
Javascript
Javascript
remove unused method
51bd62d0b8e98ff982f083726a67adcd843bfdf4
<ide><path>lib/serialization/BinaryMiddleware.js <ide> const identifyNumber = n => { <ide> * @extends {SerializerMiddleware<DeserializedType, SerializedType>} <ide> */ <ide> class BinaryMiddleware extends SerializerMiddleware { <del> static optimizeSerializedData(data) { <del> const result = []; <del> const temp = []; <del> const flush = () => { <del> if (temp.length > 0) { <del> if (temp.length === 1) { <del> result.push(temp[0]); <del> } else { <del> result.push(Buffer.concat(temp)); <del> } <del> temp.length = 0; <del> } <del> }; <del> for (const item of data) { <del> if (Buffer.isBuffer(item)) { <del> temp.push(item); <del> } else { <del> flush(); <del> result.push(item); <del> } <del> } <del> flush(); <del> return result; <del> } <del> <ide> /** <ide> * @param {DeserializedType} data data <ide> * @param {Object} context context object
1
Java
Java
fix scroll events getting skipped on android
e964a7f4ef93fcadf5cfd1c269b77b13cb3be374
<ide><path>ReactAndroid/src/main/java/com/facebook/react/views/scroll/OnScrollDispatchHelper.java <ide> public class OnScrollDispatchHelper { <ide> <ide> private long mLastScrollEventTimeMs = -(MIN_EVENT_SEPARATION_MS + 1); <ide> <del> private static final float THRESHOLD = 0.1f; // Threshold for end fling <del> <ide> /** <ide> * Call from a ScrollView in onScrollChanged, returns true if this onScrollChanged is legit (not a <ide> * duplicate) and should be dispatched. <ide> public boolean onScrollChanged(int x, int y) { <ide> mPrevX != x || <ide> mPrevY != y; <ide> <del> // Skip the first calculation in each scroll <del> if (Math.abs(mXFlingVelocity) < THRESHOLD && Math.abs(mYFlingVelocity) < THRESHOLD) { <del> shouldDispatch = false; <del> } <del> <ide> if (eventTime - mLastScrollEventTimeMs != 0) { <ide> mXFlingVelocity = (float) (x - mPrevX) / (eventTime - mLastScrollEventTimeMs); <ide> mYFlingVelocity = (float) (y - mPrevY) / (eventTime - mLastScrollEventTimeMs);
1
Ruby
Ruby
remove check on sha-only change
dff0320f8cbf3593e73c5f1e2d5f49f96a05926c
<ide><path>Library/Homebrew/cask/audit.rb <ide> def check_token_conflicts? <ide> <ide> def run! <ide> check_required_stanzas <del> check_version_and_checksum <ide> check_version <ide> check_sha256 <ide> check_url <ide> def check_required_stanzas <ide> add_error "at least one activatable artifact stanza is required" if installable_artifacts.empty? <ide> end <ide> <del> def check_version_and_checksum <del> return if cask.sha256 == :no_check <del> <del> return if @cask.sourcefile_path.nil? <del> <del> tap = @cask.tap <del> return if tap.nil? <del> <del> return if commit_range.nil? <del> <del> previous_cask_contents = Git.last_revision_of_file(tap.path, @cask.sourcefile_path, before_commit: commit_range) <del> return if previous_cask_contents.empty? <del> <del> begin <del> previous_cask = CaskLoader.load(previous_cask_contents) <del> <del> return unless previous_cask.version == cask.version <del> return if previous_cask.sha256 == cask.sha256 <del> <del> add_error "only sha256 changed (see: https://github.com/Homebrew/homebrew-cask/blob/master/doc/cask_language_reference/stanzas/sha256.md)" <del> rescue CaskError => e <del> add_warning "Skipped version and checksum comparison. Reading previous version failed: #{e}" <del> end <del> end <del> <ide> def check_version <ide> return unless cask.version <ide>
1
Javascript
Javascript
apply unused translations
a888b476a904c5690a5b96b984aa698c66f47ad6
<ide><path>client/src/components/settings/DeleteModal.js <ide> function DeleteModal(props) { <ide> > <ide> <Modal.Header closeButton={true}> <ide> <Modal.Title id='modal-title'> <del> {t('settings.danger.delete')} <add> {t('settings.danger.delete-title')} <ide> </Modal.Title> <ide> </Modal.Header> <ide> <Modal.Body> <ide><path>client/src/pages/update-email.js <ide> class UpdateEmail extends Component { <ide> > <ide> {isNewEmail <ide> ? t('buttons.update-email') <del> : t('misc.verify-email')} <add> : t('buttons.verify-email')} <ide> </Button> <ide> </Form> <ide> <p className='text-center'> <ide><path>client/src/templates/Challenges/components/ResetModal.js <ide> function ResetModal({ reset, close, isOpen }) { <ide> bsStyle='danger' <ide> onClick={withActions(reset, close)} <ide> > <del> {t('buttons.reset')} <add> {t('buttons.reset-lesson')} <ide> </Button> <ide> </Modal.Footer> <ide> </Modal>
3
Ruby
Ruby
use array.wrap uniformly
a98db7c6ef7384d60a1c7f02d43ee601e2647eea
<ide><path>activemodel/lib/active_model/callbacks.rb <add>require 'active_support/core_ext/array/wrap' <ide> require 'active_support/callbacks' <ide> <ide> module ActiveModel <ide> def define_model_callbacks(*callbacks) <ide> options = callbacks.extract_options! <ide> options = { :terminator => "result == false", :scope => [:kind, :name] }.merge(options) <ide> <del> types = Array(options.delete(:only)) <add> types = Array.wrap(options.delete(:only)) <ide> types = [:before, :around, :after] if types.empty? <ide> <ide> callbacks.each do |callback| <ide> def _define_after_model_callback(klass, callback) #:nodoc: <ide> def self.after_#{callback}(*args, &block) <ide> options = args.extract_options! <ide> options[:prepend] = true <del> options[:if] = Array(options[:if]) << "!halted && value != false" <add> options[:if] = Array.wrap(options[:if]) << "!halted && value != false" <ide> set_callback(:#{callback}, :after, *(args << options), &block) <ide> end <ide> CALLBACK <ide> end <ide> end <del>end <ide>\ No newline at end of file <add>end <ide><path>activemodel/lib/active_model/errors.rb <add>require 'active_support/core_ext/array/wrap' <ide> require 'active_support/core_ext/string/inflections' <ide> require 'active_support/ordered_hash' <ide> <ide> def full_messages <ide> full_messages = [] <ide> <ide> each do |attribute, messages| <del> messages = Array(messages) <add> messages = Array.wrap(messages) <ide> next if messages.empty? <ide> <ide> if attribute == :base <ide><path>activemodel/lib/active_model/serializers/xml.rb <add>require 'active_support/core_ext/array/wrap' <ide> require 'active_support/core_ext/class/attribute_accessors' <ide> require 'active_support/core_ext/hash/conversions' <ide> <ide> def serializable_attributes <ide> end <ide> <ide> def serializable_method_attributes <del> Array(options[:methods]).inject([]) do |methods, name| <add> Array.wrap(options[:methods]).inject([]) do |methods, name| <ide> methods << MethodAttribute.new(name.to_s, @serializable) if @serializable.respond_to?(name.to_s) <ide> methods <ide> end <ide><path>activemodel/lib/active_model/validations.rb <ide> require 'active_support/core_ext/array/extract_options' <add>require 'active_support/core_ext/array/wrap' <ide> require 'active_support/core_ext/class/attribute' <ide> require 'active_support/core_ext/hash/keys' <ide> require 'active_model/errors' <ide> def validates_each(*attr_names, &block) <ide> def validate(*args, &block) <ide> options = args.last <ide> if options.is_a?(Hash) && options.key?(:on) <del> options[:if] = Array(options[:if]) <add> options[:if] = Array.wrap(options[:if]) <ide> options[:if] << "@_on_validate == :#{options[:on]}" <ide> end <ide> set_callback(:validate, *args, &block) <ide><path>activemodel/lib/active_model/validator.rb <add>require 'active_support/core_ext/array/wrap' <ide> require "active_support/core_ext/module/anonymous" <ide> <ide> module ActiveModel #:nodoc: <ide> class EachValidator < Validator <ide> # +options+ reader, however the <tt>:attributes</tt> option will be removed <ide> # and instead be made available through the +attributes+ reader. <ide> def initialize(options) <del> @attributes = Array(options.delete(:attributes)) <add> @attributes = Array.wrap(options.delete(:attributes)) <ide> raise ":attributes cannot be blank" if @attributes.empty? <ide> super <ide> check_validity!
5
Ruby
Ruby
fix curl_output for curl download strategy
f54de5a8481bc9dc4ab2880a69c98efc6f4a9552
<ide><path>Library/Homebrew/download_strategy.rb <ide> def _curl_download(resolved_url, to, timeout) <ide> <ide> curl_download resolved_url, to: to, try_partial: @try_partial, timeout: timeout, use_homebrew_curl: true <ide> end <add> <add> def curl_output(*args, **options) <add> raise HomebrewCurlDownloadStrategyError, url unless Formula["curl"].any_version_installed? <add> <add> options[:use_homebrew_curl] = true <add> super(*args, **options) <add> end <ide> end <ide> <ide> # Strategy for downloading a file from an GitHub Packages URL.
1
Javascript
Javascript
create benchmark test for misc and module
be3ac440dc2280fe8fed8c0da5b06b9803c70a44
<ide><path>test/parallel/test-benchmark-misc.js <add>'use strict'; <add> <add>require('../common'); <add> <add>const runBenchmark = require('../common/benchmark'); <add> <add>runBenchmark('misc', [ <add> 'n=1', <add> 'val=magyarország.icom.museum', <add> 'millions=.000001', <add> 'type=extend', <add> 'concat=0' <add>]); <ide><path>test/parallel/test-benchmark-module.js <add>'use strict'; <add> <add>require('../common'); <add> <add>const runBenchmark = require('../common/benchmark'); <add> <add>runBenchmark('module', [ <add> 'thousands=.001', <add> 'useCache=true', <add> 'fullPath=true' <add>]);
2
Python
Python
fix ticket #104
67eeb36aee72eae9a2ac42765fb11b63613f05cb
<ide><path>numpy/core/records.py <ide> def field(self,attr, val=None): <ide> else: <ide> return self.setfield(val, *res) <ide> <add> def view(self, obj): <add> try: <add> if issubclass(obj, sb.ndarray): <add> return sb.ndarray.view(self, obj) <add> except TypeError: <add> pass <add> dtype = sb.dtype(obj) <add> if dtype.fields is None: <add> return self.__array__().view(dtype) <add> return sb.ndarray.view(self, obj) <add> <add> <ide> def fromarrays(arrayList, formats=None, names=None, titles=None, shape=None, <ide> aligned=0): <ide> """ create a record array from a (flat) list of arrays
1
Java
Java
fix failing test
cf2e1ffc65a2684493cae60580fa76703fa006b7
<ide><path>spring-websocket/src/test/java/org/springframework/web/socket/server/endpoint/SpringConfiguratorTests.java <ide> public void getEndpointInstanceSingletonByType() throws Exception { <ide> <ide> @Test <ide> public void getEndpointInstanceSingletonByComponentName() throws Exception { <del> AlternativeEchoEndpoint expected = this.webAppContext.getBean(AlternativeEchoEndpoint.class); <del> AlternativeEchoEndpoint actual = this.configurator.getEndpointInstance(AlternativeEchoEndpoint.class); <add> AnotherEchoEndpoint expected = this.webAppContext.getBean(AnotherEchoEndpoint.class); <add> AnotherEchoEndpoint actual = this.configurator.getEndpointInstance(AnotherEchoEndpoint.class); <ide> assertSame(expected, actual); <ide> } <ide> <ide> public void onOpen(Session session, EndpointConfig config) { <ide> } <ide> } <ide> <del> @Component("echoEndpoint") <del> private static class AlternativeEchoEndpoint extends Endpoint { <add> @Component("myEchoEndpoint") <add> private static class AnotherEchoEndpoint extends Endpoint { <ide> <ide> @SuppressWarnings("unused") <ide> private final EchoService service; <ide> <ide> @Autowired <del> public AlternativeEchoEndpoint(EchoService service) { <add> public AnotherEchoEndpoint(EchoService service) { <ide> this.service = service; <ide> } <ide>
1
Python
Python
prepare new pypi release
b2c66816d7c5dc39e4fc434b3102f57723ff0329
<ide><path>keras/__init__.py <ide> from . import optimizers <ide> from . import regularizers <ide> <del>__version__ = '1.0.6' <add>__version__ = '1.0.7' <ide><path>setup.py <ide> <ide> <ide> setup(name='Keras', <del> version='1.0.6', <add> version='1.0.7', <ide> description='Deep Learning for Python', <ide> author='Francois Chollet', <ide> author_email='[email protected]', <ide> url='https://github.com/fchollet/keras', <del> download_url='https://github.com/fchollet/keras/tarball/1.0.6', <add> download_url='https://github.com/fchollet/keras/tarball/1.0.7', <ide> license='MIT', <ide> install_requires=['theano', 'pyyaml', 'six'], <ide> extras_require={
2
Python
Python
increase pipeline support for onnx export.
7b685f5229c858f3e0be27a927dc71a9fec00ede
<ide><path>src/transformers/convert_graph_to_onnx.py <ide> from transformers.tokenization_utils import BatchEncoding <ide> <ide> <add>SUPPORTED_PIPELINES = [ <add> "feature-extraction", <add> "ner", <add> "sentiment-analysis", <add> "fill-mask", <add> "question-answering", <add> "text-generation", <add> "translation_en_to_fr", <add> "translation_en_to_de", <add> "translation_en_to_ro", <add>] <add> <add> <ide> class OnnxConverterArgumentParser(ArgumentParser): <ide> """ <ide> Wraps all the script arguments supported to export transformers models to ONNX IR <ide> class OnnxConverterArgumentParser(ArgumentParser): <ide> def __init__(self): <ide> super(OnnxConverterArgumentParser, self).__init__("ONNX Converter") <ide> <add> self.add_argument("--pipeline", type=str, choices=SUPPORTED_PIPELINES, default="feature-extraction") <ide> self.add_argument("--model", type=str, required=True, help="Model's id or path (ex: bert-base-cased)") <ide> self.add_argument("--tokenizer", type=str, help="Tokenizer's id or path (ex: bert-base-cased)") <ide> self.add_argument("--framework", type=str, choices=["pt", "tf"], help="Framework for loading the model") <ide> def load_graph_from_args(framework: str, model: str, tokenizer: Optional[str] = <ide> print("Loading pipeline (model: {}, tokenizer: {})".format(model, tokenizer)) <ide> <ide> # Allocate tokenizer and model <del> return pipeline("feature-extraction", model=model, tokenizer=tokenizer, framework=framework) <add> return pipeline(args.pipeline, model=model, tokenizer=tokenizer, framework=framework) <ide> <ide> <ide> def convert_pytorch(nlp: Pipeline, opset: int, output: str, use_external_format: bool):
1
Javascript
Javascript
remove obsolete elements and comments
21cb28d077c166f25aa4d462b673e25073ea02bb
<ide><path>src/renderers/shaders/UniformsLib.js <ide> var UniformsLib = { <ide> } }, <ide> <ide> // TODO (abelnation): RectAreaLight BRDF data needs to be moved from example to main src <del> rectAreaLights: { type: "sa", value: [], properties: { <del> color: { type: "c" }, <del> position: { type: "v3" }, <del> width: { type: "v3" }, <del> height: { type: "v3" }, <add> rectAreaLights: { value: [], properties: { <add> color: {}, <add> position: {}, <add> width: {}, <add> height: {}, <ide> } } <del> // rectAreaLights: { type: "sa", value: [], properties: { <del> // color: { type: "c" }, <del> // position: { type: "v3" }, <del> // width: { type: "1f" }, <del> // height: { type: "1f" }, <del> // rotationMatrix: { type: "m4" } <del> // } }, <ide> <ide> }, <ide>
1
PHP
PHP
add tests for staticconfigtrait
44ed93e03818babed9c786bd11aad1aefaff470d
<ide><path>tests/TestCase/Core/StaticConfigTraitTest.php <add><?php <add>/** <add> * CakePHP(tm) : Rapid Development Framework (http://cakephp.org) <add> * Copyright (c) Cake Software Foundation, Inc. (http://cakefoundation.org) <add> * <add> * Licensed under The MIT License <add> * Redistributions of files must retain the above copyright notice. <add> * <add> * @copyright Copyright (c) Cake Software Foundation, Inc. (http://cakefoundation.org) <add> * @link http://cakephp.org CakePHP(tm) Project <add> * @since 3.0.0 <add> * @license http://www.opensource.org/licenses/mit-license.php MIT License <add> */ <add>namespace Cake\Test\TestCase\Core; <add> <add>use Cake\Core\StaticConfigTrait; <add>use Cake\TestSuite\TestCase; <add>use PHPUnit_Framework_Test; <add> <add>/** <add> * StaticConfigTraitTest class <add> * <add> */ <add>class StaticConfigTraitTest extends TestCase { <add> <add> public function setUp() { <add> parent::setUp(); <add> $this->subject = $this->getObjectForTrait('Cake\Core\StaticConfigTrait'); <add> } <add> <add> public function tearDown() { <add> unset($this->subject); <add> parent::tearDown(); <add> } <add> <add>/** <add> * Tests simple usage of parseDsn <add> * <add> * @return void <add> */ <add> public function testSimpleParseDsn() { <add> $klassName = get_class($this->subject); <add> <add> $this->assertInternalType('string', $klassName::parseDsn('')); <add> $this->assertEquals('', $klassName::parseDsn('')); <add> <add> $this->assertInternalType('array', $klassName::parseDsn(['key' => 'value'])); <add> $this->assertEquals(['key' => 'value'], $klassName::parseDsn(['key' => 'value'])); <add> <add> $this->assertInternalType('array', $klassName::parseDsn(['url' => 'http://:80'])); <add> $this->assertEquals(['url' => 'http://:80'], $klassName::parseDsn(['url' => 'http://:80'])); <add> <add> $this->assertInternalType('array', $klassName::parseDsn(['url' => 'http://user@:80'])); <add> $this->assertEquals(['url' => 'http://user@:80'], $klassName::parseDsn(['url' => 'http://user@:80'])); <add> <add> $dsn = 'mysql://localhost:3306/database'; <add> $expected = [ <add> 'className' => 'mysql', <add> 'driver' => 'mysql', <add> 'host' => 'localhost', <add> 'path' => '/database', <add> 'port' => 3306, <add> ]; <add> $this->assertEquals($expected, $klassName::parseDsn(['url' => $dsn])); <add> <add> $dsn = 'mysql://user:password@localhost:3306/database'; <add> $expected = [ <add> 'className' => 'mysql', <add> 'driver' => 'mysql', <add> 'host' => 'localhost', <add> 'password' => 'password', <add> 'path' => '/database', <add> 'port' => 3306, <add> 'username' => 'user', <add> ]; <add> $this->assertEquals($expected, $klassName::parseDsn(['url' => $dsn])); <add> } <add> <add>/** <add> * Tests className/driver value setting <add> * <add> * @return void <add> */ <add> public function testParseDsnClassnameDriver() { <add> $klassName = get_class($this->subject); <add> <add> <add> $dsn = 'Cake\Database\Driver\Mysql://localhost:3306/database'; <add> $expected = [ <add> 'className' => 'Cake\Database\Driver\Mysql', <add> 'driver' => 'Cake\Database\Driver\Mysql', <add> 'host' => 'localhost', <add> 'path' => '/database', <add> 'port' => 3306, <add> ]; <add> $this->assertEquals($expected, $klassName::parseDsn(['url' => $dsn])); <add> <add> $dsn = 'Cake\Database\Driver\Mysql://user:password@localhost:3306/database'; <add> $expected = [ <add> 'className' => 'Cake\Database\Driver\Mysql', <add> 'driver' => 'Cake\Database\Driver\Mysql', <add> 'host' => 'localhost', <add> 'password' => 'password', <add> 'path' 
=> '/database', <add> 'port' => 3306, <add> 'username' => 'user', <add> ]; <add> $this->assertEquals($expected, $klassName::parseDsn(['url' => $dsn])); <add> <add> $dsn = 'Cake\Database\Driver\Mysql://localhost/database?className=Cake\Database\Connection'; <add> $expected = [ <add> 'className' => 'Cake\Database\Connection', <add> 'driver' => 'Cake\Database\Driver\Mysql', <add> 'host' => 'localhost', <add> 'path' => '/database', <add> ]; <add> $this->assertEquals($expected, $klassName::parseDsn(['url' => $dsn])); <add> <add> $dsn = 'Cake\Database\Driver\Mysql://localhost:3306/database?className=Cake\Database\Connection'; <add> $expected = [ <add> 'className' => 'Cake\Database\Connection', <add> 'driver' => 'Cake\Database\Driver\Mysql', <add> 'host' => 'localhost', <add> 'path' => '/database', <add> 'port' => 3306, <add> ]; <add> $this->assertEquals($expected, $klassName::parseDsn(['url' => $dsn])); <add> <add> $dsn = 'Cake\Database\Connection://localhost:3306/database?driver=Cake\Database\Driver\Mysql'; <add> $expected = [ <add> 'className' => 'Cake\Database\Connection', <add> 'driver' => 'Cake\Database\Driver\Mysql', <add> 'host' => 'localhost', <add> 'path' => '/database', <add> 'port' => 3306, <add> ]; <add> $this->assertEquals($expected, $klassName::parseDsn(['url' => $dsn])); <add> } <add> <add>/** <add> * Tests parsing querystring values <add> * <add> * @return void <add> */ <add> public function testParseDsnQuerystring() { <add> $klassName = get_class($this->subject); <add> <add> $expected = [ <add> 'className' => 'Cake\Log\Engine\FileLog', <add> 'driver' => 'Cake\Log\Engine\FileLog', <add> 'url' => 'test', <add> 'path' => '/', <add> ]; <add> $dsn = 'Cake\Log\Engine\FileLog:///?url=test'; <add> $this->assertEquals($expected, $klassName::parseDsn(['url' => $dsn])); <add> <add> $expected = [ <add> 'className' => 'Cake\Log\Engine\FileLog', <add> 'driver' => 'Cake\Log\Engine\FileLog', <add> 'file' => 'debug', <add> 'path' => '/', <add> 'key' => 'value', <add> ]; <add> $dsn = 'Cake\Log\Engine\FileLog:///?file=debug&key=value'; <add> $this->assertEquals($expected, $klassName::parseDsn(['url' => $dsn])); <add> <add> $expected = [ <add> 'className' => 'Cake\Log\Engine\FileLog', <add> 'driver' => 'Cake\Log\Engine\FileLog', <add> 'file' => 'debug', <add> 'path' => '/tmp', <add> 'types' => ['notice', 'info', 'debug'], <add> ]; <add> $dsn = 'Cake\Log\Engine\FileLog:///tmp?file=debug&types[]=notice&types[]=info&types[]=debug'; <add> $this->assertEquals($expected, $klassName::parseDsn(['url' => $dsn])); <add> <add> $expected = [ <add> 'className' => 'Mail', <add> 'client' => null, <add> 'driver' => 'Mail', <add> 'key' => true, <add> 'key2' => false, <add> 'path' => '/', <add> 'timeout' =>'30', <add> 'tls' => null, <add> ]; <add> $dsn = 'Mail:///?timeout=30&key=true&key2=false&client=null&tls=null'; <add> $this->assertEquals($expected, $klassName::parseDsn(['url' => $dsn])); <add> <add> $expected = [ <add> 'className' => 'Mail', <add> 'client' => null, <add> 'driver' => 'Mail', <add> 'host' => 'null', <add> 'key' => true, <add> 'key2' => false, <add> 'password' => 'false', <add> 'path' => '/1', <add> 'timeout' =>'30', <add> 'tls' => null, <add> 'username' => 'true', <add> ]; <add> $dsn = 'Mail://true:false@null/1?timeout=30&key=true&key2=false&client=null&tls=null'; <add> $this->assertEquals($expected, $klassName::parseDsn(['url' => $dsn])); <add> <add> $expected = [ <add> 'className' => 'Mail', <add> 'client' => null, <add> 'driver' => 'Mail', <add> 'host' => 'localhost', <add> 'password' => 
'secret', <add> 'port' => 25, <add> 'timeout' =>'30', <add> 'tls' => null, <add> 'username' => 'user', <add> ]; <add> $dsn = 'Mail://user:secret@localhost:25?timeout=30&client=null&tls=null'; <add> $this->assertEquals($expected, $klassName::parseDsn(['url' => $dsn])); <add> <add> $dsn = 'File:///?prefix=myapp_cake_core_&serialize=true&duration=%2B2 minutes'; <add> $expected = [ <add> 'className' => 'File', <add> 'driver' => 'File', <add> 'duration' => '+2 minutes', <add> 'path' => '/', <add> 'prefix' => 'myapp_cake_core_', <add> 'serialize' => true, <add> ]; <add> $this->assertEquals($expected, $klassName::parseDsn(['url' => $dsn])); <add> } <add> <add>/** <add> * Tests loading a single plugin <add> * <add> * @return void <add> */ <add> public function testParseDsnPathSetting() { <add> $klassName = get_class($this->subject); <add> <add> $dsn = 'File:///'; <add> $expected = [ <add> 'className' => 'File', <add> 'driver' => 'File', <add> 'path' => '/', <add> ]; <add> $this->assertEquals($expected, $klassName::parseDsn(['url' => $dsn])); <add> <add> $dsn = 'File:///?path=/tmp/persistent/'; <add> $expected = [ <add> 'className' => 'File', <add> 'driver' => 'File', <add> 'path' => '/tmp/persistent/', <add> ]; <add> $this->assertEquals($expected, $klassName::parseDsn(['url' => $dsn])); <add> } <add> <add>} <add>
1
Ruby
Ruby
refer users to correct tracker for tapped brews
b5de42bfefe1ed7b06a948a2f5df06ceafccbb47
<ide><path>Library/Homebrew/exceptions.rb <ide> def dump <ide> formula_name = $1 <ide> error_line = $2 <ide> <add> path = HOMEBREW_REPOSITORY/"Library/Formula/#{formula_name}.rb" <add> if path.symlink? and path.realpath.to_s =~ %r{^#{HOMEBREW_REPOSITORY}/Library/Taps/(\w+)-(\w+)/} <add> repo = "#$1/homebrew-#$2" <add> repo_path = path.realpath.relative_path_from(HOMEBREW_REPOSITORY/"Library/Taps/#$1-#$2").parent.to_s <add> issues_url = "https://github.com/#$1/homebrew-#$2/issues/new" <add> else <add> repo = "mxcl/master" <add> repo_path = "Library/Formula" <add> issues_url = ISSUES_URL <add> end <add> <ide> ohai "Exit Status: #{e.exit_status}" <del> puts "http://github.com/mxcl/homebrew/blob/master/Library/Formula/#{formula_name}.rb#L#{error_line}" <add> puts "https://github.com/#{repo}/blob/master/#{repo_path}/#{formula_name}.rb#L#{error_line}" <ide> ohai "Environment" <ide> puts Homebrew.config_s <ide> ohai "Build Flags" <ide> def dump <ide> issues = GitHub.issues_for_formula formula_name <ide> if issues.empty? <ide> puts "If `brew doctor' does not help diagnose the issue, please report the bug:" <del> puts " #{Tty.em}#{ISSUES_URL}#{Tty.reset}" <add> puts " #{Tty.em}#{issues_url}#{Tty.reset}" <ide> else <ide> puts "These existing issues may help you:", *issues.map{ |s| " #{Tty.em}#{s}#{Tty.reset}" } <ide> puts "Otherwise, please report the bug:" <del> puts " #{Tty.em}#{ISSUES_URL}#{Tty.reset}" <add> puts " #{Tty.em}#{issues_url}#{Tty.reset}" <ide> end <ide> if e.was_running_configure? <ide> puts "We saved the configure log, please gist it if you report the issue:"
1
PHP
PHP
implement index reflection for sqlite
24b88c062f961fa8649abbc964e7bde32efc7ea6
<ide><path>lib/Cake/Database/Schema/SqliteSchema.php <ide> public function convertFieldDescription(Table $table, $row, $fieldParams = []) { <ide> * @return array An array of (sql, params) to execute. <ide> */ <ide> public function describeIndexSql($table) { <del> $sql = ''; <add> $sql = sprintf( <add> 'PRAGMA index_list(%s)', <add> $this->_driver->quoteIdentifier($table) <add> ); <ide> return [$sql, []]; <ide> } <ide> <ide> /** <ide> * Convert an index into the abstract description. <ide> * <add> * Since Sqlite does not have a way to get metadata about all indexes at once, <add> * additional queries are done here. Sqlite constraint names are not <add> * stable, and the names for constraints will not match those used to create <add> * the table. This is a limitation in Sqlite's metadata features. <add> * <ide> * @param Cake\Database\Schema\Table $table The table object to append <ide> * an index or constraint to. <ide> * @param array $row The row data from describeIndexSql <ide> * @return void <ide> */ <ide> public function convertIndexDescription(Table $table, $row) { <add> $sql = sprintf( <add> 'PRAGMA index_info(%s)', <add> $this->_driver->quoteIdentifier($row['name']) <add> ); <add> $statement = $this->_driver->prepare($sql); <add> $statement->execute(); <add> $columns = []; <add> foreach ($statement->fetchAll('assoc') as $column) { <add> $columns[] = $column['name']; <add> } <add> if ($row['unique']) { <add> $table->addConstraint($row['name'], [ <add> 'type' => 'unique', <add> 'columns' => $columns <add> ]); <add> } else { <add> $table->addIndex($row['name'], [ <add> 'type' => 'index', <add> 'columns' => $columns <add> ]); <add> } <ide> } <ide> <ide> /** <ide><path>lib/Cake/Test/TestCase/Database/Schema/SqliteSchemaTest.php <ide> protected function _createTables($connection) { <ide> body TEXT, <ide> author_id INT(11) NOT NULL, <ide> published BOOLEAN DEFAULT 0, <del>created DATETIME <del>) <add>created DATETIME, <add>CONSTRAINT "title_idx" UNIQUE ("title", "body") <add>); <ide> SQL; <ide> $connection->execute($table); <add> $connection->execute('CREATE INDEX "created_idx" ON "articles" ("created")'); <ide> } <ide> <ide> /** <ide> public function testDescribeTable() { <ide> } <ide> } <ide> <add>/** <add> * Test describing a table with indexes <add> * <add> * @return void <add> */ <add> public function testDescribeTableIndexes() { <add> $connection = new Connection(Configure::read('Datasource.test')); <add> $this->_createTables($connection); <add> <add> $schema = new SchemaCollection($connection); <add> $result = $schema->describe('articles'); <add> $this->assertInstanceOf('Cake\Database\Schema\Table', $result); <add> $expected = [ <add> 'primary' => [ <add> 'type' => 'primary', <add> 'columns' => ['id'], <add> 'length' => [] <add> ], <add> 'sqlite_autoindex_articles_1' => [ <add> 'type' => 'unique', <add> 'columns' => ['title', 'body'], <add> 'length' => [] <add> ] <add> ]; <add> $this->assertCount(2, $result->constraints()); <add> $this->assertEquals($expected['primary'], $result->constraint('primary')); <add> $this->assertEquals($expected['sqlite_autoindex_articles_1'], $result->constraint('sqlite_autoindex_articles_1')); <add> <add> $this->assertCount(1, $result->indexes()); <add> $expected = [ <add> 'type' => 'index', <add> 'columns' => ['created'], <add> 'length' => [] <add> ]; <add> $this->assertEquals($expected, $result->index('created_idx')); <add> } <add> <ide> /** <ide> * Column provider for creating column sql <ide> *
2
Javascript
Javascript
add pdfstring conversion
92a7468e9a50c11ee44ae691c2fb4f84f274c07b
<ide><path>pdf.js <ide> function stringToBytes(str) { <ide> return bytes; <ide> } <ide> <add>var PDFStringTranslateTable = [ <add> ,,,,,,,,,,,,,,,,,,,,,,,, 0x2D8, 0x2C7, 0x2C6, 0x2D9, 0x2DD, 0x2DB, 0x2DA, <add> 0x2DC,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, <add> ,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 0x2022, 0x2020, 0x2021, 0x2026, 0x2014, <add> 0x2013, 0x192, 0x2044, 0x2039, 0x203A, 0x2212, 0x2030, 0x201E, 0x201C, <add> 0x201D, 0x2018, 0x2019, 0x201A, 0x2122, 0xFB01, 0xFB02, 0x141, 0x152, <add> 0x160, 0x178, 0x17D, 0x131, 0x142, 0x153, 0x161, 0x17E,, 0x20AC <add>]; <add> <add>function stringToPDFString(str) { <add> var i, n = str.length, str2 = ''; <add> if (str[0] === '\xFE' && str[1] === '\xFF') { <add> // UTF16BE BOM <add> for (i = 2; i < n; i += 2) <add> str2 += String.fromCharCode( <add> (str.charCodeAt(i) << 8) | str.charCodeAt(i + 1)); <add> } else { <add> for (i = 0; i < n; ++i) { <add> var code = PDFStringTranslateTable[str.charCodeAt(i)]; <add> str2 += code ? String.fromCharCode(code) : str.charAt(i); <add> } <add> } <add> return str2; <add>} <add> <ide> var Stream = (function() { <ide> function constructor(arrayBuffer, start, length, dict) { <ide> this.bytes = new Uint8Array(arrayBuffer); <ide> var Catalog = (function() { <ide> return shadow(this, 'toplevelPagesDict', xrefObj); <ide> }, <ide> get documentOutline() { <del> function convertIfUnicode(str) { <del> if (str[0] === '\xFE' && str[1] === '\xFF') { <del> // UTF16BE BOM <del> var i, n = str.length, str2 = ''; <del> for (i = 2; i < n; i += 2) <del> str2 += String.fromCharCode( <del> (str.charCodeAt(i) << 8) | str.charCodeAt(i + 1)); <del> str = str2; <del> } <del> return str; <del> } <ide> var obj = this.catDict.get('Outlines'); <ide> var xref = this.xref; <ide> var root = { items: [] }; <ide> var Catalog = (function() { <ide> var title = xref.fetchIfRef(outlineDict.get('Title')); <ide> var outlineItem = { <ide> dest: dest, <del> title: convertIfUnicode(title), <add> title: stringToPDFString(title), <ide> color: outlineDict.get('C') || [0, 0, 0], <ide> count: outlineDict.get('Count'), <ide> bold: !!(outlineDict.get('F') & 2),
1
Java
Java
improve efficiency of built routerfunction
7c4f0318bb32938105335099956eccd2807ce4fb
<ide><path>spring-webflux/src/main/java/org/springframework/web/reactive/function/server/RouterFunctionBuilder.java <ide> /* <del> * Copyright 2002-2019 the original author or authors. <add> * Copyright 2002-2020 the original author or authors. <ide> * <ide> * Licensed under the Apache License, Version 2.0 (the "License"); <ide> * you may not use this file except in compliance with the License. <ide> import java.util.function.Predicate; <ide> import java.util.function.Supplier; <ide> <add>import reactor.core.publisher.Flux; <ide> import reactor.core.publisher.Mono; <ide> <ide> import org.springframework.core.io.Resource; <ide> public <T extends Throwable> RouterFunctions.Builder onError(Class<T> exceptionT <ide> <ide> @Override <ide> public RouterFunction<ServerResponse> build() { <del> RouterFunction<ServerResponse> result = this.routerFunctions.stream() <del> .reduce(RouterFunction::and) <del> .orElseThrow(IllegalStateException::new); <add> if (this.routerFunctions.isEmpty()) { <add> throw new IllegalStateException("No routes registered. Register a route with GET(), POST(), etc."); <add> } <add> RouterFunction<ServerResponse> result = new BuiltRouterFunction(this.routerFunctions); <ide> <ide> if (this.filterFunctions.isEmpty()) { <ide> return result; <ide> public RouterFunction<ServerResponse> build() { <ide> } <ide> } <ide> <add> <add> /** <add> * Router function returned by {@link #build()} that simply iterates over the registered routes. <add> */ <add> private static class BuiltRouterFunction extends RouterFunctions.AbstractRouterFunction<ServerResponse> { <add> <add> private List<RouterFunction<ServerResponse>> routerFunctions; <add> <add> public BuiltRouterFunction(List<RouterFunction<ServerResponse>> routerFunctions) { <add> Assert.notEmpty(routerFunctions, "RouterFunctions must not be empty"); <add> this.routerFunctions = routerFunctions; <add> } <add> <add> @Override <add> public Mono<HandlerFunction<ServerResponse>> route(ServerRequest request) { <add> return Flux.fromIterable(this.routerFunctions) <add> .concatMap(routerFunction -> routerFunction.route(request)) <add> .next(); <add> } <add> <add> @Override <add> public void accept(RouterFunctions.Visitor visitor) { <add> this.routerFunctions.forEach(routerFunction -> routerFunction.accept(visitor)); <add> } <add> } <add> <ide> } <ide><path>spring-webflux/src/main/java/org/springframework/web/reactive/function/server/RouterFunctions.java <ide> public interface Visitor { <ide> } <ide> <ide> <del> private abstract static class AbstractRouterFunction<T extends ServerResponse> implements RouterFunction<T> { <add> abstract static class AbstractRouterFunction<T extends ServerResponse> implements RouterFunction<T> { <ide> <ide> @Override <ide> public String toString() { <ide><path>spring-webmvc/src/main/java/org/springframework/web/servlet/function/RouterFunctionBuilder.java <ide> /* <del> * Copyright 2002-2019 the original author or authors. <add> * Copyright 2002-2020 the original author or authors. <ide> * <ide> * Licensed under the Apache License, Version 2.0 (the "License"); <ide> * you may not use this file except in compliance with the License. <ide> public RouterFunctions.Builder onError(Class<? 
extends Throwable> exceptionType, <ide> <ide> @Override <ide> public RouterFunction<ServerResponse> build() { <del> RouterFunction<ServerResponse> result = this.routerFunctions.stream() <del> .reduce(RouterFunction::and) <del> .orElseThrow(IllegalStateException::new); <add> if (this.routerFunctions.isEmpty()) { <add> throw new IllegalStateException("No routes registered. Register a route with GET(), POST(), etc."); <add> } <add> RouterFunction<ServerResponse> result = new BuiltRouterFunction(this.routerFunctions); <ide> <ide> if (this.filterFunctions.isEmpty()) { <ide> return result; <ide> public RouterFunction<ServerResponse> build() { <ide> } <ide> } <ide> <add> <add> /** <add> * Router function returned by {@link #build()} that simply iterates over the registered routes. <add> */ <add> private static class BuiltRouterFunction extends RouterFunctions.AbstractRouterFunction<ServerResponse> { <add> <add> private List<RouterFunction<ServerResponse>> routerFunctions; <add> <add> public BuiltRouterFunction(List<RouterFunction<ServerResponse>> routerFunctions) { <add> Assert.notEmpty(routerFunctions, "RouterFunctions must not be empty"); <add> this.routerFunctions = routerFunctions; <add> } <add> <add> @Override <add> public Optional<HandlerFunction<ServerResponse>> route(ServerRequest request) { <add> for (RouterFunction<ServerResponse> routerFunction : this.routerFunctions) { <add> Optional<HandlerFunction<ServerResponse>> result = routerFunction.route(request); <add> if (result.isPresent()) { <add> return result; <add> } <add> } <add> return Optional.empty(); <add> } <add> <add> @Override <add> public void accept(RouterFunctions.Visitor visitor) { <add> this.routerFunctions.forEach(routerFunction -> routerFunction.accept(visitor)); <add> } <add> } <add> <add> <ide> } <ide><path>spring-webmvc/src/main/java/org/springframework/web/servlet/function/RouterFunctions.java <ide> /* <del> * Copyright 2002-2019 the original author or authors. <add> * Copyright 2002-2020 the original author or authors. <ide> * <ide> * Licensed under the Apache License, Version 2.0 (the "License"); <ide> * you may not use this file except in compliance with the License. <ide> public interface Visitor { <ide> } <ide> <ide> <del> private abstract static class AbstractRouterFunction<T extends ServerResponse> implements RouterFunction<T> { <add> abstract static class AbstractRouterFunction<T extends ServerResponse> implements RouterFunction<T> { <ide> <ide> @Override <ide> public String toString() {
4
Python
Python
use the new openblas lib
7ef28b2f1c383828bebcbb87b8e487e27c6e3ff9
<ide><path>tools/openblas_support.py <ide> def download_openblas(target, plat, ilp64): <ide> suffix = 'macosx_10_9_x86_64-gf_1becaaa.tar.gz' <ide> typ = 'tar.gz' <ide> elif plat == 'macosx-arm64': <del> suffix = 'macosx_11_0_arm64-gf_f10e307.tar.gz' <add> suffix = 'macosx_11_0_arm64-gf_f26990f.tar.gz' <ide> typ = 'tar.gz' <ide> elif osname == 'win': <ide> if plat == "win-32":
1
Javascript
Javascript
use native `promise` instead of `rsvp`
3589ebbe863dd3571fac44cc1c7b9a223d470858
<ide><path>tests/node/visit-test.js <del>var RSVP = require('rsvp'); <ide> var SimpleDOM = require('simple-dom'); <ide> var appModule = require('./helpers/app-module'); <ide> <ide> QUnit.test('FastBoot: basic', function(assert) { <ide> <ide> var App = this.createApplication(); <ide> <del> return RSVP.all([ <add> return Promise.all([ <ide> fastbootVisit(App, '/a').then( <ide> assertFastbootResult(assert, { <ide> url: '/a', <ide> QUnit.test('FastBoot: redirect', function(assert) { <ide> <ide> var App = this.createApplication(); <ide> <del> return RSVP.all([ <add> return Promise.all([ <ide> fastbootVisit(App, '/a').then( <ide> assertFastbootResult(assert, { <ide> url: '/c', <ide> QUnit.test('FastBoot: attributes are sanitized', function(assert) { <ide> <ide> var App = this.createApplication(); <ide> <del> return RSVP.all([ <add> return Promise.all([ <ide> fastbootVisit(App, '/').then( <ide> assertFastbootResult(assert, { <ide> url: '/', <ide> QUnit.test('FastBoot: route error', function(assert) { <ide> <ide> var App = this.createApplication(); <ide> <del> return RSVP.all([ <add> return Promise.all([ <ide> fastbootVisit(App, '/a').then( <ide> function(instance) { <ide> assert.ok(false, 'It should not render'); <ide> QUnit.test('FastBoot: route error template', function(assert) { <ide> <ide> var App = this.createApplication(); <ide> <del> return RSVP.all([ <add> return Promise.all([ <ide> fastbootVisit(App, '/a').then( <ide> assertFastbootResult(assert, { <ide> url: '/a', <ide> QUnit.test('Resource-discovery setup', function(assert) { <ide> <ide> fetch: function(url) { <ide> this.get('requests').push(url); <del> return RSVP.resolve(); <add> return Promise.resolve(); <ide> }, <ide> }); <ide> <ide> QUnit.test('Resource-discovery setup', function(assert) { <ide> }, handleError(assert)); <ide> } <ide> <del> return RSVP.all([ <add> return Promise.all([ <ide> assertResources('/a', ['/a', '/b', '/c']), <ide> assertResources('/b', ['/b', '/c']), <ide> assertResources('/c', ['/c']), <ide> QUnit.test('FastBoot: tagless components can render', function(assert) { <ide> <ide> var App = this.createApplication(); <ide> <del> return RSVP.all([ <add> return Promise.all([ <ide> fastbootVisit(App, '/').then( <ide> assertFastbootResult(assert, { <ide> url: '/',
1
Javascript
Javascript
add cors methods and headers for lazycompilation
8e914b75209eeca0935b39bcf5a06cbc1cdb82d5
<ide><path>lib/hmr/lazyCompilationBackend.js <ide> module.exports = options => (compiler, callback) => { <ide> req.socket.setNoDelay(true); <ide> res.writeHead(200, { <ide> "content-type": "text/event-stream", <del> "Access-Control-Allow-Origin": "*" <add> "Access-Control-Allow-Origin": "*", <add> "Access-Control-Allow-Methods": "*", <add> "Access-Control-Allow-Headers": "*" <ide> }); <ide> res.write("\n"); <ide> let moduleActivated = false;
1
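The headers added in the patch above exist so that a page served from a different origin can consume the lazy-compilation event stream. The sketch below is a hedged illustration of that client side, not part of the patch: the backend URL and module query parameter are invented, and only the use of a cross-origin EventSource reflects what the new headers enable.

// Hypothetical client subscribing to the lazy-compilation backend from
// another origin; the URL and query parameter are assumptions.
const backendUrl = 'http://localhost:8080/lazy-compilation?module=./src/page.js';

// EventSource issues a CORS request for the text/event-stream response;
// without the Access-Control-Allow-* headers the browser would block it.
const source = new EventSource(backendUrl);

source.onopen = () => console.log('lazy-compilation stream open');
source.onmessage = (event) => console.log('message', event.data);
source.onerror = (err) => console.error('stream error', err);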
Java
Java
fix spelling in reactinstancemanager
13d8375323c8fac2cfa94648cdde75448faaa01e
<ide><path>ReactAndroid/src/main/java/com/facebook/react/ReactInstanceManager.java <ide> public interface ReactInstanceEventListener { <ide> Collections.synchronizedSet(new HashSet<ReactInstanceEventListener>()); <ide> // Identifies whether the instance manager is or soon will be initialized (on background thread) <ide> private volatile boolean mHasStartedCreatingInitialContext = false; <del> // Identifies whether the insance manager destroy function is in process, <add> // Identifies whether the instance manager destroy function is in process, <ide> // while true any spawned create thread should wait for proper clean up before initializing <ide> private volatile Boolean mHasStartedDestroying = false; <ide> private final UIImplementationProvider mUIImplementationProvider;
1
Ruby
Ruby
remove duplication from cleanup methods
3587dc441e0c7f649dbda67a12e4b4c87e434885
<ide><path>Library/Homebrew/cmd/cleanup.rb <ide> def cleanup_logs <ide> return unless HOMEBREW_LOGS.directory? <ide> time = Time.now - 2 * 7 * 24 * 60 * 60 # two weeks <ide> HOMEBREW_LOGS.subdirs.each do |dir| <del> if dir.mtime < time <del> if ARGV.dry_run? <del> puts "Would remove: #{dir}" <del> else <del> puts "Removing: #{dir}..." <del> dir.rmtree <del> end <del> end <add> cleanup_path(dir) { dir.rmtree } if dir.mtime < time <ide> end <ide> end <ide> <ide> def cleanup_formula f <ide> def cleanup_keg keg <ide> if keg.linked? <ide> opoo "Skipping (old) #{keg} due to it being linked" <del> elsif ARGV.dry_run? <del> puts "Would remove: #{keg} (#{keg.abv})" <ide> else <del> puts "Removing: #{keg}... (#{keg.abv})" <del> keg.uninstall <add> cleanup_path(keg) { keg.uninstall } <ide> end <ide> end <ide> <ide> def cleanup_cache <ide> end <ide> <ide> if f.version > version || ARGV.switch?('s') && !f.installed? || bottle_file_outdated?(f, file) <del> cleanup_cached_file(file) <add> cleanup_path(file) { file.unlink } <ide> end <ide> end <ide> end <ide> <del> def cleanup_cached_file file <add> def cleanup_path(path) <ide> if ARGV.dry_run? <del> puts "Would remove: #{file} (#{file.abv})" <add> puts "Would remove: #{path} (#{path.abv})" <ide> else <del> puts "Removing: #{file}... (#{file.abv})" <del> file.unlink <add> puts "Removing: #{path}... (#{path.abv})" <add> yield <ide> end <ide> end <ide>
1
Text
Text
fix duplicate words & spellings in docs
b21e3f0711ca989fe21bd4484b58d03e5693f859
<ide><path>doc/api/async_hooks.md <ide> const server = net.createServer((conn) => { <ide> <ide> ## JavaScript Embedder API <ide> <del>Library developers that handle their own asychronous resources performing tasks <add>Library developers that handle their own asynchronous resources performing tasks <ide> like I/O, connection pooling, or managing callback queues may use the `AsyncWrap` <ide> JavaScript API so that all the appropriate callbacks are called. <ide> <ide><path>doc/api/dgram.md <ide> added: v8.6.0 <ide> <ide> * `multicastInterface` {String} <ide> <del>*Note: All references to scope in this section are refering to <add>*Note: All references to scope in this section are referring to <ide> [IPv6 Zone Indices][], which are defined by [RFC 4007][]. In string form, an IP <ide> with a scope index is written as `'IP%scope'` where scope is an interface name or <ide> interface number.* <ide> packet is allowed to travel through, specifically for multicast traffic. Each <ide> router or gateway that forwards a packet decrements the TTL. If the TTL is <ide> decremented to 0 by a router, it will not be forwarded. <ide> <del>The argument passed to to `socket.setMulticastTTL()` is a number of hops <add>The argument passed to `socket.setMulticastTTL()` is a number of hops <ide> between 0 and 255. The default on most systems is `1` but can vary. <ide> <ide> ### socket.setRecvBufferSize(size) <ide><path>doc/api/esm.md <ide> export async function dynamicInstantiate(url) { <ide> ``` <ide> <ide> With the list of module exports provided upfront, the `execute` function will <del>then be called at the exact point of module evalutation order for that module <add>then be called at the exact point of module evaluation order for that module <ide> in the import tree. <ide> <ide> [Node.js EP for ES Modules]: https://github.com/nodejs/node-eps/blob/master/002-es-modules.md <ide><path>doc/api/inspector.md <ide> const inspector = require('inspector'); <ide> to false. <ide> <ide> Activate inspector on host and port. Equivalent to `node <del>--inspect=[[host:]port]`, but can be done programatically after node has <add>--inspect=[[host:]port]`, but can be done programmatically after node has <ide> started. <ide> <ide> If wait is `true`, will block until a client has connected to the inspect port <ide><path>doc/api/v8.md <ide> by subclasses. <ide> This method is called to generate error objects that will be thrown when an <ide> object can not be cloned. <ide> <del>This method defaults to the [`Error`][] constructor and can be be overridden on <add>This method defaults to the [`Error`][] constructor and can be overridden on <ide> subclasses. <ide> <ide> #### serializer.\_getSharedArrayBufferId(sharedArrayBuffer)
5
Javascript
Javascript
add spec for incrementing headings
d54f09ef29356d4c8ba71d3b37bbebd379627648
<ide><path>docs/spec/collectSpec.js <ide> describe('collect', function(){ <ide> '<p>dad<a href="#!angular.foo"><code>angular.foo</code></a></p>\n\n' + <ide> '<p><a href="#!angular.directive.ng:foo"><code>ng:foo</code></a></p>'); <ide> }); <add> <add> it('should increment all headings by one', function() { <add> TAG.description(doc, 'description', '# foo\nabc'); <add> expect(doc.description). <add> toBe('<h2>foo</h2>\n\n<p>abc</p>'); <add> }); <ide> }); <ide> <ide> describe('@example', function(){
1
Text
Text
update button labels
589c0a25dfa18c2090549cc6f5b626d69ea53c2a
<ide><path>docs/tutorial/tutorial.md <ide> Let's show the previous moves made in the game so far. We learned earlier that R <ide> <ide> const moves = history.map((step, move) => { <ide> const desc = move ? <del> 'Move #' + move : <del> 'Game start'; <add> 'Go to move #' + move : <add> 'Go to game start'; <ide> return ( <ide> <li> <ide> <button onClick={() => this.jumpTo(move)}>{desc}</button> <ide> For our move list, we already have a unique ID for each step: the number of the <ide> ```js{6} <ide> const moves = history.map((step, move) => { <ide> const desc = move ? <del> 'Move #' + move : <del> 'Game start'; <add> 'Go to move #' + move : <add> 'Go to game start'; <ide> return ( <ide> <li key={move}> <ide> <button onClick={() => this.jumpTo(move)}>{desc}</button>
1
Go
Go
ignore stat errors on volume rm
8d27417bfeff316346d00c07a456b0e1b056e788
<ide><path>daemon/delete.go <ide> func (daemon *Daemon) VolumeRm(name string) error { <ide> if err == ErrVolumeInUse { <ide> return fmt.Errorf("Conflict: %v", err) <ide> } <del> return err <add> return fmt.Errorf("Error while removing volume %s: %v", name, err) <ide> } <ide> return nil <ide> } <ide><path>volume/local/local.go <ide> func (r *Root) Create(name string, _ map[string]string) (volume.Volume, error) { <ide> func (r *Root) Remove(v volume.Volume) error { <ide> r.m.Lock() <ide> defer r.m.Unlock() <add> <ide> lv, ok := v.(*localVolume) <ide> if !ok { <ide> return errors.New("unknown volume type") <ide> } <ide> <ide> realPath, err := filepath.EvalSymlinks(lv.path) <ide> if err != nil { <del> return err <add> if !os.IsNotExist(err) { <add> return err <add> } <add> realPath = filepath.Dir(lv.path) <ide> } <add> <ide> if !r.scopedPath(realPath) { <del> return fmt.Errorf("Unable to remove a directory of out the Docker root: %s", realPath) <add> return fmt.Errorf("Unable to remove a directory of out the Docker root %s: %s", r.scope, realPath) <ide> } <ide> <del> if err := os.RemoveAll(realPath); err != nil { <add> if err := removePath(realPath); err != nil { <ide> return err <ide> } <ide> <ide> delete(r.volumes, lv.name) <del> return os.RemoveAll(filepath.Dir(lv.path)) <add> return removePath(filepath.Dir(lv.path)) <add>} <add> <add>func removePath(path string) error { <add> if err := os.RemoveAll(path); err != nil { <add> if os.IsNotExist(err) { <add> return nil <add> } <add> return err <add> } <add> return nil <ide> } <ide> <ide> // Get looks up the volume for the given name and returns it if found <ide> func (r *Root) Get(name string) (volume.Volume, error) { <ide> // is under Docker's root and the valid local paths. <ide> func (r *Root) scopedPath(realPath string) bool { <ide> // Volumes path for Docker version >= 1.7 <del> if strings.HasPrefix(realPath, filepath.Join(r.scope, volumesPathName)) { <add> if strings.HasPrefix(realPath, filepath.Join(r.scope, volumesPathName)) && realPath != filepath.Join(r.scope, volumesPathName) { <ide> return true <ide> } <ide> <ide><path>volume/local/local_test.go <add>package local <add> <add>import ( <add> "io/ioutil" <add> "os" <add> "testing" <add>) <add> <add>func TestRemove(t *testing.T) { <add> rootDir, err := ioutil.TempDir("", "local-volume-test") <add> if err != nil { <add> t.Fatal(err) <add> } <add> defer os.RemoveAll(rootDir) <add> <add> r, err := New(rootDir) <add> if err != nil { <add> t.Fatal(err) <add> } <add> <add> vol, err := r.Create("testing", nil) <add> if err != nil { <add> t.Fatal(err) <add> } <add> <add> if err := r.Remove(vol); err != nil { <add> t.Fatal(err) <add> } <add> <add> vol, err = r.Create("testing2", nil) <add> if err != nil { <add> t.Fatal(err) <add> } <add> if err := os.RemoveAll(vol.Path()); err != nil { <add> t.Fatal(err) <add> } <add> <add> if err := r.Remove(vol); err != nil { <add> t.Fatal(err) <add> } <add> <add> if _, err := os.Stat(vol.Path()); err != nil && !os.IsNotExist(err) { <add> t.Fatal("volume dir not removed") <add> } <add> <add> if len(r.List()) != 0 { <add> t.Fatal("expected there to be no volumes") <add> } <add>} <add> <add>func TestInitializeWithVolumes(t *testing.T) { <add> rootDir, err := ioutil.TempDir("", "local-volume-test") <add> if err != nil { <add> t.Fatal(err) <add> } <add> defer os.RemoveAll(rootDir) <add> <add> r, err := New(rootDir) <add> if err != nil { <add> t.Fatal(err) <add> } <add> <add> vol, err := r.Create("testing", nil) <add> if err != nil { <add> t.Fatal(err) <add> } 
<add> <add> r, err = New(rootDir) <add> if err != nil { <add> t.Fatal(err) <add> } <add> <add> v, err := r.Get(vol.Name()) <add> if err != nil { <add> t.Fatal(err) <add> } <add> <add> if v.Path() != vol.Path() { <add> t.Fatal("expected to re-initialize root with existing volumes") <add> } <add>}
3
Text
Text
remove text repetition
a43bf9f12f4facaae98cba8dec8435c60b633e09
<ide><path>guides/source/getting_started.md <ide> You're getting this error now because Rails expects plain actions like this one <ide> to have views associated with them to display their information. With no view <ide> available, Rails will raise an exception. <ide> <del>Let's look at the full error message looks again: <add>Let's look at the full error message again: <ide> <ide> >ArticlesController#new is missing a template for this request format and variant. request.formats: ["text/html"] request.variant: [] NOTE! For XHR/Ajax or API requests, this action would normally respond with 204 No Content: an empty white screen. Since you're loading it in a web browser, we assume that you expected to actually render a template, not… nothing, so we're showing an error to be extra-clear. If you expect 204 No Content, carry on. That's what you'll get from an XHR or API request. Give it a shot. <ide>
1
PHP
PHP
add some common files to the compile config
9a3704826187a2983911144c584f7acf83ffa06f
<ide><path>src/Illuminate/Foundation/Console/Optimize/config.php <ide> $basePath.'/vendor/laravel/framework/src/Illuminate/Contracts/View/View.php', <ide> $basePath.'/vendor/laravel/framework/src/Illuminate/Contracts/Http/Kernel.php', <ide> $basePath.'/vendor/laravel/framework/src/Illuminate/Contracts/Auth/Guard.php', <add> $basePath.'/vendor/laravel/framework/src/Illuminate/Contracts/Auth/Access/Gate.php', <ide> $basePath.'/vendor/laravel/framework/src/Illuminate/Contracts/Hashing/Hasher.php', <ide> $basePath.'/vendor/laravel/framework/src/Illuminate/Auth/AuthManager.php', <ide> $basePath.'/vendor/laravel/framework/src/Illuminate/Auth/Guard.php', <add> $basePath.'/vendor/laravel/framework/src/Illuminate/Auth/Access/Gate.php', <ide> $basePath.'/vendor/laravel/framework/src/Illuminate/Contracts/Auth/UserProvider.php', <ide> $basePath.'/vendor/laravel/framework/src/Illuminate/Auth/EloquentUserProvider.php', <ide> $basePath.'/vendor/laravel/framework/src/Illuminate/Container/Container.php', <ide> $basePath.'/vendor/symfony/http-foundation/ResponseHeaderBag.php', <ide> $basePath.'/vendor/symfony/http-foundation/Cookie.php', <ide> $basePath.'/vendor/symfony/finder/SplFileInfo.php', <add> $basePath.'/vendor/symfony/finder/Expression/Glob.php', <ide> $basePath.'/vendor/symfony/finder/Expression/Regex.php', <ide> $basePath.'/vendor/symfony/finder/Expression/ValueInterface.php', <ide> $basePath.'/vendor/symfony/finder/Expression/Expression.php', <ide> $basePath.'/vendor/symfony/finder/Iterator/ExcludeDirectoryFilterIterator.php', <ide> $basePath.'/vendor/symfony/finder/Iterator/RecursiveDirectoryIterator.php', <ide> $basePath.'/vendor/symfony/finder/Iterator/FileTypeFilterIterator.php', <add> $basePath.'/vendor/symfony/finder/Iterator/FilenameFilterIterator.php', <ide> $basePath.'/vendor/symfony/finder/Shell/Shell.php', <ide> $basePath.'/vendor/symfony/finder/Adapter/AdapterInterface.php', <ide> $basePath.'/vendor/symfony/finder/Adapter/AbstractAdapter.php', <ide> $basePath.'/vendor/symfony/finder/Adapter/PhpAdapter.php', <ide> $basePath.'/vendor/symfony/finder/Adapter/BsdFindAdapter.php', <ide> $basePath.'/vendor/symfony/finder/Finder.php', <add> $basePath.'/vendor/symfony/finder/Glob.php', <add> $basePath.'/vendor/vlucas/phpdotenv/src/Dotenv.php', <add> $basePath.'/vendor/nesbot/carbon/src/Carbon/Carbon.php', <ide> ]);
1
Java
Java
fix nullability of statewrapper
f2fa2860d124f7691ea4879ac157c62e2d83cc03
<ide><path>ReactAndroid/src/main/java/com/facebook/react/uimanager/ViewManager.java <ide> public Map<String, String> getNativeProps() { <ide> * this component type. <ide> */ <ide> public @Nullable Object updateState( <del> @NonNull T view, ReactStylesDiffMap props, @Nullable StateWrapper stateWrapper) { <add> @NonNull T view, ReactStylesDiffMap props, StateWrapper stateWrapper) { <ide> return null; <ide> } <ide> <ide><path>ReactAndroid/src/main/java/com/facebook/react/views/modal/ReactModalHostManager.java <ide> protected void onAfterUpdateTransaction(ReactModalHostView view) { <ide> <ide> @Override <ide> public Object updateState( <del> ReactModalHostView view, ReactStylesDiffMap props, @Nullable StateWrapper stateWrapper) { <add> ReactModalHostView view, ReactStylesDiffMap props, StateWrapper stateWrapper) { <ide> view.getFabricViewStateManager().setStateWrapper(stateWrapper); <ide> Point modalSize = ModalHostHelper.getModalHostSize(view.getContext()); <ide> view.updateState(modalSize.x, modalSize.y); <ide><path>ReactAndroid/src/main/java/com/facebook/react/views/scroll/ReactHorizontalScrollViewManager.java <ide> public ReactHorizontalScrollView createViewInstance(ThemedReactContext context) <ide> <ide> @Override <ide> public Object updateState( <del> ReactHorizontalScrollView view, <del> ReactStylesDiffMap props, <del> @Nullable StateWrapper stateWrapper) { <add> ReactHorizontalScrollView view, ReactStylesDiffMap props, StateWrapper stateWrapper) { <ide> view.getFabricViewStateManager().setStateWrapper(stateWrapper); <ide> return null; <ide> } <ide><path>ReactAndroid/src/main/java/com/facebook/react/views/scroll/ReactScrollViewManager.java <ide> public void setContentOffset(ReactScrollView view, ReadableMap value) { <ide> <ide> @Override <ide> public Object updateState( <del> ReactScrollView view, ReactStylesDiffMap props, @Nullable StateWrapper stateWrapper) { <add> ReactScrollView view, ReactStylesDiffMap props, StateWrapper stateWrapper) { <ide> view.getFabricViewStateManager().setStateWrapper(stateWrapper); <ide> return null; <ide> } <ide><path>ReactAndroid/src/main/java/com/facebook/react/views/text/ReactTextViewManager.java <ide> public boolean needsCustomLayoutForChildren() { <ide> <ide> @Override <ide> public Object updateState( <del> ReactTextView view, ReactStylesDiffMap props, @Nullable StateWrapper stateWrapper) { <del> if (stateWrapper == null) { <del> return null; <del> } <del> <add> ReactTextView view, ReactStylesDiffMap props, StateWrapper stateWrapper) { <ide> if (ReactFeatureFlags.isMapBufferSerializationEnabled()) { <ide> MapBuffer stateMapBuffer = stateWrapper.getStateDataMapBuffer(); <ide> if (stateMapBuffer != null) { <ide><path>ReactAndroid/src/main/java/com/facebook/react/views/textinput/ReactTextInputManager.java <ide> protected EditText createInternalEditText(ThemedReactContext themedReactContext) <ide> <ide> @Override <ide> public Object updateState( <del> ReactEditText view, ReactStylesDiffMap props, @Nullable StateWrapper stateWrapper) { <del> <add> ReactEditText view, ReactStylesDiffMap props, StateWrapper stateWrapper) { <ide> if (ReactEditText.DEBUG_MODE) { <ide> FLog.e(TAG, "updateState: [" + view.getId() + "]"); <ide> } <ide> <ide> view.getFabricViewStateManager().setStateWrapper(stateWrapper); <ide> <del> if (stateWrapper == null) { <del> return null; <del> } <del> <ide> ReadableNativeMap state = stateWrapper.getStateData(); <ide> <ide> if (state == null) {
6
Javascript
Javascript
provide access to raw headers/trailers
e6c81bd67986e672b9b253c62ce6d4a519d3a2e1
<ide><path>lib/_http_common.js <ide> function parserOnHeadersComplete(info) { <ide> n = Math.min(n, parser.maxHeaderPairs); <ide> } <ide> <del> for (var i = 0; i < n; i += 2) { <del> var k = headers[i]; <del> var v = headers[i + 1]; <del> parser.incoming._addHeaderLine(k, v); <del> } <del> <add> parser.incoming._addHeaderLines(headers, n); <ide> <ide> if (info.method) { <ide> // server only <ide> function parserOnMessageComplete() { <ide> // Emit any trailing headers. <ide> var headers = parser._headers; <ide> if (headers) { <del> for (var i = 0, n = headers.length; i < n; i += 2) { <del> var k = headers[i]; <del> var v = headers[i + 1]; <del> parser.incoming._addHeaderLine(k, v); <del> } <add> parser.incoming._addHeaderLines(headers, headers.length); <ide> parser._headers = []; <ide> parser._url = ''; <ide> } <ide><path>lib/_http_incoming.js <ide> function IncomingMessage(socket) { <ide> this.httpVersion = null; <ide> this.complete = false; <ide> this.headers = {}; <add> this.rawHeaders = []; <ide> this.trailers = {}; <add> this.rawTrailers = []; <ide> <ide> this.readable = true; <ide> <ide> IncomingMessage.prototype.destroy = function(error) { <ide> }; <ide> <ide> <add>IncomingMessage.prototype._addHeaderLines = function(headers, n) { <add> if (headers && headers.length) { <add> var raw, dest; <add> if (this.complete) { <add> raw = this.rawTrailers; <add> dest = this.trailers; <add> } else { <add> raw = this.rawHeaders; <add> dest = this.headers; <add> } <add> raw.push.apply(raw, headers); <add> <add> for (var i = 0; i < n; i += 2) { <add> var k = headers[i]; <add> var v = headers[i + 1]; <add> this._addHeaderLine(k, v, dest); <add> } <add> } <add>}; <add> <add> <ide> // Add the given (field, value) pair to the message <ide> // <ide> // Per RFC2616, section 4.2 it is acceptable to join multiple instances of the <ide> // same header with a ', ' if the header in question supports specification of <ide> // multiple values this way. If not, we declare the first instance the winner <ide> // and drop the second. Extended header fields (those beginning with 'x-') are <ide> // always joined. <del>IncomingMessage.prototype._addHeaderLine = function(field, value) { <del> var dest = this.complete ? this.trailers : this.headers; <del> <add>IncomingMessage.prototype._addHeaderLine = function(field, value, dest) { <ide> field = field.toLowerCase(); <ide> switch (field) { <ide> // Array headers: <ide><path>test/simple/test-http-raw-headers.js <add>// Copyright Joyent, Inc. and other Node contributors. <add>// <add>// Permission is hereby granted, free of charge, to any person obtaining a <add>// copy of this software and associated documentation files (the <add>// "Software"), to deal in the Software without restriction, including <add>// without limitation the rights to use, copy, modify, merge, publish, <add>// distribute, sublicense, and/or sell copies of the Software, and to permit <add>// persons to whom the Software is furnished to do so, subject to the <add>// following conditions: <add>// <add>// The above copyright notice and this permission notice shall be included <add>// in all copies or substantial portions of the Software. <add>// <add>// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS <add>// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF <add>// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN <add>// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, <add>// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR <add>// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE <add>// USE OR OTHER DEALINGS IN THE SOFTWARE. <add> <add>var common = require('../common'); <add>var assert = require('assert'); <add> <add>var http = require('http'); <add> <add>http.createServer(function(req, res) { <add> this.close(); <add> var expectRawHeaders = [ <add> 'Host', <add> 'localhost:12346', <add> 'transfer-ENCODING', <add> 'CHUNKED', <add> 'x-BaR', <add> 'yoyoyo', <add> 'Connection', <add> 'keep-alive' <add> ]; <add> var expectHeaders = { <add> host: 'localhost:12346', <add> 'transfer-encoding': 'CHUNKED', <add> 'x-bar': 'yoyoyo', <add> connection: 'keep-alive' <add> }; <add> <add> var expectRawTrailers = [ <add> 'x-bAr', <add> 'yOyOyOy', <add> 'x-baR', <add> 'OyOyOyO', <add> 'X-bAr', <add> 'yOyOyOy', <add> 'X-baR', <add> 'OyOyOyO' <add> ]; <add> <add> var expectTrailers = { 'x-bar': 'yOyOyOy, OyOyOyO, yOyOyOy, OyOyOyO' }; <add> <add> assert.deepEqual(req.rawHeaders, expectRawHeaders); <add> assert.deepEqual(req.headers, expectHeaders); <add> <add> req.on('end', function() { <add> assert.deepEqual(req.rawTrailers, expectRawTrailers); <add> assert.deepEqual(req.trailers, expectTrailers); <add> }); <add> <add> req.resume(); <add> res.addTrailers([ <add> ['x-fOo', 'xOxOxOx'], <add> ['x-foO', 'OxOxOxO'], <add> ['X-fOo', 'xOxOxOx'], <add> ['X-foO', 'OxOxOxO'] <add> ]); <add> res.end('x f o o'); <add>}).listen(common.PORT, function() { <add> var expectRawHeaders = [ <add> 'Date', <add> 'Tue, 06 Aug 2013 01:31:54 GMT', <add> 'Connection', <add> 'keep-alive', <add> 'Transfer-Encoding', <add> 'chunked' <add> ]; <add> var req = http.request({ port: common.PORT, path: '/' }); <add> req.addTrailers([ <add> ['x-bAr', 'yOyOyOy'], <add> ['x-baR', 'OyOyOyO'], <add> ['X-bAr', 'yOyOyOy'], <add> ['X-baR', 'OyOyOyO'] <add> ]); <add> req.setHeader('transfer-ENCODING', 'CHUNKED'); <add> req.setHeader('x-BaR', 'yoyoyo'); <add> req.end('y b a r'); <add> req.on('response', function(res) { <add> var expectRawHeaders = [ <add> 'Date', <add> null, <add> 'Connection', <add> 'keep-alive', <add> 'Transfer-Encoding', <add> 'chunked' <add> ]; <add> var expectHeaders = { <add> date: null, <add> connection: 'keep-alive', <add> 'transfer-encoding': 'chunked' <add> }; <add> res.rawHeaders[1] = null; <add> res.headers.date = null; <add> assert.deepEqual(res.rawHeaders, expectRawHeaders); <add> assert.deepEqual(res.headers, expectHeaders); <add> res.on('end', function() { <add> var expectRawTrailers = [ <add> 'x-fOo', <add> 'xOxOxOx', <add> 'x-foO', <add> 'OxOxOxO', <add> 'X-fOo', <add> 'xOxOxOx', <add> 'X-foO', <add> 'OxOxOxO' <add> ]; <add> var expectTrailers = { 'x-foo': 'xOxOxOx, OxOxOxO, xOxOxOx, OxOxOxO' }; <add> <add> assert.deepEqual(res.rawTrailers, expectRawTrailers); <add> assert.deepEqual(res.trailers, expectTrailers); <add> console.log('ok'); <add> }); <add> res.resume(); <add> }); <add>});
3
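Outside the test above, the new properties are easiest to see on a plain HTTP server. The following is a minimal sketch assuming a Node.js build that includes this patch; the header name and the use of an ephemeral port are arbitrary choices for illustration.

const http = require('http');

const server = http.createServer((req, res) => {
  // Normalized view: lower-cased names, duplicates joined per the usual rules.
  console.log(req.headers['x-example']);
  // Raw view: a flat [name, value, name, value, ...] list exactly as received,
  // preserving the original casing and any duplicate entries.
  console.log(req.rawHeaders);
  res.end('ok');
});

server.listen(0, () => {
  const port = server.address().port;
  http.get({ port: port, headers: { 'X-Example': 'one' } }, (res) => {
    res.resume();
    res.on('end', () => server.close());
  });
});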
Javascript
Javascript
move global getter out of parse.js
cefdaf131dd373cf06907e241a88339452c51313
<ide><path>src/Angular.js <ide> function assertArgFn(arg, name, acceptArrayAnnotation) { <ide> (arg && typeof arg == 'object' ? arg.constructor.name || 'Object' : typeof arg)); <ide> return arg; <ide> } <add> <add>/** <add> * Return the value accessible from the object by path. Any undefined traversals are ignored <add> * @param {Object} obj starting object <add> * @param {string} path path to traverse <add> * @param {boolean=true} bindFnToScope <add> * @returns value as accessible by path <add> */ <add>//TODO(misko): this function needs to be removed <add>function getter(obj, path, bindFnToScope) { <add> if (!path) return obj; <add> var keys = path.split('.'); <add> var key; <add> var lastInstance = obj; <add> var len = keys.length; <add> <add> for (var i = 0; i < len; i++) { <add> key = keys[i]; <add> if (obj) { <add> obj = (lastInstance = obj)[key]; <add> } <add> } <add> if (!bindFnToScope && isFunction(obj)) { <add> return bind(lastInstance, obj); <add> } <add> return obj; <add>} <ide><path>src/ng/parse.js <ide> function setter(obj, path, setValue) { <ide> return setValue; <ide> } <ide> <del>/** <del> * Return the value accessible from the object by path. Any undefined traversals are ignored <del> * @param {Object} obj starting object <del> * @param {string} path path to traverse <del> * @param {boolean=true} bindFnToScope <del> * @returns value as accessible by path <del> */ <del>//TODO(misko): this function needs to be removed <del>function getter(obj, path, bindFnToScope) { <del> if (!path) return obj; <del> var keys = path.split('.'); <del> var key; <del> var lastInstance = obj; <del> var len = keys.length; <del> <del> for (var i = 0; i < len; i++) { <del> key = keys[i]; <del> if (obj) { <del> obj = (lastInstance = obj)[key]; <del> } <del> } <del> if (!bindFnToScope && isFunction(obj)) { <del> return bind(lastInstance, obj); <del> } <del> return obj; <del>} <del> <ide> var getterFnCache = {}; <ide> <ide> /**
2
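The moved helper is internal to AngularJS and is not callable from application code, so the standalone sketch below only restates the path-walking behaviour it implements, with invented data, to make the intent of the function easier to follow.

// Standalone illustration of the dotted-path lookup performed by the
// internal getter() helper above; not part of AngularJS's public API.
function getPath(obj, path) {
  if (!path) return obj;
  var keys = path.split('.');
  var current = obj;
  for (var i = 0; i < keys.length; i++) {
    if (current) {
      current = current[keys[i]];
    }
  }
  return current;
}

var scope = { user: { name: 'Ada' } };
console.log(getPath(scope, 'user.name'));      // 'Ada'
console.log(getPath(scope, 'user.missing.x')); // undefined, traversal is ignored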
Javascript
Javascript
set donor cookie
e3bc038457af7f05f0db84eb8f53106d90808dfa
<ide><path>client/src/redux/donation-saga.js <ide> import { <ide> shouldRequestDonationSelector, <ide> preventProgressDonationRequests, <ide> recentlyClaimedBlockSelector, <add> isDonatingSelector, <ide> addDonationComplete, <ide> addDonationError, <ide> postChargeStripeComplete, <ide> function* addDonationSaga({ payload }) { <ide> try { <ide> yield call(addDonation, payload); <ide> yield put(addDonationComplete()); <add> yield call(setDonationCookie); <ide> } catch (error) { <ide> const data = <ide> error.response && error.response.data <ide> function* postChargeStripeSaga({ payload }) { <ide> try { <ide> yield call(postChargeStripe, payload); <ide> yield put(postChargeStripeComplete()); <add> yield call(setDonationCookie); <ide> } catch (error) { <ide> const err = <ide> error.response && error.response.data <ide> function* postChargeStripeCardSaga({ <ide> } <ide> yield call(addDonation, optimizedPayload); <ide> yield put(postChargeStripeCardComplete()); <add> yield call(setDonationCookie); <ide> } catch (error) { <ide> const errorMessage = error.message || defaultDonationErrorMessage; <ide> yield put(postChargeStripeCardError(errorMessage)); <ide> } <ide> } <ide> <add>function* setDonationCookie() { <add> const isDonating = yield select(isDonatingSelector); <add> const isDonorCookieSet = document.cookie <add> .split(';') <add> .some(item => item.trim().startsWith('isDonor=true')); <add> if (isDonating) { <add> if (!isDonorCookieSet) { <add> document.cookie = 'isDonor=true'; <add> } <add> } <add>} <add> <ide> export function createDonationSaga(types) { <ide> return [ <ide> takeEvery(types.tryToShowDonationModal, showDonateModalSaga), <ide> takeEvery(types.addDonation, addDonationSaga), <ide> takeLeading(types.postChargeStripe, postChargeStripeSaga), <del> takeLeading(types.postChargeStripeCard, postChargeStripeCardSaga) <add> takeLeading(types.postChargeStripeCard, postChargeStripeCardSaga), <add> takeEvery(types.fetchUserComplete, setDonationCookie) <ide> ]; <ide> }
1
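For reference, the cookie check inside the saga above can be expressed as a small standalone helper that runs in a browser console. The cookie name isDonor=true matches the patch; the helper name itself is invented.

// Returns true when the isDonor=true cookie written by the donation saga
// is present on the current document.
function hasDonorCookie() {
  return document.cookie
    .split(';')
    .some(function (item) { return item.trim().startsWith('isDonor=true'); });
}

console.log(hasDonorCookie());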
Javascript
Javascript
implement locale inheritance and locale updating
bd6399e015021a1de6e7e14d640da5564b77d7d3
<ide><path>src/lib/locale/constructor.js <del>export function Locale() { <add>export function Locale(config) { <add> if (config != null) { <add> this.set(config); <add> } <ide> } <ide><path>src/lib/locale/locale.js <ide> import './prototype'; <ide> import { <ide> getSetGlobalLocale, <ide> defineLocale, <add> updateLocale, <ide> getLocale <ide> } from './locales'; <ide> <ide> import { <ide> export { <ide> getSetGlobalLocale, <ide> defineLocale, <add> updateLocale, <ide> getLocale, <ide> listMonths, <ide> listMonthsShort, <ide><path>src/lib/locale/locales.js <ide> import isArray from '../utils/is-array'; <ide> import isUndefined from '../utils/is-undefined'; <ide> import compareArrays from '../utils/compare-arrays'; <add>import { deprecateSimple } from '../utils/deprecate'; <add>import { mergeConfigs } from './set'; <ide> import { Locale } from './constructor'; <ide> <ide> // internal storage for locale config files <ide> export function getSetGlobalLocale (key, values) { <ide> return globalLocale._abbr; <ide> } <ide> <del>export function defineLocale (name, values) { <del> if (values !== null) { <del> values.abbr = name; <del> locales[name] = locales[name] || new Locale(); <del> locales[name].set(values); <add>export function defineLocale (name, config) { <add> if (config !== null) { <add> config.abbr = name; <add> if (locales[name] != null) { <add> deprecateSimple('defineLocaleOverride', <add> 'use moment.updateLocale(localeName, config) to change ' + <add> 'an existing locale. moment.defineLocale(localeName, ' + <add> 'config) should only be used for creating a new locale'); <add> config = mergeConfigs(locales[name]._config, config); <add> } else if (config.parentLocale != null) { <add> if (locales[config.parentLocale] != null) { <add> config = mergeConfigs(locales[config.parentLocale]._config, config); <add> } else { <add> // treat as if there is no base config <add> deprecateSimple('parentLocaleUndefined', <add> 'specified parentLocale is not defined yet'); <add> } <add> } <add> locales[name] = new Locale(config); <ide> <ide> // backwards compat for now: also set the locale <ide> getSetGlobalLocale(name); <ide> export function defineLocale (name, values) { <ide> } <ide> } <ide> <add>export function updateLocale(name, config) { <add> if (config != null) { <add> var locale; <add> if (locales[name] != null) { <add> config = mergeConfigs(locales[name]._config, config); <add> } <add> locale = new Locale(config); <add> locale.parentLocale = locales[name]; <add> locales[name] = locale; <add> <add> // backwards compat for now: also set the locale <add> getSetGlobalLocale(name); <add> } else { <add> // pass null for config to unupdate, useful for tests <add> if (locales[name] != null) { <add> if (locales[name].parentLocale != null) { <add> locales[name] = locales[name].parentLocale; <add> } else if (locales[name] != null) { <add> delete locales[name]; <add> } <add> } <add> } <add> return locales[name]; <add>} <add> <ide> // returns locale data <ide> export function getLocale (key) { <ide> var locale; <ide><path>src/lib/locale/set.js <ide> import isFunction from '../utils/is-function'; <add>import extend from '../utils/extend'; <add>import isObject from '../utils/is-object'; <add>import hasOwnProp from '../utils/has-own-prop'; <ide> <ide> export function set (config) { <ide> var prop, i; <ide> export function set (config) { <ide> this['_' + i] = prop; <ide> } <ide> } <add> this._config = config; <ide> // Lenient ordinal parsing accepts just a number in addition to <ide> // number + (possibly) 
stuff coming from _ordinalParseLenient. <ide> this._ordinalParseLenient = new RegExp(this._ordinalParse.source + '|' + (/\d{1,2}/).source); <ide> } <add> <add>export function mergeConfigs(parentConfig, childConfig) { <add> var res = extend({}, parentConfig), prop; <add> for (prop in childConfig) { <add> if (hasOwnProp(childConfig, prop)) { <add> if (isObject(parentConfig[prop]) && isObject(childConfig[prop])) { <add> res[prop] = {}; <add> extend(res[prop], parentConfig[prop]); <add> extend(res[prop], childConfig[prop]); <add> } else if (childConfig[prop] != null) { <add> res[prop] = childConfig[prop]; <add> } else { <add> delete res[prop]; <add> } <add> } <add> } <add> return res; <add>} <ide><path>src/lib/units/week-year.js <ide> function setWeekAll(weekYear, week, weekday, dow, doy) { <ide> var dayOfYearData = dayOfYearFromWeeks(weekYear, week, weekday, dow, doy), <ide> date = createUTCDate(dayOfYearData.year, 0, dayOfYearData.dayOfYear); <ide> <del> // console.log("got", weekYear, week, weekday, "set", date.toISOString()); <ide> this.year(date.getUTCFullYear()); <ide> this.month(date.getUTCMonth()); <ide> this.date(date.getUTCDate()); <ide><path>src/lib/utils/is-object.js <add>export default function isObject(input) { <add> return Object.prototype.toString.call(input) === '[object Object]'; <add>} <ide><path>src/moment.js <ide> import { <ide> <ide> import { <ide> defineLocale, <add> updateLocale, <ide> getSetGlobalLocale as locale, <ide> getLocale as localeData, <ide> listMonths as months, <ide> moment.isDuration = isDuration; <ide> moment.monthsShort = monthsShort; <ide> moment.weekdaysMin = weekdaysMin; <ide> moment.defineLocale = defineLocale; <add>moment.updateLocale = updateLocale; <ide> moment.weekdaysShort = weekdaysShort; <ide> moment.normalizeUnits = normalizeUnits; <ide> moment.relativeTimeThreshold = relativeTimeThreshold; <ide><path>src/test/moment/locale_inheritance.js <add>import { module, test } from '../qunit'; <add>import moment from '../../moment'; <add> <add>module('locale inheritance'); <add> <add>test('calendar', function (assert) { <add> moment.defineLocale('base-cal', { <add> calendar : { <add> sameDay: '[Today at] HH:mm', <add> nextDay: '[Tomorrow at] HH:mm', <add> nextWeek: '[Next week at] HH:mm', <add> lastDay: '[Yesterday at] HH:mm', <add> lastWeek: '[Last week at] HH:mm', <add> sameElse: '[whatever]' <add> } <add> }); <add> moment.defineLocale('child-cal', { <add> parentLocale: 'base-cal', <add> calendar: { <add> sameDay: '[Today] HH:mm', <add> nextDay: '[Tomorrow] HH:mm', <add> nextWeek: '[Next week] HH:mm' <add> } <add> }); <add> <add> moment.locale('child-cal'); <add> var anchor = moment.utc('2015-05-05T12:00:00', moment.ISO_8601); <add> assert.equal(anchor.clone().add(3, 'hours').calendar(anchor), 'Today 15:00', 'today uses child version'); <add> assert.equal(anchor.clone().add(1, 'day').calendar(anchor), 'Tomorrow 12:00', 'tomorrow uses child version'); <add> assert.equal(anchor.clone().add(3, 'days').calendar(anchor), 'Next week 12:00', 'next week uses child version'); <add> <add> assert.equal(anchor.clone().subtract(1, 'day').calendar(anchor), 'Yesterday at 12:00', 'yesterday uses parent version'); <add> assert.equal(anchor.clone().subtract(3, 'days').calendar(anchor), 'Last week at 12:00', 'last week uses parent version'); <add> assert.equal(anchor.clone().subtract(7, 'days').calendar(anchor), 'whatever', 'sameElse uses parent version -'); <add> assert.equal(anchor.clone().add(7, 'days').calendar(anchor), 'whatever', 'sameElse uses parent version +'); 
<add>}); <add> <add>test('missing', function (assert) { <add> moment.defineLocale('base-cal-2', { <add> calendar: { <add> sameDay: '[Today at] HH:mm', <add> nextDay: '[Tomorrow at] HH:mm', <add> nextWeek: '[Next week at] HH:mm', <add> lastDay: '[Yesterday at] HH:mm', <add> lastWeek: '[Last week at] HH:mm', <add> sameElse: '[whatever]' <add> } <add> }); <add> moment.defineLocale('child-cal-2', { <add> parentLocale: 'base-cal-2' <add> }); <add> moment.locale('child-cal-2'); <add> var anchor = moment.utc('2015-05-05T12:00:00', moment.ISO_8601); <add> assert.equal(anchor.clone().add(3, 'hours').calendar(anchor), 'Today at 15:00', 'today uses parent version'); <add> assert.equal(anchor.clone().add(1, 'day').calendar(anchor), 'Tomorrow at 12:00', 'tomorrow uses parent version'); <add> assert.equal(anchor.clone().add(3, 'days').calendar(anchor), 'Next week at 12:00', 'next week uses parent version'); <add> assert.equal(anchor.clone().subtract(1, 'day').calendar(anchor), 'Yesterday at 12:00', 'yesterday uses parent version'); <add> assert.equal(anchor.clone().subtract(3, 'days').calendar(anchor), 'Last week at 12:00', 'last week uses parent version'); <add> assert.equal(anchor.clone().subtract(7, 'days').calendar(anchor), 'whatever', 'sameElse uses parent version -'); <add> assert.equal(anchor.clone().add(7, 'days').calendar(anchor), 'whatever', 'sameElse uses parent version +'); <add>}); <add> <add>// Test function vs obj both directions <add> <add>test('long date format', function (assert) { <add> moment.defineLocale('base-ldf', { <add> longDateFormat : { <add> LTS : 'h:mm:ss A', <add> LT : 'h:mm A', <add> L : 'MM/DD/YYYY', <add> LL : 'MMMM D, YYYY', <add> LLL : 'MMMM D, YYYY h:mm A', <add> LLLL : 'dddd, MMMM D, YYYY h:mm A' <add> } <add> }); <add> moment.defineLocale('child-ldf', { <add> parentLocale: 'base-ldf', <add> longDateFormat: { <add> LLL : '[child] MMMM D, YYYY h:mm A', <add> LLLL : '[child] dddd, MMMM D, YYYY h:mm A' <add> } <add> }); <add> <add> moment.locale('child-ldf'); <add> var anchor = moment.utc('2015-09-06T12:34:56', moment.ISO_8601); <add> assert.equal(anchor.format('LTS'), '12:34:56 PM', 'LTS uses base'); <add> assert.equal(anchor.format('LT'), '12:34 PM', 'LT uses base'); <add> assert.equal(anchor.format('L'), '09/06/2015', 'L uses base'); <add> assert.equal(anchor.format('l'), '9/6/2015', 'l uses base'); <add> assert.equal(anchor.format('LL'), 'September 6, 2015', 'LL uses base'); <add> assert.equal(anchor.format('ll'), 'Sep 6, 2015', 'll uses base'); <add> assert.equal(anchor.format('LLL'), 'child September 6, 2015 12:34 PM', 'LLL uses child'); <add> assert.equal(anchor.format('lll'), 'child Sep 6, 2015 12:34 PM', 'lll uses child'); <add> assert.equal(anchor.format('LLLL'), 'child Sunday, September 6, 2015 12:34 PM', 'LLLL uses child'); <add> assert.equal(anchor.format('llll'), 'child Sun, Sep 6, 2015 12:34 PM', 'llll uses child'); <add>}); <add> <add>test('ordinal', function (assert) { <add> moment.defineLocale('base-ordinal-1', { <add> ordinal : '%dx' <add> }); <add> moment.defineLocale('child-ordinal-1', { <add> parentLocale: 'base-ordinal-1', <add> ordinal : '%dy' <add> }); <add> <add> assert.equal(moment.utc('2015-02-03', moment.ISO_8601).format('Do'), '3y', 'ordinal uses child string'); <add> <add> moment.defineLocale('base-ordinal-2', { <add> ordinal : '%dx' <add> }); <add> moment.defineLocale('child-ordinal-2', { <add> parentLocale: 'base-ordinal-2', <add> ordinal : function (num) { <add> return num + 'y'; <add> } <add> }); <add> <add> 
assert.equal(moment.utc('2015-02-03', moment.ISO_8601).format('Do'), '3y', 'ordinal uses child function'); <add> <add> moment.defineLocale('base-ordinal-3', { <add> ordinal : function (num) { <add> return num + 'x'; <add> } <add> }); <add> moment.defineLocale('child-ordinal-3', { <add> parentLocale: 'base-ordinal-3', <add> ordinal : '%dy' <add> }); <add> <add> assert.equal(moment.utc('2015-02-03', moment.ISO_8601).format('Do'), '3y', 'ordinal uses child string (overwrite parent function)'); <add>}); <add> <add>test('ordinal parse', function (assert) { <add> moment.defineLocale('base-ordinal-parse-1', { <add> ordinalParse : /\d{1,2}x/ <add> }); <add> moment.defineLocale('child-ordinal-parse-1', { <add> parentLocale: 'base-ordinal-parse-1', <add> ordinalParse : /\d{1,2}y/ <add> }); <add> <add> assert.ok(moment.utc('2015-01-1y', 'YYYY-MM-Do', true).isValid(), 'ordinal parse uses child'); <add> <add> moment.defineLocale('base-ordinal-parse-2', { <add> ordinalParse : /\d{1,2}x/ <add> }); <add> moment.defineLocale('child-ordinal-parse-2', { <add> parentLocale: 'base-ordinal-parse-2', <add> ordinalParse : null <add> }); <add> <add> assert.ok(moment.utc('2015-01-1', 'YYYY-MM-Do', true).isValid(), 'ordinal parse uses child (default)'); <add>}); <add> <add>test('months', function (assert) { <add> moment.defineLocale('base-months', { <add> months : 'One_Two_Three_Four_Five_Six_Seven_Eight_Nine_Ten_Eleven_Twelve'.split('_') <add> }); <add> moment.defineLocale('child-months', { <add> parentLocale: 'base-months', <add> months : 'First_Second_Third_Fourth_Fifth_Sixth_Seventh_Eighth_Ninth_Tenth_Eleventh_Twelveth '.split('_') <add> }); <add> assert.ok(moment.utc('2015-01-01', 'YYYY-MM-DD').format('MMMM'), 'First', 'months uses child'); <add>}); <ide><path>src/test/moment/locale_update.js <add>import { module, test } from '../qunit'; <add>import moment from '../../moment'; <add> <add>module('locale update'); <add> <add>test('calendar', function (assert) { <add> moment.defineLocale('cal', null); <add> moment.defineLocale('cal', { <add> calendar : { <add> sameDay: '[Today at] HH:mm', <add> nextDay: '[Tomorrow at] HH:mm', <add> nextWeek: '[Next week at] HH:mm', <add> lastDay: '[Yesterday at] HH:mm', <add> lastWeek: '[Last week at] HH:mm', <add> sameElse: '[whatever]' <add> } <add> }); <add> moment.updateLocale('cal', { <add> calendar: { <add> sameDay: '[Today] HH:mm', <add> nextDay: '[Tomorrow] HH:mm', <add> nextWeek: '[Next week] HH:mm' <add> } <add> }); <add> <add> moment.locale('cal'); <add> var anchor = moment.utc('2015-05-05T12:00:00', moment.ISO_8601); <add> assert.equal(anchor.clone().add(3, 'hours').calendar(anchor), 'Today 15:00', 'today uses child version'); <add> assert.equal(anchor.clone().add(1, 'day').calendar(anchor), 'Tomorrow 12:00', 'tomorrow uses child version'); <add> assert.equal(anchor.clone().add(3, 'days').calendar(anchor), 'Next week 12:00', 'next week uses child version'); <add> <add> assert.equal(anchor.clone().subtract(1, 'day').calendar(anchor), 'Yesterday at 12:00', 'yesterday uses parent version'); <add> assert.equal(anchor.clone().subtract(3, 'days').calendar(anchor), 'Last week at 12:00', 'last week uses parent version'); <add> assert.equal(anchor.clone().subtract(7, 'days').calendar(anchor), 'whatever', 'sameElse uses parent version -'); <add> assert.equal(anchor.clone().add(7, 'days').calendar(anchor), 'whatever', 'sameElse uses parent version +'); <add>}); <add> <add>test('missing', function (assert) { <add> moment.defineLocale('cal-2', null); <add> 
moment.defineLocale('cal-2', { <add> calendar: { <add> sameDay: '[Today at] HH:mm', <add> nextDay: '[Tomorrow at] HH:mm', <add> nextWeek: '[Next week at] HH:mm', <add> lastDay: '[Yesterday at] HH:mm', <add> lastWeek: '[Last week at] HH:mm', <add> sameElse: '[whatever]' <add> } <add> }); <add> moment.updateLocale('cal-2', { <add> }); <add> moment.locale('cal-2'); <add> var anchor = moment.utc('2015-05-05T12:00:00', moment.ISO_8601); <add> assert.equal(anchor.clone().add(3, 'hours').calendar(anchor), 'Today at 15:00', 'today uses parent version'); <add> assert.equal(anchor.clone().add(1, 'day').calendar(anchor), 'Tomorrow at 12:00', 'tomorrow uses parent version'); <add> assert.equal(anchor.clone().add(3, 'days').calendar(anchor), 'Next week at 12:00', 'next week uses parent version'); <add> assert.equal(anchor.clone().subtract(1, 'day').calendar(anchor), 'Yesterday at 12:00', 'yesterday uses parent version'); <add> assert.equal(anchor.clone().subtract(3, 'days').calendar(anchor), 'Last week at 12:00', 'last week uses parent version'); <add> assert.equal(anchor.clone().subtract(7, 'days').calendar(anchor), 'whatever', 'sameElse uses parent version -'); <add> assert.equal(anchor.clone().add(7, 'days').calendar(anchor), 'whatever', 'sameElse uses parent version +'); <add>}); <add> <add>// Test function vs obj both directions <add> <add>test('long date format', function (assert) { <add> moment.defineLocale('ldf', null); <add> moment.defineLocale('ldf', { <add> longDateFormat : { <add> LTS : 'h:mm:ss A', <add> LT : 'h:mm A', <add> L : 'MM/DD/YYYY', <add> LL : 'MMMM D, YYYY', <add> LLL : 'MMMM D, YYYY h:mm A', <add> LLLL : 'dddd, MMMM D, YYYY h:mm A' <add> } <add> }); <add> moment.updateLocale('ldf', { <add> longDateFormat: { <add> LLL : '[child] MMMM D, YYYY h:mm A', <add> LLLL : '[child] dddd, MMMM D, YYYY h:mm A' <add> } <add> }); <add> <add> moment.locale('ldf'); <add> var anchor = moment.utc('2015-09-06T12:34:56', moment.ISO_8601); <add> assert.equal(anchor.format('LTS'), '12:34:56 PM', 'LTS uses base'); <add> assert.equal(anchor.format('LT'), '12:34 PM', 'LT uses base'); <add> assert.equal(anchor.format('L'), '09/06/2015', 'L uses base'); <add> assert.equal(anchor.format('l'), '9/6/2015', 'l uses base'); <add> assert.equal(anchor.format('LL'), 'September 6, 2015', 'LL uses base'); <add> assert.equal(anchor.format('ll'), 'Sep 6, 2015', 'll uses base'); <add> assert.equal(anchor.format('LLL'), 'child September 6, 2015 12:34 PM', 'LLL uses child'); <add> assert.equal(anchor.format('lll'), 'child Sep 6, 2015 12:34 PM', 'lll uses child'); <add> assert.equal(anchor.format('LLLL'), 'child Sunday, September 6, 2015 12:34 PM', 'LLLL uses child'); <add> assert.equal(anchor.format('llll'), 'child Sun, Sep 6, 2015 12:34 PM', 'llll uses child'); <add>}); <add> <add>test('ordinal', function (assert) { <add> moment.defineLocale('ordinal-1', null); <add> moment.defineLocale('ordinal-1', { <add> ordinal : '%dx' <add> }); <add> moment.updateLocale('ordinal-1', { <add> ordinal : '%dy' <add> }); <add> <add> assert.equal(moment.utc('2015-02-03', moment.ISO_8601).format('Do'), '3y', 'ordinal uses child string'); <add> <add> moment.defineLocale('ordinal-2', null); <add> moment.defineLocale('ordinal-2', { <add> ordinal : '%dx' <add> }); <add> moment.defineLocale('ordinal-2', { <add> parentLocale: 'ordinal-2', <add> ordinal : function (num) { <add> return num + 'y'; <add> } <add> }); <add> <add> assert.equal(moment.utc('2015-02-03', moment.ISO_8601).format('Do'), '3y', 'ordinal uses child function'); <add> <add> 
moment.defineLocale('ordinal-3', null); <add> moment.defineLocale('ordinal-3', { <add> ordinal : function (num) { <add> return num + 'x'; <add> } <add> }); <add> moment.updateLocale('ordinal-3', { <add> ordinal : '%dy' <add> }); <add> <add> assert.equal(moment.utc('2015-02-03', moment.ISO_8601).format('Do'), '3y', 'ordinal uses child string (overwrite parent function)'); <add>}); <add> <add>test('ordinal parse', function (assert) { <add> moment.defineLocale('ordinal-parse-1', null); <add> moment.defineLocale('ordinal-parse-1', { <add> ordinalParse : /\d{1,2}x/ <add> }); <add> moment.updateLocale('ordinal-parse-1', { <add> ordinalParse : /\d{1,2}y/ <add> }); <add> <add> assert.ok(moment.utc('2015-01-1y', 'YYYY-MM-Do', true).isValid(), 'ordinal parse uses child'); <add> <add> moment.defineLocale('ordinal-parse-2', null); <add> moment.defineLocale('ordinal-parse-2', { <add> ordinalParse : /\d{1,2}x/ <add> }); <add> moment.updateLocale('ordinal-parse-2', { <add> ordinalParse : null <add> }); <add> <add> assert.ok(moment.utc('2015-01-1', 'YYYY-MM-Do', true).isValid(), 'ordinal parse uses child (default)'); <add>}); <add> <add>test('months', function (assert) { <add> moment.defineLocale('months', null); <add> moment.defineLocale('months', { <add> months : 'One_Two_Three_Four_Five_Six_Seven_Eight_Nine_Ten_Eleven_Twelve'.split('_') <add> }); <add> moment.updateLocale('months', { <add> parentLocale: 'base-months', <add> months : 'First_Second_Third_Fourth_Fifth_Sixth_Seventh_Eighth_Ninth_Tenth_Eleventh_Twelveth '.split('_') <add> }); <add> assert.ok(moment.utc('2015-01-01', 'YYYY-MM-DD').format('MMMM'), 'First', 'months uses child'); <add>});
9
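As a usage sketch of the inheritance and updating APIs introduced above: the locale name, month abbreviations and ordinal below are invented, and the snippet assumes a moment build that already contains this change.

var moment = require('moment');

// Child locale that inherits everything from English except the month names.
moment.defineLocale('en-custom', {
  parentLocale: 'en',
  months: 'Jan_Feb_Mar_Apr_May_Jun_Jul_Aug_Sep_Oct_Nov_Dec'.split('_')
});

// Adjust the already-defined locale in place instead of redefining it.
moment.updateLocale('en-custom', {
  ordinal: function (num) { return num + '.'; }
});

moment.locale('en-custom');
console.log(moment('2016-01-05').format('MMMM Do')); // e.g. "Jan 5."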
Text
Text
add v3.24.6 to changelog.md
89b46e4f61298a8b40fd72aeb4969c89eab96858
<ide><path>CHANGELOG.md <ide> <ide> - [glimmerjs/glimmer-vm#1351](https://github.com/glimmerjs/glimmer-vm/pull/1351) Support lexical scope in loose mode <ide> <add>### v3.24.6 (October 18, 2021) <add> <add>- [glimmerjs/glimmer-vm#1347](https://github.com/glimmerjs/glimmer-vm/pull/1347) Avoid using a nested WeakMap for manager instances for a given owner (makes memory leak debugging easier) <add> <ide> ## v3.28.1 (August 30, 2021) <ide> <ide> - [#19733](https://github.com/emberjs/ember.js/pull/19733) [BUGFIX] Ensure that using `routerService.urlFor(...)` and `routerService.recognize(...)` does not error if the router is not fully initialized
1
PHP
PHP
reduce required precision in tests
4bf2de2e6187d7e62c7eb2f2db1a1a9cb0eef3ab
<ide><path>tests/TestCase/ORM/QueryTest.php <ide> public function testSelectLargeNumbers() <ide> $this->assertNotEmpty($out, 'Should get a record'); <ide> $this->assertSame($big, $out->cost); <ide> <del> $small = '0.1234567890123456789'; <add> $small = '0.123456789012345'; <ide> $entity = $table->newEntity(['fraction' => $small]); <ide> <ide> $table->save($entity); <ide> public function testSelectLargeNumbers() <ide> ]) <ide> ->first(); <ide> $this->assertNotEmpty($out, 'Should get a record'); <del> $this->assertRegExp('/^0?\.1234567890123456789$/', $out->fraction); <add> $this->assertRegExp('/^0?\.123456789012345/', $out->fraction); <ide> <del> $small = 0.1234567890123456789; <add> $small = 0.123456789012345; <ide> $entity = $table->newEntity(['fraction' => $small]); <ide> <ide> $table->save($entity); <ide> public function testSelectLargeNumbers() <ide> ->first(); <ide> $this->assertNotEmpty($out, 'Should get a record'); <ide> // There will be loss of precision if too large/small value is set as float instead of string. <del> $this->assertRegExp('/^0?\.123456789012350+$/', $out->fraction); <add> $this->assertRegExp('/^0?\.1234567890123\d+$/', $out->fraction); <ide> } <ide> <ide> /**
1
Go
Go
fix typo in builder/dispatchers.go
bbdf045ac1dfa8fc78b1c932736fe6400eecdf63
<ide><path>builder/dispatchers.go <ide> func from(b *Builder, args []string, attributes map[string]bool, original string <ide> } <ide> <ide> // note that the top level err will still be !nil here if IsNotExist is <del> // not the error. This approach just simplifies hte logic a bit. <add> // not the error. This approach just simplifies the logic a bit. <ide> if err != nil { <ide> return err <ide> }
1
Javascript
Javascript
verify shell option internals
71f541411d56b3dceb087c0c0c50b86fc451e069
<ide><path>test/parallel/test-child-process-spawnsync-shell.js <ide> const env = cp.spawnSync(`"${process.execPath}" -pe process.env.BAZ`, { <ide> }); <ide> <ide> assert.strictEqual(env.stdout.toString().trim(), 'buzz'); <add> <add>// Verify that the shell internals work properly across platforms. <add>{ <add> const originalComspec = process.env.comspec; <add> <add> // Enable monkey patching process.platform. <add> const originalPlatform = process.platform; <add> let platform = null; <add> Object.defineProperty(process, 'platform', { get: () => platform }); <add> <add> function test(testPlatform, shell, shellOutput) { <add> platform = testPlatform; <add> <add> const cmd = 'not_a_real_command'; <add> const shellFlags = platform === 'win32' ? ['/d', '/s', '/c'] : ['-c']; <add> const outputCmd = platform === 'win32' ? `"${cmd}"` : cmd; <add> const windowsVerbatim = platform === 'win32' ? true : undefined; <add> const result = cp.spawnSync(cmd, { shell }); <add> <add> assert.strictEqual(result.file, shellOutput); <add> assert.deepStrictEqual(result.args, <add> [shellOutput, ...shellFlags, outputCmd]); <add> assert.strictEqual(result.options.shell, shell); <add> assert.strictEqual(result.options.file, result.file); <add> assert.deepStrictEqual(result.options.args, result.args); <add> assert.strictEqual(result.options.windowsVerbatimArguments, <add> windowsVerbatim); <add> } <add> <add> // Test Unix platforms with the default shell. <add> test('darwin', true, '/bin/sh'); <add> <add> // Test Unix platforms with a user specified shell. <add> test('darwin', '/bin/csh', '/bin/csh'); <add> <add> // Test Android platforms. <add> test('android', true, '/system/bin/sh'); <add> <add> // Test Windows platforms with a user specified shell. <add> test('win32', 'powershell.exe', 'powershell.exe'); <add> <add> // Test Windows platforms with the default shell and no comspec. <add> delete process.env.comspec; <add> test('win32', true, 'cmd.exe'); <add> <add> // Test Windows platforms with the default shell and a comspec value. <add> process.env.comspec = 'powershell.exe'; <add> test('win32', true, process.env.comspec); <add> <add> // Restore the original value of process.platform. <add> platform = originalPlatform; <add> <add> // Restore the original comspec environment variable if necessary. <add> if (originalComspec) <add> process.env.comspec = originalComspec; <add>}
1
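A short example of the option whose internals the test above exercises, assuming any reasonably recent Node.js; the echoed strings are arbitrary.

const cp = require('child_process');

// With shell: true the command string is handed to /bin/sh (cmd.exe on
// Windows), so shell syntax such as && is interpreted.
const result = cp.spawnSync('echo hello && echo world', {
  shell: true,
  encoding: 'utf8'
});

console.log(result.stdout);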
Javascript
Javascript
fix linter error
62d9d02f979dcc6dd5725c93edda0b40504fd657
<ide><path>src/window-event-handler.js <ide> class WindowEventHandler { <ide> if (/^https?:\/\//.test(uri)) { <ide> this.applicationDelegate.openExternal(uri) <ide> } else if (uri.startsWith('atom://')) { <del> this.atomEnvironment.uriHandlerRegistry.handleURI(uri); <add> this.atomEnvironment.uriHandlerRegistry.handleURI(uri) <ide> } <ide> } <ide> }
1
PHP
PHP
allow sub-directories for fixture folder
316fd50443f9713094c978caef52cbf5ea0ca619
<ide><path>lib/Cake/TestSuite/Fixture/CakeFixtureManager.php
<ide> protected function _loadFixtures($fixtures) {
<ide> $fixture = substr($fixture, strlen('core.'));
<ide> $fixturePaths[] = CAKE . 'Test' . DS . 'Fixture';
<ide> } elseif (strpos($fixture, 'app.') === 0) {
<del> $fixture = substr($fixture, strlen('app.'));
<add> $fixturePrefixLess = substr($fixture, strlen('app.'));
<add> $pathTokenArray = explode('/', $fixturePrefixLess);
<add> $fixture = array_pop($pathTokenArray);
<add> $additionalPath = '';
<add> foreach ($pathTokenArray as $pathToken) {
<add> $additionalPath .= DS . $pathToken;
<add> }
<ide> $fixturePaths = array(
<del> TESTS . 'Fixture'
<add> TESTS . 'Fixture' . $additionalPath
<ide> );
<ide> } elseif (strpos($fixture, 'plugin.') === 0) {
<del> $parts = explode('.', $fixture, 3);
<del> $pluginName = $parts[1];
<del> $fixture = $parts[2];
<add> $explodedFixture = explode('.', $fixture, 3);
<add> $pluginName = $explodedFixture[1];
<add> $pathTokenArray = explode('/', $explodedFixture[2]);
<add> $fixture = array_pop($pathTokenArray);
<add> $additionalPath = '';
<add> foreach ($pathTokenArray as $pathToken) {
<add> $additionalPath .= DS . $pathToken;
<add> }
<ide> $fixturePaths = array(
<del> CakePlugin::path(Inflector::camelize($pluginName)) . 'Test' . DS . 'Fixture',
<del> TESTS . 'Fixture'
<add> CakePlugin::path(Inflector::camelize($pluginName)) . 'Test' . DS . 'Fixture' . $additionalPath,
<add> TESTS . 'Fixture' . $additionalPath
<ide> );
<ide> } else {
<ide> $fixturePaths = array(
1
Javascript
Javascript
use python3 instead of python
f9bfe785ee4aaf2cffc3059b56dfa28d0bb9a57e
<ide><path>test/parallel/test-child-process-set-blocking.js <ide> const assert = require('assert'); <ide> const ch = require('child_process'); <ide> <ide> const SIZE = 100000; <del>const python = process.env.PYTHON || 'python'; <add>const python = process.env.PYTHON || (common.isWindows ? 'python' : 'python3'); <ide> <ide> const cp = ch.spawn(python, ['-c', `print(${SIZE} * "C")`], { <ide> stdio: 'inherit'
1
PHP
PHP
return actual receive count from sqs queue driver
6d1913b9f4d303f8464625dfef33f06cfc404328
<ide><path>src/Illuminate/Queue/Jobs/SqsJob.php <ide> public function release($delay = 0) <ide> */ <ide> public function attempts() <ide> { <del> return 1; <add> return (int) $this->job['Attributes']['ApproximateReceiveCount']; <ide> } <ide> <ide> /** <ide><path>src/Illuminate/Queue/SqsQueue.php <ide> public function pop($queue = null) <ide> { <ide> $queue = $this->getQueue($queue); <ide> <del> $response = $this->sqs->receiveMessage(array('QueueUrl' => $queue)); <add> $response = $this->sqs->receiveMessage( <add> array('QueueUrl' => $queue, 'AttributeNames' => array('ApproximateReceiveCount')) <add> ); <ide> <ide> if (count($response['Messages']) > 0) <ide> {
2
Javascript
Javascript
add ecmascript5 compatibility to three.extend
82af7f0c9fc60e50d1c82c3eaf7bc6f5f9275076
<ide><path>src/Three.js <ide> String.prototype.trim = String.prototype.trim || function () { <ide> // based on https://github.com/documentcloud/underscore/blob/bf657be243a075b5e72acc8a83e6f12a564d8f55/underscore.js#L767 <ide> THREE.extend = function ( obj, source ) { <ide> <del> for (var prop in source) { <add> // ECMAScript5 compatibility based on: http://www.nczonline.net/blog/2012/12/11/are-your-mixins-ecmascript-5-compatible/ <add> if ( Object.keys ) { <ide> <del> obj[prop] = source[prop]; <add> Object.keys( source ).forEach( <add> function ( prop ) { <add> Object.defineProperty( obj, prop, Object.getOwnPropertyDescriptor( source, prop ) ); <add> } <add> ); <add> <add> } <add> else { <add> <add> for ( var prop in source ) { <add> <add> if ( source.hasOwnProperty( prop ) ) { <add> <add> obj[prop] = source[prop]; <add> <add> } <add> <add> } <ide> <ide> } <ide>
1
Ruby
Ruby
fix wrong tap argument check
6f9f3faef3d566641b3fa18e887819da7889018a
<ide><path>Library/Homebrew/cmd/test-bot.rb <ide> def resolve_test_tap <ide> bot_argv = ENV["UPSTREAM_BOT_PARAMS"].split " " <ide> bot_argv.extend HomebrewArgvExtension <ide> if tap = bot_argv.value("tap") <del> return Tap.fetch(tap) if url_path =~ HOMEBREW_TAP_REGEX <add> return Tap.fetch(tap) <ide> end <ide> end <ide> <ide> if git_url = ENV["UPSTREAM_GIT_URL"] || ENV["GIT_URL"] <ide> # Also can get tap from Jenkins GIT_URL. <ide> url_path = git_url.sub(%r{^https?://github\.com/}, "").chomp("/").sub(%r{\.git$}, "") <ide> begin <del> return Tap.fetch(url_path) <add> return Tap.fetch(url_path) if url_path =~ HOMEBREW_TAP_REGEX <ide> rescue <ide> end <ide> end
1
Javascript
Javascript
fix coverage reporting
85e1819d8bc8fad398ac0674064a19a691ea0fd7
<ide><path>lib/internal/util.js <ide> function isInsideNodeModules() { <ide> // Use `runInNewContext()` to get something tamper-proof and <ide> // side-effect-free. Since this is currently only used for a deprecated API, <ide> // the perf implications should be okay. <del> getStructuredStack = runInNewContext('(' + function() { <add> getStructuredStack = runInNewContext(`(function() { <ide> Error.prepareStackTrace = function(err, trace) { <ide> err.stack = trace; <ide> }; <ide> Error.stackTraceLimit = Infinity; <ide> <ide> return function structuredStack() { <del> // eslint-disable-next-line no-restricted-syntax <ide> return new Error().stack; <ide> }; <del> } + ')()', {}, { filename: 'structured-stack' }); <add> })()`, {}, { filename: 'structured-stack' }); <ide> } <ide> <ide> const stack = getStructuredStack();
1
Text
Text
update index.md absolute value
9201699caf9fd0ff726aa508da71eb697fe0abca
<ide><path>guide/english/mathematics/absolute-value/index.md <ide> Pythagorean Theorem: If z=a+bi, where a=Re{z} and b=Im{z}, then |z|=sqrt(a^2+b^2 <ide> * **Simplify |(-5)^3|** <ide> |-125| = 125 <ide> <add>* **Simplify -|(4)^2|** <add>-|16| = -16 <add> <ide> #### More Information: <ide> [Purplemath](https://www.purplemath.com/modules/absolute.htm) <ide>
1
Text
Text
add section on iterator-based loops
0b964091c9e6c2639206eda920f7ca9033404192
<ide><path>guide/english/cplusplus/for-loop/index.md <ide> For loop is an entry controlled loop unlike do-while loop. <ide> <ide> ## Syntax <ide> <del>``` <del>for (init; condition; increment ) <del>{ <add>```c++ <add>for (init; condition; increment ) { <ide> update_statement(s); <ide> } <ide> ``` <ide> <ide> The increment can also placed inside the for loop i.e. in its body- <ide> <del>``` <del>for ( init; condition;) <del>{ <add>```c++ <add>for ( init; condition;) { <ide> update_statement(s); <ide> increment; <ide> } <ide> ``` <ide> <ide> It is also allowed to ignore the init variables if and only if they are declared beforehand. For example : <ide> <del>``` <add>```c++ <ide> int a = 1; <del>for (; a <= 10 ;) <del>{ <add>for (; a <= 10 ;) { <ide> cout << a << '\n'; <ide> a++; <ide> } <ide> The update statement is used to alter the loop variable by using simple operatio <ide> You will often see an increment operation as the update statement (e.g. i++, count++). This is often seen as one of the distinguishing features and possible name sources for the C++ language. <ide> <ide> ## Implementation <del>```C++ <add>```c++ <ide> #include <iostream> <ide> using std::cout; // Here we use the scope resolution operator to define the scope of the standard functions as std <ide> using std::endl; <ide> C++ also has what we call "range-based" `for` loops which iterate through all th <ide> ### Syntax <ide> <ide> ```c++ <del>for ( element: container ) <add>for ( element: container ) { <ide> statement(s); <ide> } <ide> ``` <ide> <ide> ```c++ <ide> int[5] array = { 1, 2, 3, 4, 5 } <del>for ( int i: array ) <add>for ( int i: array ) { <ide> cout << i << endl; <ide> } <ide> ``` <ide> Output: <ide> 5 <ide> ``` <ide> <add>## Iterator-based for-loop <add>Iterator based for loops are also possible in C++ and functionality for them exists in many of the data structures found within the STL. Unlike for-each loops, iterator based loops allow for mutating the contents of the container during iteration. This is rather useful when one needs to remove or insert values while looping over data. <add> <add>### Syntax <add>```c++ <add>// Create a vector <add>std::vector<int> vec; <add> <add>// Populate the vector <add>vec.push_back(1); <add>vec.push_back(2); <add>vec.push_back(3); <add> <add>// Iterate over the vector using the 'it' object. <add>for(std::vector<string>::iterator it = vec.begin(); it != vec.end(); it++) { <add> // Print the value held by the iterator (this is the object or primitive contained within <add> // the vector, in this case, an int). <add> cout<< *it << endl; // prints d. <add>} <add>``` <add> <ide> ## Applications of the for loops <ide> <ide> ### Use as infinite loops <ide> <ide> This C-style for-loop is commonly the source of an infinite loop since the fundamental steps of iteration are completely in the control of the programmer. In fact, when infinite loops are intended, this type of for-loop can be used (with empty expressions), such as: <del>``` <del>for (;;) <add>```c++ <add>for (;;) { <ide> //loop body <add>} <ide> ``` <ide> <ide> ## Additional Resources
1
PHP
PHP
fix overzealous array cast in multicheckbox
292d7e81cdb791b0edbcf730424c6ee5a512bc56
<ide><path>src/View/Helper/FormHelper.php <ide> public function select($fieldName, $options = [], array $attributes = []) { <ide> * Can be used in place of a select box with the multiple attribute. <ide> * <ide> * @param string $fieldName Name attribute of the SELECT <del> * @param array $options Array of the OPTION elements (as 'value'=>'Text' pairs) to be used in the <add> * @param array $options|\Traversable Array of the OPTION elements (as 'value'=>'Text' pairs) to be used in the <ide> * checkboxes element. <ide> * @param array $attributes The HTML attributes of the select element. <ide> * @return string Formatted SELECT element <ide> * @see \Cake\View\Helper\FormHelper::select() for supported option formats. <ide> */ <del> public function multiCheckbox($fieldName, array $options, array $attributes = []) { <add> public function multiCheckbox($fieldName, $options, array $attributes = []) { <ide> $attributes += [ <ide> 'disabled' => null, <ide> 'escape' => true, <ide><path>tests/TestCase/View/Helper/FormHelperTest.php <ide> public function testSelectCheckboxMultipleOverrideName() { <ide> ); <ide> $this->assertTags($result, $expected); <ide> <add> $result = $this->Form->multiCheckbox( <add> 'category', <add> new Collection(['1', '2']), <add> ['name' => 'fish'] <add> ); <ide> $result = $this->Form->multiCheckbox('category', ['1', '2'], [ <ide> 'name' => 'fish', <ide> ]);
2
Ruby
Ruby
fix rubocop offense
86ee45477c23b413779a263267417ba270b5946d
<ide><path>actioncable/lib/rails/generators/channel/channel_generator.rb <ide> def import_channels_in_javascript_entrypoint <ide> <ide> def import_channel_in_javascript_entrypoint <ide> append_to_file "app/javascript/channels/index.js", <del> using_node? ? %(import "./#{file_name}_channel"\n) : %(import "channels/#{file_name}_channel"\n) <add> using_node? ? %(import "./#{file_name}_channel"\n) : %(import "channels/#{file_name}_channel"\n) <ide> end <ide> <ide> def install_javascript_dependencies
1
Ruby
Ruby
fix force_homebrew_on_linux behaviour.""
7448a6be587a84ee6a6df9c5cf81c81e5f56ac82
<ide><path>Library/Homebrew/extend/os/mac/software_spec.rb <ide> # typed: false <ide> # frozen_string_literal: true <ide> <add># The Library/Homebrew/extend/os/software_spec.rb conditional logic will need to be more nuanced <add># if this file ever includes more than `uses_from_macos`. <ide> class SoftwareSpec <ide> undef uses_from_macos <ide> <ide><path>Library/Homebrew/extend/os/software_spec.rb <ide> # typed: strict <ide> # frozen_string_literal: true <ide> <del>if OS.linux? <del> require "extend/os/linux/software_spec" <del>elsif OS.mac? <add># This logic will need to be more nuanced if this file includes more than `uses_from_macos`. <add>if OS.mac? || Homebrew::EnvConfig.force_homebrew_on_linux? <ide> require "extend/os/mac/software_spec" <add>elsif OS.linux? <add> require "extend/os/linux/software_spec" <ide> end
2
Go
Go
apply build labels to images with only a from tag
1a85c8ebbe1ab508bcd47b883b9732c032509503
<ide><path>builder/dockerfile/builder.go <ide> func (b *Builder) build(config *types.ImageBuildOptions, context builder.Context <ide> } <ide> return "", err <ide> } <add> <add> // Commit the layer when there are only one children in <add> // the dockerfile, this is only the `FROM` tag, and <add> // build labels. Otherwise, the new image won't be <add> // labeled properly. <add> // Commit here, so the ID of the final image is reported <add> // properly. <add> if len(b.dockerfile.Children) == 1 && len(b.options.Labels) > 0 { <add> b.commit("", b.runConfig.Cmd, "") <add> } <add> <ide> shortImgID = stringid.TruncateID(b.image) <ide> fmt.Fprintf(b.Stdout, " ---> %s\n", shortImgID) <ide> if b.options.Remove { <ide><path>builder/dockerfile/internals.go <ide> func (b *Builder) processImageFrom(img builder.Image) error { <ide> b.image = img.ImageID() <ide> <ide> if img.RunConfig() != nil { <del> b.runConfig = img.RunConfig() <add> imgConfig := *img.RunConfig() <add> // inherit runConfig labels from the current <add> // state if they've been set already. <add> // Ensures that images with only a FROM <add> // get the labels populated properly. <add> if b.runConfig.Labels != nil { <add> if imgConfig.Labels == nil { <add> imgConfig.Labels = make(map[string]string) <add> } <add> for k, v := range b.runConfig.Labels { <add> imgConfig.Labels[k] = v <add> } <add> } <add> b.runConfig = &imgConfig <ide> } <ide> } <ide> <ide><path>integration-cli/docker_cli_build_test.go <ide> func (s *DockerSuite) TestBuildLabel(c *check.C) { <ide> _, err := buildImage(name, ` <ide> FROM `+minimalBaseImage()+` <ide> LABEL default foo <del>`, false, []string{"--label", testLabel}...) <add>`, false, "--label", testLabel) <ide> <del> if err != nil { <del> c.Fatal("error building image with labels", err) <del> } <add> c.Assert(err, checker.IsNil) <ide> <ide> res := inspectFieldJSON(c, name, "Config.Labels") <ide> <ide> func (s *DockerSuite) TestBuildLabel(c *check.C) { <ide> } <ide> } <ide> <add>func (s *DockerSuite) TestBuildLabelOneNode(c *check.C) { <add> name := "testbuildlabel" <add> <add> _, err := buildImage(name, "FROM busybox", false, "--label", "foo=bar") <add> <add> c.Assert(err, checker.IsNil) <add> <add> res, err := inspectImage(name, "json .Config.Labels") <add> c.Assert(err, checker.IsNil) <add> var labels map[string]string <add> <add> if err := json.Unmarshal([]byte(res), &labels); err != nil { <add> c.Fatal(err) <add> } <add> <add> v, ok := labels["foo"] <add> if !ok { <add> c.Fatal("label `foo` not found in image") <add> } <add> c.Assert(v, checker.Equals, "bar") <add>} <add> <ide> func (s *DockerSuite) TestBuildLabelCacheCommit(c *check.C) { <ide> name := "testbuildlabelcachecommit" <ide> testLabel := "foo" <ide> func (s *DockerSuite) TestBuildLabelCacheCommit(c *check.C) { <ide> _, err := buildImage(name, ` <ide> FROM `+minimalBaseImage()+` <ide> LABEL default foo <del>`, true, []string{"--label", testLabel}...) 
<add>`, true, "--label", testLabel) <ide> <del> if err != nil { <del> c.Fatal("error building image with labels", err) <del> } <add> c.Assert(err, checker.IsNil) <ide> <ide> res := inspectFieldJSON(c, name, "Config.Labels") <ide> <ide><path>integration-cli/docker_utils.go <ide> func inspectMountPointJSON(j, destination string) (types.MountPoint, error) { <ide> return *m, nil <ide> } <ide> <add>func inspectImage(name, filter string) (string, error) { <add> args := []string{"inspect", "--type", "image"} <add> if filter != "" { <add> format := fmt.Sprintf("{{%s}}", filter) <add> args = append(args, "-f", format) <add> } <add> args = append(args, name) <add> inspectCmd := exec.Command(dockerBinary, args...) <add> out, exitCode, err := runCommandWithOutput(inspectCmd) <add> if err != nil || exitCode != 0 { <add> return "", fmt.Errorf("failed to inspect %s: %s", name, out) <add> } <add> return strings.TrimSpace(out), nil <add>} <add> <ide> func getIDByName(name string) (string, error) { <ide> return inspectFieldWithError(name, "Id") <ide> }
4
Python
Python
fix export_tfhub module with bertv2
aa870ff4df967857e21a5460fb8f9ca25f0cd4fd
<ide><path>official/nlp/tools/export_tfhub_lib.py <ide> def _create_model( <ide> """Creates the model to export and the model to restore the checkpoint. <ide> <ide> Args: <del> bert_config: A legacy `BertConfig` to create a `BertEncoder` object. <del> Exactly one of encoder_config and bert_config must be set. <add> bert_config: A legacy `BertConfig` to create a `BertEncoder` object. Exactly <add> one of encoder_config and bert_config must be set. <ide> encoder_config: An `EncoderConfig` to create an encoder of the configured <ide> type (`BertEncoder` or other). <del> with_mlm: A bool to control the second component of the result. <del> If True, will create a `BertPretrainerV2` object; otherwise, will <del> create a `BertEncoder` object. <add> with_mlm: A bool to control the second component of the result. If True, <add> will create a `BertPretrainerV2` object; otherwise, will create a <add> `BertEncoder` object. <ide> <ide> Returns: <ide> A Tuple of (1) a Keras model that will be exported, (2) a `BertPretrainerV2` <ide> def _create_model( <ide> <ide> # Convert from list of named inputs to dict of inputs keyed by name. <ide> # Only the latter accepts a dict of inputs after restoring from SavedModel. <del> encoder_inputs_dict = {x.name: x for x in encoder.inputs} <add> if isinstance(encoder.inputs, list) or isinstance(encoder.inputs, tuple): <add> encoder_inputs_dict = {x.name: x for x in encoder.inputs} <add> else: <add> # encoder.inputs by default is dict for BertEncoderV2. <add> encoder_inputs_dict = encoder.inputs <ide> encoder_output_dict = encoder(encoder_inputs_dict) <ide> # For interchangeability with other text representations, <ide> # add "default" as an alias for BERT's whole-input reptesentations. <ide> def export_model(export_path: Text, <ide> encoder_config: An optional `encoders.EncoderConfig` object. <ide> model_checkpoint_path: The path to the checkpoint. <ide> with_mlm: Whether to export the additional mlm sub-object. <del> copy_pooler_dense_to_encoder: Whether to copy the pooler's dense layer <del> used in the next sentence prediction task to the encoder. <add> copy_pooler_dense_to_encoder: Whether to copy the pooler's dense layer used <add> in the next sentence prediction task to the encoder. <ide> vocab_file: The path to the wordpiece vocab file, or None. <del> sp_model_file: The path to the sentencepiece model file, or None. <del> Exactly one of vocab_file and sp_model_file must be set. <add> sp_model_file: The path to the sentencepiece model file, or None. Exactly <add> one of vocab_file and sp_model_file must be set. <ide> do_lower_case: Whether to lower-case text before tokenization. <ide> """ <ide> if with_mlm: <del> core_model, pretrainer = _create_model(bert_config=bert_config, <del> encoder_config=encoder_config, <del> with_mlm=with_mlm) <add> core_model, pretrainer = _create_model( <add> bert_config=bert_config, <add> encoder_config=encoder_config, <add> with_mlm=with_mlm) <ide> encoder = pretrainer.encoder_network <ide> # It supports both the new pretrainer checkpoint produced by TF-NLP and <ide> # the checkpoint converted from TF1 (original BERT, SmallBERTs). 
<ide> checkpoint_items = pretrainer.checkpoint_items <ide> checkpoint = tf.train.Checkpoint(**checkpoint_items) <ide> else: <del> core_model, encoder = _create_model(bert_config=bert_config, <del> encoder_config=encoder_config, <del> with_mlm=with_mlm) <add> core_model, encoder = _create_model( <add> bert_config=bert_config, <add> encoder_config=encoder_config, <add> with_mlm=with_mlm) <ide> checkpoint = tf.train.Checkpoint( <ide> model=encoder, # Legacy checkpoints. <ide> encoder=encoder) <ide> def __init__(self, bert_pack_inputs: layers.BertPackInputs): <ide> # overridable. Having this dynamically determined default argument <ide> # requires self.__call__ to be defined in this indirect way. <ide> default_seq_length = bert_pack_inputs.seq_length <add> <ide> @tf.function(autograph=False) <ide> def call(inputs, seq_length=default_seq_length): <ide> return layers.BertPackInputs.bert_pack_inputs( <del> inputs, seq_length=seq_length, <add> inputs, <add> seq_length=seq_length, <ide> start_of_sequence_id=bert_pack_inputs.start_of_sequence_id, <ide> end_of_segment_id=bert_pack_inputs.end_of_segment_id, <ide> padding_id=bert_pack_inputs.padding_id) <add> <ide> self.__call__ = call <ide> <ide> for ragged_rank in range(1, 3): <ide> for num_segments in range(1, 3): <del> _ = self.__call__.get_concrete_function( <del> [tf.RaggedTensorSpec([None] * (ragged_rank + 1), dtype=tf.int32) <del> for _ in range(num_segments)], <del> seq_length=tf.TensorSpec([], tf.int32)) <add> _ = self.__call__.get_concrete_function([ <add> tf.RaggedTensorSpec([None] * (ragged_rank + 1), dtype=tf.int32) <add> for _ in range(num_segments) <add> ], <add> seq_length=tf.TensorSpec( <add> [], tf.int32)) <ide> <ide> <ide> def create_preprocessing(*, <ide> def create_preprocessing(*, <ide> <ide> Args: <ide> vocab_file: The path to the wordpiece vocab file, or None. <del> sp_model_file: The path to the sentencepiece model file, or None. <del> Exactly one of vocab_file and sp_model_file must be set. <del> This determines the type of tokenzer that is used. <add> sp_model_file: The path to the sentencepiece model file, or None. Exactly <add> one of vocab_file and sp_model_file must be set. This determines the type <add> of tokenzer that is used. <ide> do_lower_case: Whether to do lower case. <ide> tokenize_with_offsets: Whether to include the .tokenize_with_offsets <ide> subobject. <del> default_seq_length: The sequence length of preprocessing results from <del> root callable. This is also the default sequence length for the <add> default_seq_length: The sequence length of preprocessing results from root <add> callable. This is also the default sequence length for the <ide> bert_pack_inputs subobject. 
<ide> <ide> Returns: <ide> def create_preprocessing(*, <ide> <ide> def _move_to_tmpdir(file_path: Optional[Text], tmpdir: Text) -> Optional[Text]: <ide> """Returns new path with same basename and hash of original path.""" <del> if file_path is None: return None <add> if file_path is None: <add> return None <ide> olddir, filename = os.path.split(file_path) <ide> hasher = hashlib.sha1() <ide> hasher.update(olddir.encode("utf-8")) <ide> def _check_no_assert(saved_model_path): <ide> <ide> assert_nodes = [] <ide> graph_def = saved_model.meta_graphs[0].graph_def <del> assert_nodes += ["node '{}' in global graph".format(n.name) <del> for n in graph_def.node if n.op == "Assert"] <add> assert_nodes += [ <add> "node '{}' in global graph".format(n.name) <add> for n in graph_def.node <add> if n.op == "Assert" <add> ] <ide> for fdef in graph_def.library.function: <ide> assert_nodes += [ <ide> "node '{}' in function '{}'".format(n.name, fdef.signature.name) <del> for n in fdef.node_def if n.op == "Assert"] <add> for n in fdef.node_def <add> if n.op == "Assert" <add> ] <ide> if assert_nodes: <ide> raise AssertionError( <ide> "Internal tool error: " <ide><path>official/nlp/tools/export_tfhub_lib_test.py <ide> from official.nlp.tools import export_tfhub_lib <ide> <ide> <del>def _get_bert_config_or_encoder_config(use_bert_config, hidden_size, <del> num_hidden_layers, vocab_size=100): <del> """Returns config args for export_tfhub_lib._create_model().""" <add>def _get_bert_config_or_encoder_config(use_bert_config, <add> hidden_size, <add> num_hidden_layers, <add> encoder_type="albert", <add> vocab_size=100): <add> """Generates config args for export_tfhub_lib._create_model(). <add> <add> Args: <add> use_bert_config: bool. If True, returns legacy BertConfig. <add> hidden_size: int. <add> num_hidden_layers: int. <add> encoder_type: str. Can be ['albert', 'bert', 'bert_v2']. If use_bert_config <add> == True, then model_type is not used. <add> vocab_size: int. <add> <add> Returns: <add> bert_config, encoder_config. Only one is not None. If <add> `use_bert_config` == True, the first config is valid. Otherwise <add> `bert_config` == None. <add> """ <ide> if use_bert_config: <ide> bert_config = configs.BertConfig( <ide> vocab_size=vocab_size, <ide> def _get_bert_config_or_encoder_config(use_bert_config, hidden_size, <ide> encoder_config = None <ide> else: <ide> bert_config = None <del> encoder_config = encoders.EncoderConfig( <del> type="albert", <del> albert=encoders.AlbertEncoderConfig( <del> vocab_size=vocab_size, <del> embedding_width=16, <del> hidden_size=hidden_size, <del> intermediate_size=32, <del> max_position_embeddings=128, <del> num_attention_heads=2, <del> num_layers=num_hidden_layers, <del> dropout_rate=0.1)) <add> if encoder_type == "albert": <add> encoder_config = encoders.EncoderConfig( <add> type="albert", <add> albert=encoders.AlbertEncoderConfig( <add> vocab_size=vocab_size, <add> embedding_width=16, <add> hidden_size=hidden_size, <add> intermediate_size=32, <add> max_position_embeddings=128, <add> num_attention_heads=2, <add> num_layers=num_hidden_layers, <add> dropout_rate=0.1)) <add> else: <add> # encoder_type can be 'bert' or 'bert_v2'. 
<add> model_config = encoders.BertEncoderConfig( <add> vocab_size=vocab_size, <add> embedding_size=16, <add> hidden_size=hidden_size, <add> intermediate_size=32, <add> max_position_embeddings=128, <add> num_attention_heads=2, <add> num_layers=num_hidden_layers, <add> dropout_rate=0.1) <add> kwargs = {"type": encoder_type, encoder_type: model_config} <add> encoder_config = encoders.EncoderConfig(**kwargs) <ide> <ide> return bert_config, encoder_config <ide> <ide> class ExportModelTest(tf.test.TestCase, parameterized.TestCase): <ide> alternative to BertTokenizer). <ide> """ <ide> <del> @parameterized.named_parameters(("Bert", True), ("Albert", False)) <del> def test_export_model(self, use_bert): <add> @parameterized.named_parameters( <add> ("Bert_Legacy", True, None), ("Albert", False, "albert"), <add> ("BertEncoder", False, "bert"), ("BertEncoderV2", False, "bert_v2")) <add> def test_export_model(self, use_bert, encoder_type): <ide> # Create the encoder and export it. <ide> hidden_size = 16 <ide> num_hidden_layers = 1 <ide> bert_config, encoder_config = _get_bert_config_or_encoder_config( <del> use_bert, hidden_size, num_hidden_layers) <add> use_bert, <add> hidden_size=hidden_size, <add> num_hidden_layers=num_hidden_layers, <add> encoder_type=encoder_type) <ide> bert_model, encoder = export_tfhub_lib._create_model( <ide> bert_config=bert_config, encoder_config=encoder_config, with_mlm=False) <ide> self.assertEmpty( <ide> def test_export_model(self, use_bert): <ide> _read_asset(hub_layer.resolved_object.sp_model_file)) <ide> <ide> # Check restored weights. <del> self.assertEqual(len(bert_model.trainable_weights), <del> len(hub_layer.trainable_weights)) <add> self.assertEqual( <add> len(bert_model.trainable_weights), len(hub_layer.trainable_weights)) <ide> for source_weight, hub_weight in zip(bert_model.trainable_weights, <ide> hub_layer.trainable_weights): <ide> self.assertAllClose(source_weight.numpy(), hub_weight.numpy()) <ide> def test_export_model_with_mlm(self, use_bert): <ide> # Note that we set `_auto_track_sub_layers` to False when exporting the <ide> # SavedModel, so hub_layer has the same number of weights as bert_model; <ide> # otherwise, hub_layer will have extra weights from its `mlm` subobject. <del> self.assertEqual(len(bert_model.trainable_weights), <del> len(hub_layer.trainable_weights)) <add> self.assertEqual( <add> len(bert_model.trainable_weights), len(hub_layer.trainable_weights)) <ide> for source_weight, hub_weight in zip(bert_model.trainable_weights, <ide> hub_layer.trainable_weights): <ide> self.assertAllClose(source_weight, hub_weight) <ide> def _make_vocab_file(self, vocab, filename="vocab.txt", add_mask_token=False): <ide> The absolute filename of the created vocab file. <ide> """ <ide> full_vocab = ["[PAD]", "[UNK]", "[CLS]", "[SEP]" <del> ] + ["[MASK]"]*add_mask_token + vocab <add> ] + ["[MASK]"] * add_mask_token + vocab <ide> path = os.path.join( <del> tempfile.mkdtemp(dir=self.get_temp_dir(), # New subdir each time. <del> prefix=_STRING_NOT_TO_LEAK), <add> tempfile.mkdtemp( <add> dir=self.get_temp_dir(), # New subdir each time. 
<add> prefix=_STRING_NOT_TO_LEAK), <ide> filename) <ide> with tf.io.gfile.GFile(path, "w") as f: <ide> f.write("\n".join(full_vocab + [""])) <ide> def _make_sp_model_file(self, vocab, prefix="spm", add_mask_token=False): <ide> model_prefix=model_prefix, <ide> model_type="word", <ide> input=input_file, <del> pad_id=0, unk_id=1, control_symbols=control_symbols, <add> pad_id=0, <add> unk_id=1, <add> control_symbols=control_symbols, <ide> vocab_size=full_vocab_size, <del> bos_id=full_vocab_size-2, eos_id=full_vocab_size-1) <del> SentencePieceTrainer.Train( <del> " ".join(["--{}={}".format(k, v) for k, v in flags.items()])) <add> bos_id=full_vocab_size - 2, <add> eos_id=full_vocab_size - 1) <add> SentencePieceTrainer.Train(" ".join( <add> ["--{}={}".format(k, v) for k, v in flags.items()])) <ide> return model_prefix + ".model" <ide> <del> def _do_export(self, vocab, do_lower_case, default_seq_length=128, <del> tokenize_with_offsets=True, use_sp_model=False, <del> experimental_disable_assert=False, add_mask_token=False): <add> def _do_export(self, <add> vocab, <add> do_lower_case, <add> default_seq_length=128, <add> tokenize_with_offsets=True, <add> use_sp_model=False, <add> experimental_disable_assert=False, <add> add_mask_token=False): <ide> """Runs SavedModel export and returns the export_path.""" <ide> export_path = tempfile.mkdtemp(dir=self.get_temp_dir()) <ide> vocab_file = sp_model_file = None <ide> if use_sp_model: <del> sp_model_file = self._make_sp_model_file(vocab, <del> add_mask_token=add_mask_token) <add> sp_model_file = self._make_sp_model_file( <add> vocab, add_mask_token=add_mask_token) <ide> else: <ide> vocab_file = self._make_vocab_file(vocab, add_mask_token=add_mask_token) <ide> export_tfhub_lib.export_preprocessing( <ide> def _do_export(self, vocab, do_lower_case, default_seq_length=128, <ide> <ide> def test_no_leaks(self): <ide> """Tests not leaking the path to the original vocab file.""" <del> path = self._do_export( <del> ["d", "ef", "abc", "xy"], do_lower_case=True, use_sp_model=False) <add> path = self._do_export(["d", "ef", "abc", "xy"], <add> do_lower_case=True, <add> use_sp_model=False) <ide> with tf.io.gfile.GFile(os.path.join(path, "saved_model.pb"), "rb") as f: <ide> self.assertFalse( # pylint: disable=g-generic-assert <ide> _STRING_NOT_TO_LEAK.encode("ascii") in f.read()) <ide> <ide> @parameterized.named_parameters(("Bert", False), ("Sentencepiece", True)) <ide> def test_exported_callables(self, use_sp_model): <del> preprocess = tf.saved_model.load(self._do_export( <del> ["d", "ef", "abc", "xy"], do_lower_case=True, <del> tokenize_with_offsets=not use_sp_model, # TODO(b/181866850): drop this. <del> experimental_disable_assert=True, # TODO(b/175369555): drop this. <del> use_sp_model=use_sp_model)) <add> preprocess = tf.saved_model.load( <add> self._do_export( <add> ["d", "ef", "abc", "xy"], <add> do_lower_case=True, <add> # TODO(b/181866850): drop this. <add> tokenize_with_offsets=not use_sp_model, <add> # TODO(b/175369555): drop this. 
<add> experimental_disable_assert=True, <add> use_sp_model=use_sp_model)) <ide> <ide> def fold_dim(rt): <ide> """Removes the word/subword distinction of BertTokenizer.""" <ide> def fold_dim(rt): <ide> # .tokenize() <ide> inputs = tf.constant(["abc d ef", "ABC D EF d"]) <ide> token_ids = preprocess.tokenize(inputs) <del> self.assertAllEqual(fold_dim(token_ids), <del> tf.ragged.constant([[6, 4, 5], <del> [6, 4, 5, 4]])) <add> self.assertAllEqual( <add> fold_dim(token_ids), tf.ragged.constant([[6, 4, 5], [6, 4, 5, 4]])) <ide> <ide> special_tokens_dict = { <ide> k: v.numpy().item() # Expecting eager Tensor, converting to Python. <del> for k, v in preprocess.tokenize.get_special_tokens_dict().items()} <del> self.assertDictEqual(special_tokens_dict, <del> dict(padding_id=0, <del> start_of_sequence_id=2, <del> end_of_segment_id=3, <del> vocab_size=4+6 if use_sp_model else 4+4)) <add> for k, v in preprocess.tokenize.get_special_tokens_dict().items() <add> } <add> self.assertDictEqual( <add> special_tokens_dict, <add> dict( <add> padding_id=0, <add> start_of_sequence_id=2, <add> end_of_segment_id=3, <add> vocab_size=4 + 6 if use_sp_model else 4 + 4)) <ide> <ide> # .tokenize_with_offsets() <ide> if use_sp_model: <ide> def fold_dim(rt): <ide> else: <ide> token_ids, start_offsets, limit_offsets = ( <ide> preprocess.tokenize_with_offsets(inputs)) <del> self.assertAllEqual(fold_dim(token_ids), <del> tf.ragged.constant([[6, 4, 5], <del> [6, 4, 5, 4]])) <del> self.assertAllEqual(fold_dim(start_offsets), <del> tf.ragged.constant([[0, 4, 6], <del> [0, 4, 6, 9]])) <del> self.assertAllEqual(fold_dim(limit_offsets), <del> tf.ragged.constant([[3, 5, 8], <del> [3, 5, 8, 10]])) <add> self.assertAllEqual( <add> fold_dim(token_ids), tf.ragged.constant([[6, 4, 5], [6, 4, 5, 4]])) <add> self.assertAllEqual( <add> fold_dim(start_offsets), tf.ragged.constant([[0, 4, 6], [0, 4, 6, <add> 9]])) <add> self.assertAllEqual( <add> fold_dim(limit_offsets), tf.ragged.constant([[3, 5, 8], [3, 5, 8, <add> 10]])) <ide> self.assertIs(preprocess.tokenize.get_special_tokens_dict, <ide> preprocess.tokenize_with_offsets.get_special_tokens_dict) <ide> <ide> # Root callable. 
<ide> bert_inputs = preprocess(inputs) <ide> self.assertAllEqual(bert_inputs["input_word_ids"].shape.as_list(), [2, 128]) <del> self.assertAllEqual(bert_inputs["input_word_ids"][:, :10], <del> tf.constant([[2, 6, 4, 5, 3, 0, 0, 0, 0, 0], <del> [2, 6, 4, 5, 4, 3, 0, 0, 0, 0]])) <add> self.assertAllEqual( <add> bert_inputs["input_word_ids"][:, :10], <add> tf.constant([[2, 6, 4, 5, 3, 0, 0, 0, 0, 0], <add> [2, 6, 4, 5, 4, 3, 0, 0, 0, 0]])) <ide> self.assertAllEqual(bert_inputs["input_mask"].shape.as_list(), [2, 128]) <del> self.assertAllEqual(bert_inputs["input_mask"][:, :10], <del> tf.constant([[1, 1, 1, 1, 1, 0, 0, 0, 0, 0], <del> [1, 1, 1, 1, 1, 1, 0, 0, 0, 0]])) <add> self.assertAllEqual( <add> bert_inputs["input_mask"][:, :10], <add> tf.constant([[1, 1, 1, 1, 1, 0, 0, 0, 0, 0], <add> [1, 1, 1, 1, 1, 1, 0, 0, 0, 0]])) <ide> self.assertAllEqual(bert_inputs["input_type_ids"].shape.as_list(), [2, 128]) <del> self.assertAllEqual(bert_inputs["input_type_ids"][:, :10], <del> tf.constant([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0], <del> [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]])) <add> self.assertAllEqual( <add> bert_inputs["input_type_ids"][:, :10], <add> tf.constant([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0], <add> [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]])) <ide> <ide> # .bert_pack_inputs() <ide> inputs_2 = tf.constant(["d xy", "xy abc"]) <ide> token_ids_2 = preprocess.tokenize(inputs_2) <del> bert_inputs = preprocess.bert_pack_inputs( <del> [token_ids, token_ids_2], seq_length=256) <add> bert_inputs = preprocess.bert_pack_inputs([token_ids, token_ids_2], <add> seq_length=256) <ide> self.assertAllEqual(bert_inputs["input_word_ids"].shape.as_list(), [2, 256]) <del> self.assertAllEqual(bert_inputs["input_word_ids"][:, :10], <del> tf.constant([[2, 6, 4, 5, 3, 4, 7, 3, 0, 0], <del> [2, 6, 4, 5, 4, 3, 7, 6, 3, 0]])) <add> self.assertAllEqual( <add> bert_inputs["input_word_ids"][:, :10], <add> tf.constant([[2, 6, 4, 5, 3, 4, 7, 3, 0, 0], <add> [2, 6, 4, 5, 4, 3, 7, 6, 3, 0]])) <ide> self.assertAllEqual(bert_inputs["input_mask"].shape.as_list(), [2, 256]) <del> self.assertAllEqual(bert_inputs["input_mask"][:, :10], <del> tf.constant([[1, 1, 1, 1, 1, 1, 1, 1, 0, 0], <del> [1, 1, 1, 1, 1, 1, 1, 1, 1, 0]])) <add> self.assertAllEqual( <add> bert_inputs["input_mask"][:, :10], <add> tf.constant([[1, 1, 1, 1, 1, 1, 1, 1, 0, 0], <add> [1, 1, 1, 1, 1, 1, 1, 1, 1, 0]])) <ide> self.assertAllEqual(bert_inputs["input_type_ids"].shape.as_list(), [2, 256]) <del> self.assertAllEqual(bert_inputs["input_type_ids"][:, :10], <del> tf.constant([[0, 0, 0, 0, 0, 1, 1, 1, 0, 0], <del> [0, 0, 0, 0, 0, 0, 1, 1, 1, 0]])) <add> self.assertAllEqual( <add> bert_inputs["input_type_ids"][:, :10], <add> tf.constant([[0, 0, 0, 0, 0, 1, 1, 1, 0, 0], <add> [0, 0, 0, 0, 0, 0, 1, 1, 1, 0]])) <ide> <ide> # For BertTokenizer only: repeat relevant parts for do_lower_case=False, <ide> # default_seq_length=10, experimental_disable_assert=False, <ide> # tokenize_with_offsets=False, and without folding the word/subword dimension. 
<ide> def test_cased_length10(self): <del> preprocess = tf.saved_model.load(self._do_export( <del> ["d", "##ef", "abc", "ABC"], <del> do_lower_case=False, default_seq_length=10, <del> tokenize_with_offsets=False, <del> use_sp_model=False, <del> experimental_disable_assert=False)) <add> preprocess = tf.saved_model.load( <add> self._do_export(["d", "##ef", "abc", "ABC"], <add> do_lower_case=False, <add> default_seq_length=10, <add> tokenize_with_offsets=False, <add> use_sp_model=False, <add> experimental_disable_assert=False)) <ide> inputs = tf.constant(["abc def", "ABC DEF"]) <ide> token_ids = preprocess.tokenize(inputs) <del> self.assertAllEqual(token_ids, tf.ragged.constant([[[6], [4, 5]], <del> [[7], [1]]])) <add> self.assertAllEqual(token_ids, <add> tf.ragged.constant([[[6], [4, 5]], [[7], [1]]])) <ide> <ide> self.assertFalse(hasattr(preprocess, "tokenize_with_offsets")) <ide> <ide> bert_inputs = preprocess(inputs) <del> self.assertAllEqual(bert_inputs["input_word_ids"], <del> tf.constant([[2, 6, 4, 5, 3, 0, 0, 0, 0, 0], <del> [2, 7, 1, 3, 0, 0, 0, 0, 0, 0]])) <del> self.assertAllEqual(bert_inputs["input_mask"], <del> tf.constant([[1, 1, 1, 1, 1, 0, 0, 0, 0, 0], <del> [1, 1, 1, 1, 0, 0, 0, 0, 0, 0]])) <del> self.assertAllEqual(bert_inputs["input_type_ids"], <del> tf.constant([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0], <del> [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]])) <add> self.assertAllEqual( <add> bert_inputs["input_word_ids"], <add> tf.constant([[2, 6, 4, 5, 3, 0, 0, 0, 0, 0], <add> [2, 7, 1, 3, 0, 0, 0, 0, 0, 0]])) <add> self.assertAllEqual( <add> bert_inputs["input_mask"], <add> tf.constant([[1, 1, 1, 1, 1, 0, 0, 0, 0, 0], <add> [1, 1, 1, 1, 0, 0, 0, 0, 0, 0]])) <add> self.assertAllEqual( <add> bert_inputs["input_type_ids"], <add> tf.constant([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0], <add> [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]])) <ide> <ide> inputs_2 = tf.constant(["d ABC", "ABC abc"]) <ide> token_ids_2 = preprocess.tokenize(inputs_2) <ide> bert_inputs = preprocess.bert_pack_inputs([token_ids, token_ids_2]) <ide> # Test default seq_length=10. <del> self.assertAllEqual(bert_inputs["input_word_ids"], <del> tf.constant([[2, 6, 4, 5, 3, 4, 7, 3, 0, 0], <del> [2, 7, 1, 3, 7, 6, 3, 0, 0, 0]])) <del> self.assertAllEqual(bert_inputs["input_mask"], <del> tf.constant([[1, 1, 1, 1, 1, 1, 1, 1, 0, 0], <del> [1, 1, 1, 1, 1, 1, 1, 0, 0, 0]])) <del> self.assertAllEqual(bert_inputs["input_type_ids"], <del> tf.constant([[0, 0, 0, 0, 0, 1, 1, 1, 0, 0], <del> [0, 0, 0, 0, 1, 1, 1, 0, 0, 0]])) <add> self.assertAllEqual( <add> bert_inputs["input_word_ids"], <add> tf.constant([[2, 6, 4, 5, 3, 4, 7, 3, 0, 0], <add> [2, 7, 1, 3, 7, 6, 3, 0, 0, 0]])) <add> self.assertAllEqual( <add> bert_inputs["input_mask"], <add> tf.constant([[1, 1, 1, 1, 1, 1, 1, 1, 0, 0], <add> [1, 1, 1, 1, 1, 1, 1, 0, 0, 0]])) <add> self.assertAllEqual( <add> bert_inputs["input_type_ids"], <add> tf.constant([[0, 0, 0, 0, 0, 1, 1, 1, 0, 0], <add> [0, 0, 0, 0, 1, 1, 1, 0, 0, 0]])) <ide> <ide> # XLA requires fixed shapes for tensors found in graph mode. <ide> # Statically known shapes in Python are a particularly firm way to <ide> def test_cased_length10(self): <ide> # inference when applied to fully or partially known input shapes. <ide> @parameterized.named_parameters(("Bert", False), ("Sentencepiece", True)) <ide> def test_shapes(self, use_sp_model): <del> preprocess = tf.saved_model.load(self._do_export( <del> ["abc", "def"], do_lower_case=True, <del> tokenize_with_offsets=not use_sp_model, # TODO(b/181866850): drop this. 
<del> experimental_disable_assert=True, # TODO(b/175369555): drop this. <del> use_sp_model=use_sp_model)) <add> preprocess = tf.saved_model.load( <add> self._do_export( <add> ["abc", "def"], <add> do_lower_case=True, <add> # TODO(b/181866850): drop this. <add> tokenize_with_offsets=not use_sp_model, <add> # TODO(b/175369555): drop this. <add> experimental_disable_assert=True, <add> use_sp_model=use_sp_model)) <ide> <ide> def expected_bert_input_shapes(batch_size, seq_length): <del> return dict(input_word_ids=[batch_size, seq_length], <del> input_mask=[batch_size, seq_length], <del> input_type_ids=[batch_size, seq_length]) <add> return dict( <add> input_word_ids=[batch_size, seq_length], <add> input_mask=[batch_size, seq_length], <add> input_type_ids=[batch_size, seq_length]) <ide> <ide> for batch_size in [7, None]: <ide> if use_sp_model: <ide> token_out_shape = [batch_size, None] # No word/subword distinction. <ide> else: <ide> token_out_shape = [batch_size, None, None] <ide> self.assertEqual( <del> _result_shapes_in_tf_function( <del> preprocess.tokenize, <del> tf.TensorSpec([batch_size], tf.string)), <del> token_out_shape, <del> "with batch_size=%s" % batch_size) <add> _result_shapes_in_tf_function(preprocess.tokenize, <add> tf.TensorSpec([batch_size], tf.string)), <add> token_out_shape, "with batch_size=%s" % batch_size) <ide> # TODO(b/181866850): Enable tokenize_with_offsets when it works and test. <ide> if use_sp_model: <ide> self.assertFalse(hasattr(preprocess, "tokenize_with_offsets")) <ide> else: <ide> self.assertEqual( <ide> _result_shapes_in_tf_function( <ide> preprocess.tokenize_with_offsets, <del> tf.TensorSpec([batch_size], tf.string)), <del> [token_out_shape] * 3, <add> tf.TensorSpec([batch_size], tf.string)), [token_out_shape] * 3, <ide> "with batch_size=%s" % batch_size) <ide> self.assertEqual( <ide> _result_shapes_in_tf_function( <ide> def expected_bert_input_shapes(batch_size, seq_length): <ide> def test_reexport(self, use_sp_model): <ide> """Test that preprocess keeps working after another save/load cycle.""" <ide> path1 = self._do_export( <del> ["d", "ef", "abc", "xy"], do_lower_case=True, default_seq_length=10, <add> ["d", "ef", "abc", "xy"], <add> do_lower_case=True, <add> default_seq_length=10, <ide> tokenize_with_offsets=False, <ide> experimental_disable_assert=True, # TODO(b/175369555): drop this. 
<ide> use_sp_model=use_sp_model) <ide> def test_reexport(self, use_sp_model): <ide> <ide> inputs = tf.constant(["abc d ef", "ABC D EF d"]) <ide> bert_inputs = model2(inputs) <del> self.assertAllEqual(bert_inputs["input_word_ids"], <del> tf.constant([[2, 6, 4, 5, 3, 0, 0, 0, 0, 0], <del> [2, 6, 4, 5, 4, 3, 0, 0, 0, 0]])) <del> self.assertAllEqual(bert_inputs["input_mask"], <del> tf.constant([[1, 1, 1, 1, 1, 0, 0, 0, 0, 0], <del> [1, 1, 1, 1, 1, 1, 0, 0, 0, 0]])) <del> self.assertAllEqual(bert_inputs["input_type_ids"], <del> tf.constant([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0], <del> [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]])) <add> self.assertAllEqual( <add> bert_inputs["input_word_ids"], <add> tf.constant([[2, 6, 4, 5, 3, 0, 0, 0, 0, 0], <add> [2, 6, 4, 5, 4, 3, 0, 0, 0, 0]])) <add> self.assertAllEqual( <add> bert_inputs["input_mask"], <add> tf.constant([[1, 1, 1, 1, 1, 0, 0, 0, 0, 0], <add> [1, 1, 1, 1, 1, 1, 0, 0, 0, 0]])) <add> self.assertAllEqual( <add> bert_inputs["input_type_ids"], <add> tf.constant([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0], <add> [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]])) <ide> <ide> @parameterized.named_parameters(("Bert", True), ("Albert", False)) <ide> def test_preprocessing_for_mlm(self, use_bert): <ide> """Combines both SavedModel types and TF.text helpers for MLM.""" <ide> # Create the preprocessing SavedModel with a [MASK] token. <del> non_special_tokens = ["hello", "world", <del> "nice", "movie", "great", "actors", <del> "quick", "fox", "lazy", "dog"] <del> preprocess = tf.saved_model.load(self._do_export( <del> non_special_tokens, do_lower_case=True, <del> tokenize_with_offsets=use_bert, # TODO(b/181866850): drop this. <del> experimental_disable_assert=True, # TODO(b/175369555): drop this. <del> add_mask_token=True, use_sp_model=not use_bert)) <add> non_special_tokens = [ <add> "hello", "world", "nice", "movie", "great", "actors", "quick", "fox", <add> "lazy", "dog" <add> ] <add> <add> preprocess = tf.saved_model.load( <add> self._do_export( <add> non_special_tokens, <add> do_lower_case=True, <add> tokenize_with_offsets=use_bert, # TODO(b/181866850): drop this. <add> experimental_disable_assert=True, # TODO(b/175369555): drop this. <add> add_mask_token=True, <add> use_sp_model=not use_bert)) <ide> vocab_size = len(non_special_tokens) + (5 if use_bert else 7) <ide> <ide> # Create the encoder SavedModel with an .mlm subobject. <ide> hidden_size = 16 <ide> num_hidden_layers = 2 <ide> bert_config, encoder_config = _get_bert_config_or_encoder_config( <del> use_bert, hidden_size, num_hidden_layers, vocab_size) <add> use_bert_config=use_bert, <add> hidden_size=hidden_size, <add> num_hidden_layers=num_hidden_layers, <add> vocab_size=vocab_size) <ide> _, pretrainer = export_tfhub_lib._create_model( <ide> bert_config=bert_config, encoder_config=encoder_config, with_mlm=True) <ide> model_checkpoint_dir = os.path.join(self.get_temp_dir(), "checkpoint") <ide> def test_preprocessing_for_mlm(self, use_bert): <ide> self.assertEqual(mask_id, 4) <ide> <ide> # A batch of 3 segment pairs. <del> raw_segments = [tf.constant(["hello", "nice movie", "quick fox"]), <del> tf.constant(["world", "great actors", "lazy dog"])] <add> raw_segments = [ <add> tf.constant(["hello", "nice movie", "quick fox"]), <add> tf.constant(["world", "great actors", "lazy dog"]) <add> ] <ide> batch_size = 3 <ide> <ide> # Misc hyperparameters. <ide> def test_preprocessing_for_mlm(self, use_bert): <ide> selection_rate=0.5, # Adjusted for the short test examples. 
<ide> unselectable_ids=[start_of_sequence_id, end_of_segment_id]), <ide> mask_values_chooser=text.MaskValuesChooser( <del> vocab_size=vocab_size, mask_token=mask_id, <add> vocab_size=vocab_size, <add> mask_token=mask_id, <ide> # Always put [MASK] to have a predictable result. <del> mask_token_rate=1.0, random_token_rate=0.0)) <add> mask_token_rate=1.0, <add> random_token_rate=0.0)) <ide> # Pad to fixed-length Transformer encoder inputs. <del> input_word_ids, _ = text.pad_model_inputs(masked_input_ids, <del> seq_length, <del> pad_value=padding_id) <del> input_type_ids, input_mask = text.pad_model_inputs(segment_ids, seq_length, <del> pad_value=0) <del> masked_lm_positions, _ = text.pad_model_inputs(masked_lm_positions, <del> max_selections_per_seq, <del> pad_value=0) <add> input_word_ids, _ = text.pad_model_inputs( <add> masked_input_ids, seq_length, pad_value=padding_id) <add> input_type_ids, input_mask = text.pad_model_inputs( <add> segment_ids, seq_length, pad_value=0) <add> masked_lm_positions, _ = text.pad_model_inputs( <add> masked_lm_positions, max_selections_per_seq, pad_value=0) <ide> masked_lm_positions = tf.cast(masked_lm_positions, tf.int32) <ide> num_predictions = int(tf.shape(masked_lm_positions)[1]) <ide> <ide> def test_preprocessing_for_mlm(self, use_bert): <ide> # [CLS] nice movie [SEP] great actors [SEP] <ide> [2, 7, 8, 3, 9, 10, 3, 0, 0, 0], <ide> # [CLS] brown fox [SEP] lazy dog [SEP] <del> [2, 11, 12, 3, 13, 14, 3, 0, 0, 0]]) <add> [2, 11, 12, 3, 13, 14, 3, 0, 0, 0] <add> ]) <ide> for i in range(batch_size): <ide> for j in range(num_predictions): <ide> k = int(masked_lm_positions[i, j]) <ide> def test_preprocessing_for_mlm(self, use_bert): <ide> @parameterized.named_parameters(("Bert", False), ("Sentencepiece", True)) <ide> def test_special_tokens_in_estimator(self, use_sp_model): <ide> """Tests getting special tokens without an Eager init context.""" <del> preprocess_export_path = self._do_export( <del> ["d", "ef", "abc", "xy"], do_lower_case=True, <del> use_sp_model=use_sp_model, tokenize_with_offsets=False) <add> preprocess_export_path = self._do_export(["d", "ef", "abc", "xy"], <add> do_lower_case=True, <add> use_sp_model=use_sp_model, <add> tokenize_with_offsets=False) <ide> <ide> def _get_special_tokens_dict(obj): <ide> """Returns special tokens of restored tokenizer as Python values.""" <ide> if tf.executing_eagerly(): <del> special_tokens_numpy = {k: v.numpy() <del> for k, v in obj.get_special_tokens_dict()} <add> special_tokens_numpy = { <add> k: v.numpy() for k, v in obj.get_special_tokens_dict() <add> } <ide> else: <ide> with tf.Graph().as_default(): <ide> # This code expects `get_special_tokens_dict()` to be a tf.function <ide> def _get_special_tokens_dict(obj): <ide> special_tokens_tensors = obj.get_special_tokens_dict() <ide> with tf.compat.v1.Session() as sess: <ide> special_tokens_numpy = sess.run(special_tokens_tensors) <del> return {k: v.item() # Numpy to Python. <del> for k, v in special_tokens_numpy.items()} <add> return { <add> k: v.item() # Numpy to Python. 
<add> for k, v in special_tokens_numpy.items() <add> } <ide> <ide> def input_fn(): <ide> self.assertFalse(tf.executing_eagerly()) <ide> def input_fn(): <ide> self.assertIsInstance(v, int, "Unexpected type for {}".format(k)) <ide> tokens = tokenize(sentences) <ide> packed_inputs = layers.BertPackInputs( <del> 4, special_tokens_dict=special_tokens_dict)(tokens) <add> 4, special_tokens_dict=special_tokens_dict)( <add> tokens) <ide> preprocessing = tf.keras.Model(sentences, packed_inputs) <ide> # Map the dataset. <ide> ds = tf.data.Dataset.from_tensors( <ide> def input_fn(): <ide> <ide> def model_fn(features, labels, mode): <ide> del labels # Unused. <del> return tf.estimator.EstimatorSpec(mode=mode, <del> predictions=features["input_word_ids"]) <add> return tf.estimator.EstimatorSpec( <add> mode=mode, predictions=features["input_word_ids"]) <ide> <ide> estimator = tf.estimator.Estimator(model_fn=model_fn) <ide> outputs = list(estimator.predict(input_fn)) <del> self.assertAllEqual(outputs, np.array([[2, 6, 3, 0], <del> [2, 4, 5, 3]])) <add> self.assertAllEqual(outputs, np.array([[2, 6, 3, 0], [2, 4, 5, 3]])) <ide> <ide> # TODO(b/175369555): Remove that code and its test. <ide> @parameterized.named_parameters(("Bert", False), ("Sentencepiece", True)) <ide> def test_check_no_assert(self, use_sp_model): <ide> """Tests the self-check during export without assertions.""" <del> preprocess_export_path = self._do_export( <del> ["d", "ef", "abc", "xy"], do_lower_case=True, <del> use_sp_model=use_sp_model, tokenize_with_offsets=False, <del> experimental_disable_assert=False) <add> preprocess_export_path = self._do_export(["d", "ef", "abc", "xy"], <add> do_lower_case=True, <add> use_sp_model=use_sp_model, <add> tokenize_with_offsets=False, <add> experimental_disable_assert=False) <ide> with self.assertRaisesRegex(AssertionError, <ide> r"failed to suppress \d+ Assert ops"): <ide> export_tfhub_lib._check_no_assert(preprocess_export_path) <ide> def _result_shapes_in_tf_function(fn, *args, **kwargs): <ide> <ide> Args: <ide> fn: A callable. <del> *args: TensorSpecs for Tensor-valued arguments and actual values <del> for Python-valued arguments to fn. <add> *args: TensorSpecs for Tensor-valued arguments and actual values for <add> Python-valued arguments to fn. <ide> **kwargs: Same for keyword arguments. <ide> <ide> Returns:
2
Java
Java
introduce failing test case in antpathmatchertests
c7cdbe126db6ab5e3422e764d4effa3a5ef2148b
<ide><path>spring-core/src/test/java/org/springframework/util/AntPathMatcherTests.java <ide> import java.util.Map; <ide> <ide> import org.junit.Before; <add>import org.junit.Ignore; <ide> import org.junit.Test; <ide> <ide> import static org.junit.Assert.*; <ide> * @author Juergen Hoeller <ide> * @author Arjen Poutsma <ide> * @author Rossen Stoyanchev <add> * @author Sam Brannen <ide> */ <ide> public class AntPathMatcherTests { <ide> <ide> public void combine() { <ide> assertEquals("/hotel/booking", pathMatcher.combine("/hotel/", "/booking")); // SPR-12975 <ide> } <ide> <add> @Ignore("Disabled until SPR-12998 is resolved") <add> @Test(expected = IllegalArgumentException.class) <add> public void combineWithTwoFileExtensionPatterns() { <add> pathMatcher.combine("/*.html", "/*.txt"); <add> } <add> <ide> @Test <ide> public void patternComparator() { <ide> Comparator<String> comparator = pathMatcher.getPatternComparator("/hotels/new");
1
Javascript
Javascript
add delay before starting timerstest
0698b2beca165dc77fb23194d7d61ac46987cc09
<ide><path>IntegrationTests/TimersTest.js <ide> var TimersTest = React.createClass({ <ide> }, <ide> <ide> componentDidMount() { <del> this.testSetTimeout0(); <add> this.setTimeout(this.testSetTimeout0, 1000); <ide> }, <ide> <ide> testSetTimeout0() {
1
Ruby
Ruby
use hash#fetch instead of has_key? check
81f92cbc2b4c1f99c08d04a8a360efe64a12faf8
<ide><path>actionpack/lib/action_controller/metal/params_wrapper.rb <ide> def _set_wrapper_defaults(options, model=nil) <ide> <ide> unless options[:include] || options[:exclude] <ide> model ||= _default_wrap_model <del> role = options.has_key?(:as) ? options[:as] : :default <add> role = options.fetch(:as, :default) <ide> if model.respond_to?(:accessible_attributes) && model.accessible_attributes(role).present? <ide> options[:include] = model.accessible_attributes(role).to_a <ide> elsif model.respond_to?(:attribute_names) && model.attribute_names.present?
1
Ruby
Ruby
fix failing test caused by `repo_info`
cb8af6d751d1c31bc379f96f5aeee9cc84cacf49
<ide><path>Library/Homebrew/cask/lib/hbc/cli/info.rb <ide> def self.info(cask) <ide> puts "#{cask.token}: #{cask.version}" <ide> puts Formatter.url(cask.homepage) if cask.homepage <ide> installation_info(cask) <del> puts "From: #{Formatter.url(repo_info(cask))}" if repo_info(cask) <add> puts "From: #{Formatter.url(repo_info(cask))}" <ide> name_info(cask) <ide> artifact_info(cask) <ide> Installer.print_caveats(cask) <ide> def self.name_info(cask) <ide> def self.repo_info(cask) <ide> user, repo, token = QualifiedToken.parse(Hbc.all_tokens.detect { |t| t.split("/").last == cask.token }) <ide> remote_tap = Tap.fetch(user, repo) <del> return remote_tap.remote.to_s if remote_tap.custom_remote? <add> <add> if remote_tap.custom_remote? && !remote_tap.remote.nil? <add> return remote_tap.remote.to_s <add> end <add> <ide> "#{remote_tap.default_remote}/blob/master/Casks/#{token}.rb" <ide> end <ide> <ide><path>Library/Homebrew/cask/test/cask/cli/info_test.rb <ide> local-caffeine: 1.2.3 <ide> http://example.com/local-caffeine <ide> Not installed <del> From: https://github.com/caskroom/homebrew-testcasks/blob/master/Casks/local-caffeine.rb <add> From: https://github.com/caskroom/homebrew-test/blob/master/Casks/local-caffeine.rb <ide> ==> Name <ide> None <ide> ==> Artifacts <ide> local-caffeine: 1.2.3 <ide> http://example.com/local-caffeine <ide> Not installed <del> From: https://github.com/caskroom/homebrew-testcasks/blob/master/Casks/local-caffeine.rb <add> From: https://github.com/caskroom/homebrew-test/blob/master/Casks/local-caffeine.rb <ide> ==> Name <ide> None <ide> ==> Artifacts <ide> Caffeine.app (app) <ide> local-transmission: 2.61 <ide> http://example.com/local-transmission <ide> Not installed <del> From: https://github.com/caskroom/homebrew-testcasks/blob/master/Casks/local-transmission.rb <add> From: https://github.com/caskroom/homebrew-test/blob/master/Casks/local-transmission.rb <ide> ==> Name <ide> None <ide> ==> Artifacts <ide> with-caveats: 1.2.3 <ide> http://example.com/local-caffeine <ide> Not installed <del> From: https://github.com/caskroom/homebrew-testcasks/blob/master/Casks/with-caveats.rb <add> From: https://github.com/caskroom/homebrew-test/blob/master/Casks/with-caveats.rb <ide> ==> Name <ide> None <ide> ==> Artifacts <ide> with-conditional-caveats: 1.2.3 <ide> http://example.com/local-caffeine <ide> Not installed <del> From: https://github.com/caskroom/homebrew-testcasks/blob/master/Casks/with-conditional-caveats.rb <add> From: https://github.com/caskroom/homebrew-test/blob/master/Casks/with-conditional-caveats.rb <ide> ==> Name <ide> None <ide> ==> Artifacts
2
Python
Python
change scipy -> numpy in who() docstring
d7cee5d2ff7cd16fe4c7ab7f8dae7218b783d7c9
<ide><path>numpy/lib/utils.py <ide> def may_share_memory(a, b): <ide> <ide> <ide> def who(vardict=None): <del> """Print the scipy arrays in the given dictionary (or globals() if None). <add> """Print the Numpy arrays in the given dictionary (or globals() if None). <ide> """ <ide> if vardict is None: <ide> frame = sys._getframe().f_back
1
Text
Text
fix changelog entry for
2717b08f76a20ad01301545148bc8714c7904632
<ide><path>activerecord/CHANGELOG.md <ide> # => #<Product ...> (if one Product with given price) <ide> # => ActiveRecord::SoleRecordExceeded (if more than one Product with given price) <ide> <del> user.api_keys.find_by_sole(key: key) <add> user.api_keys.find_sole_by(key: key) <ide> # as above <ide> ``` <ide>
1
PHP
PHP
add more tests
9bd9ebe2e3653cd40eb19160d005d2ae7740f352
<ide><path>src/Illuminate/Database/Eloquent/Factories/BelongsToManyRelationship.php <ide> public function __construct(Factory $factory, $pivot, $relationship) <ide> */ <ide> public function createFor(Model $model) <ide> { <del> $model->{$this->relationship}()->attach( <del> $this->factory->create([], $model), <del> is_callable($this->pivot) ? call_user_func($this->pivot, $model) : $this->pivot <del> ); <add> $this->factory->create([], $model)->each(function ($attachable) use ($model) { <add> $model->{$this->relationship}()->attach( <add> $attachable, <add> is_callable($this->pivot) ? call_user_func($this->pivot, $model) : $this->pivot <add> ); <add> }); <ide> } <ide> } <ide><path>tests/Database/DatabaseEloquentFactoryTest.php <ide> use Illuminate\Container\Container; <ide> use Illuminate\Database\Capsule\Manager as DB; <ide> use Illuminate\Database\Eloquent\Factories\Factory; <add>use Illuminate\Database\Eloquent\Factories\Sequence; <ide> use Illuminate\Database\Eloquent\Model as Eloquent; <ide> use PHPUnit\Framework\TestCase; <ide> <ide> public function createSchema() <ide> $table->string('title'); <ide> $table->timestamps(); <ide> }); <add> <add> $this->schema()->create('roles', function ($table) { <add> $table->increments('id'); <add> $table->string('name'); <add> $table->timestamps(); <add> }); <add> <add> $this->schema()->create('role_user', function ($table) { <add> $table->foreignId('role_id'); <add> $table->foreignId('user_id'); <add> $table->string('admin')->default('N'); <add> }); <ide> } <ide> <ide> /** <ide> public function test_belongs_to_relationship() <ide> $this->assertCount(3, FactoryTestPost::all()); <ide> } <ide> <add> public function test_belongs_to_many_relationship() <add> { <add> $users = FactoryTestUserFactory::times(3) <add> ->hasAttached( <add> FactoryTestRoleFactory::times(3)->afterCreating(function ($role, $user) { <add> $_SERVER['__test.role.creating-role'] = $role; <add> $_SERVER['__test.role.creating-user'] = $user; <add> }), <add> ['admin' => 'Y'], <add> 'roles' <add> ) <add> ->create(); <add> <add> $this->assertCount(9, FactoryTestRole::all()); <add> <add> $user = FactoryTestUser::latest()->first(); <add> <add> $this->assertCount(3, $user->roles); <add> $this->assertEquals('Y', $user->roles->first()->pivot->admin); <add> <add> $this->assertInstanceOf(Eloquent::class, $_SERVER['__test.role.creating-role']); <add> $this->assertInstanceOf(Eloquent::class, $_SERVER['__test.role.creating-user']); <add> <add> unset($_SERVER['__test.role.creating-role']); <add> unset($_SERVER['__test.role.creating-user']); <add> } <add> <add> public function test_sequences() <add> { <add> $users = FactoryTestUserFactory::times(2)->sequence( <add> ['name' => 'Taylor Otwell'], <add> ['name' => 'Abigail Otwell'], <add> )->create(); <add> <add> $this->assertEquals('Taylor Otwell', $users[0]->name); <add> $this->assertEquals('Abigail Otwell', $users[1]->name); <add> <add> $user = FactoryTestUserFactory::new() <add> ->hasAttached( <add> FactoryTestRoleFactory::times(4)->afterCreating(function ($role, $user) { <add> $_SERVER['__test.role.creating-role'] = $role; <add> $_SERVER['__test.role.creating-user'] = $user; <add> }), <add> new Sequence(['admin' => 'Y'], ['admin' => 'N']), <add> 'roles' <add> ) <add> ->create(); <add> <add> $this->assertCount(4, $user->roles); <add> <add> $this->assertCount(2, $user->roles->filter(function ($role) { <add> return $role->pivot->admin == 'Y'; <add> })); <add> <add> $this->assertCount(2, $user->roles->filter(function ($role) { <add> return 
$role->pivot->admin == 'N'; <add> })); <add> } <add> <ide> /** <ide> * Get a database connection instance. <ide> * <ide> public function posts() <ide> { <ide> return $this->hasMany(FactoryTestPost::class, 'user_id'); <ide> } <add> <add> public function roles() <add> { <add> return $this->belongsToMany(FactoryTestRole::class, 'role_user', 'user_id', 'role_id')->withPivot('admin'); <add> } <ide> } <ide> <ide> class FactoryTestPostFactory extends Factory <ide> public function user() <ide> return $this->belongsTo(FactoryTestUser::class, 'user_id'); <ide> } <ide> } <add> <add>class FactoryTestRoleFactory extends Factory <add>{ <add> protected $model = FactoryTestRole::class; <add> <add> public function definition() <add> { <add> return [ <add> 'name' => $this->faker->name, <add> ]; <add> } <add>} <add> <add>class FactoryTestRole extends Eloquent <add>{ <add> protected $table = 'roles'; <add> <add> public function users() <add> { <add> return $this->belongsToMany(FactoryTestUser::class, 'role_user', 'role_id', 'user_id')->withPivot('admin'); <add> } <add>}
2
Python
Python
fix trainer with remove_unused_columns=false
3ed5e97ba04ce9b24b4a7161ea74572598a4c480
<ide><path>src/transformers/trainer.py <ide> def remove_callback(self, callback): <ide> <ide> def _remove_unused_columns(self, dataset: "datasets.Dataset", description: Optional[str] = None): <ide> if not self.args.remove_unused_columns: <del> return <add> return dataset <ide> if self._signature_columns is None: <ide> # Inspect model forward signature to keep only the arguments it accepts. <ide> signature = inspect.signature(self.model.forward)
1
Javascript
Javascript
switch push style
cbafcbec083fcc06a46fdfd6619a9270efd37ccb
<ide><path>examples/js/loaders/FBXLoader.js <ide> <ide> try { <ide> <add> console.time( 'parse: ' ); <ide> var scene = self.parse( buffer, resourceDirectory ); <del> <add> console.timeEnd( 'parse: ' ); <ide> onLoad( scene ); <ide> <ide> } catch ( error ) { <ide> <ide> for ( var i = 2; i < faceLength; i ++ ) { <ide> <del> vertexB.push( <del> vertexPositions[ vertexPositionIndexes[ 0 ] ], <del> vertexPositions[ vertexPositionIndexes[ 1 ] ], <del> vertexPositions[ vertexPositionIndexes[ 2 ] ], <add> vertexB.push( vertexPositions[ vertexPositionIndexes[ 0 ] ] ); <add> vertexB.push( vertexPositions[ vertexPositionIndexes[ 1 ] ] ); <add> vertexB.push( vertexPositions[ vertexPositionIndexes[ 2 ] ] ); <ide> <del> vertexPositions[ vertexPositionIndexes[ ( i - 1 ) * 3 ] ], <del> vertexPositions[ vertexPositionIndexes[ ( i - 1 ) * 3 + 1 ] ], <del> vertexPositions[ vertexPositionIndexes[ ( i - 1 ) * 3 + 2 ] ], <add> vertexB.push( vertexPositions[ vertexPositionIndexes[ ( i - 1 ) * 3 ] ] ); <add> vertexB.push( vertexPositions[ vertexPositionIndexes[ ( i - 1 ) * 3 + 1 ] ] ); <add> vertexB.push( vertexPositions[ vertexPositionIndexes[ ( i - 1 ) * 3 + 2 ] ] ); <ide> <del> vertexPositions[ vertexPositionIndexes[ i * 3 ] ], <del> vertexPositions[ vertexPositionIndexes[ i * 3 + 1 ] ], <del> vertexPositions[ vertexPositionIndexes[ i * 3 + 2 ] ] <del> ); <add> vertexB.push( vertexPositions[ vertexPositionIndexes[ i * 3 ] ] ); <add> vertexB.push( vertexPositions[ vertexPositionIndexes[ i * 3 + 1 ] ] ); <add> vertexB.push( vertexPositions[ vertexPositionIndexes[ i * 3 + 2 ] ] ); <ide> <ide> } <ide> <ide> if ( deformer ) { <ide> <ide> for ( var i = 2; i < faceLength; i ++ ) { <ide> <del> weightsB.push( <del> faceWeights[ 0 ], <del> faceWeights[ 1 ], <del> faceWeights[ 2 ], <del> faceWeights[ 3 ], <del> <del> faceWeights[ ( i - 1 ) * 4 ], <del> faceWeights[ ( i - 1 ) * 4 + 1 ], <del> faceWeights[ ( i - 1 ) * 4 + 2 ], <del> faceWeights[ ( i - 1 ) * 4 + 3 ], <del> <del> faceWeights[ i * 4 ], <del> faceWeights[ i * 4 + 1 ], <del> faceWeights[ i * 4 + 2 ], <del> faceWeights[ i * 4 + 3 ] <del> ); <del> <del> weightsIndicesB.push( <del> faceWeightIndices[ 0 ], <del> faceWeightIndices[ 1 ], <del> faceWeightIndices[ 2 ], <del> faceWeightIndices[ 3 ], <del> <del> faceWeightIndices[ ( i - 1 ) * 4 ], <del> faceWeightIndices[ ( i - 1 ) * 4 + 1 ], <del> faceWeightIndices[ ( i - 1 ) * 4 + 2 ], <del> faceWeightIndices[ ( i - 1 ) * 4 + 3 ], <del> <del> faceWeightIndices[ i * 4 ], <del> faceWeightIndices[ i * 4 + 1 ], <del> faceWeightIndices[ i * 4 + 2 ], <del> faceWeightIndices[ i * 4 + 3 ] <del> ); <add> weightsB.push( faceWeights[ 0 ] ); <add> weightsB.push( faceWeights[ 1 ] ); <add> weightsB.push( faceWeights[ 2 ] ); <add> weightsB.push( faceWeights[ 3 ] ); <add> <add> weightsB.push( faceWeights[ ( i - 1 ) * 4 ] ); <add> weightsB.push( faceWeights[ ( i - 1 ) * 4 + 1 ] ); <add> weightsB.push( faceWeights[ ( i - 1 ) * 4 + 2 ] ); <add> weightsB.push( faceWeights[ ( i - 1 ) * 4 + 3 ] ); <add> <add> weightsB.push( faceWeights[ i * 4 ] ); <add> weightsB.push( faceWeights[ i * 4 + 1 ] ); <add> weightsB.push( faceWeights[ i * 4 + 2 ] ); <add> weightsB.push( faceWeights[ i * 4 + 3 ] ); <add> <add> weightsIndicesB.push( faceWeightIndices[ 0 ] ); <add> weightsIndicesB.push( faceWeightIndices[ 1 ] ); <add> weightsIndicesB.push( faceWeightIndices[ 2 ] ); <add> weightsIndicesB.push( faceWeightIndices[ 3 ] ); <add> <add> weightsIndicesB.push( faceWeightIndices[ ( i - 1 ) * 4 ] ); <add> weightsIndicesB.push( faceWeightIndices[ ( i - 1 ) * 4 + 
1 ] ); <add> weightsIndicesB.push( faceWeightIndices[ ( i - 1 ) * 4 + 2 ] ); <add> weightsIndicesB.push( faceWeightIndices[ ( i - 1 ) * 4 + 3 ] ); <add> <add> weightsIndicesB.push( faceWeightIndices[ i * 4 ] ); <add> weightsIndicesB.push( faceWeightIndices[ i * 4 + 1 ] ); <add> weightsIndicesB.push( faceWeightIndices[ i * 4 + 2 ] ); <add> weightsIndicesB.push( faceWeightIndices[ i * 4 + 3 ] ); <ide> <ide> } <ide> <ide> <ide> for ( var i = 2; i < faceLength; i ++ ) { <ide> <del> normalB.push( <del> faceNormals[ 0 ], <del> faceNormals[ 1 ], <del> faceNormals[ 2 ], <add> normalB.push( faceNormals[ 0 ] ); <add> normalB.push( faceNormals[ 1 ] ); <add> normalB.push( faceNormals[ 2 ] ); <ide> <del> faceNormals[ ( i - 1 ) * 3 ], <del> faceNormals[ ( i - 1 ) * 3 + 1 ], <del> faceNormals[ ( i - 1 ) * 3 + 2 ], <add> normalB.push( faceNormals[ ( i - 1 ) * 3 ] ); <add> normalB.push( faceNormals[ ( i - 1 ) * 3 + 1 ] ); <add> normalB.push( faceNormals[ ( i - 1 ) * 3 + 2 ] ); <ide> <del> faceNormals[ i * 3 ], <del> faceNormals[ i * 3 + 1 ], <del> faceNormals[ i * 3 + 2 ] <del> ); <add> normalB.push( faceNormals[ i * 3 ] ); <add> normalB.push( faceNormals[ i * 3 + 1 ] ); <add> normalB.push( faceNormals[ i * 3 + 2 ] ); <ide> <ide> } <ide> <ide> <ide> for ( var i = 2; i < faceLength; i ++ ) { <ide> <del> uvsB[ j ].push( <del> <del> faceUVs[ j ][ 0 ], <del> faceUVs[ j ][ 1 ], <add> uvsB[ j ].push( faceUVs[ j ][ 0 ] ); <add> uvsB[ j ].push( faceUVs[ j ][ 1 ] ); <ide> <del> faceUVs[ j ][ ( i - 1 ) * 2 ], <del> faceUVs[ j ][ ( i - 1 ) * 2 + 1 ], <add> uvsB[ j ].push( faceUVs[ j ][ ( i - 1 ) * 2 ] ); <add> uvsB[ j ].push( faceUVs[ j ][ ( i - 1 ) * 2 + 1 ] ); <ide> <del> faceUVs[ j ][ i * 2 ], <del> faceUVs[ j ][ i * 2 + 1 ] <del> <del> ); <add> uvsB[ j ].push( faceUVs[ j ][ i * 2 ] ); <add> uvsB[ j ].push( faceUVs[ j ][ i * 2 + 1 ] ); <ide> <ide> } <ide> <ide> <ide> for ( var i = 2; i < faceLength; i ++ ) { <ide> <del> colorsB.push( <del> faceColors[ 0 ], <del> faceColors[ 1 ], <del> faceColors[ 2 ], <ide> <del> faceColors[ ( i - 1 ) * 3 ], <del> faceColors[ ( i - 1 ) * 3 + 1 ], <del> faceColors[ ( i - 1 ) * 3 + 2 ], <add> colorsB.push( faceColors[ 0 ] ); <add> colorsB.push( faceColors[ 1 ] ); <add> colorsB.push( faceColors[ 2 ] ); <add> <add> colorsB.push( faceColors[ ( i - 1 ) * 3 ] ); <add> colorsB.push( faceColors[ ( i - 1 ) * 3 + 1 ] ); <add> colorsB.push( faceColors[ ( i - 1 ) * 3 + 2 ] ); <ide> <del> faceColors[ i * 3 ], <del> faceColors[ i * 3 + 1 ], <del> faceColors[ i * 3 + 2 ] <del> ); <add> colorsB.push( faceColors[ i * 3 ] ); <add> colorsB.push( faceColors[ i * 3 + 1 ] ); <add> colorsB.push( faceColors[ i * 3 + 2 ] ); <ide> <ide> } <ide> <ide> <ide> for ( var i = 2; i < faceLength; i ++ ) { <ide> <del> materialsB.push( <del> materialIndex, <del> materialIndex, <del> materialIndex <del> ); <add> materialsB.push( materialIndex ); <add> materialsB.push( materialIndex ); <add> materialsB.push( materialIndex ); <ide> <ide> } <ide> <ide> <ide> geo.addAttribute( 'skinWeight', new THREE.Float32BufferAttribute( weightsB, 4 ) ); <ide> <add> // used later to bind the skeleton to the model <ide> geo.FBX_Deformer = deformer; <ide> <ide> }
1
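For illustration of the style change in the FBXLoader patch above, a minimal sketch (hypothetical data, not from the repository) contrasting one multi-argument `push` call with repeated single-argument calls; both leave the array with the same contents:

```js
// Two equivalent ways to append several values to an array.
const faceWeights = [0.25, 0.5, 0.125, 0.125];

// Style A: a single push call with several arguments (the style being removed).
const a = [];
a.push(faceWeights[0], faceWeights[1], faceWeights[2], faceWeights[3]);

// Style B: one push call per value (the style the patch switches to).
const b = [];
b.push(faceWeights[0]);
b.push(faceWeights[1]);
b.push(faceWeights[2]);
b.push(faceWeights[3]);

console.log(a.join() === b.join()); // true, contents are identical
```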
Javascript
Javascript
fix negative values in process.hrtime()
1e5a02628c50f9a1fc6b214aa0926769c03b7dd1
<ide><path>src/node.js <ide> <ide> if (typeof ar !== 'undefined') { <ide> if (Array.isArray(ar)) { <del> return [ <del> (hrValues[0] * 0x100000000 + hrValues[1]) - ar[0], <del> hrValues[2] - ar[1] <del> ]; <add> const sec = (hrValues[0] * 0x100000000 + hrValues[1]) - ar[0]; <add> const nsec = hrValues[2] - ar[1]; <add> return [nsec < 0 ? sec - 1 : sec, nsec < 0 ? nsec + 1e9 : nsec]; <ide> } <ide> <ide> throw new TypeError('process.hrtime() only accepts an Array tuple'); <ide><path>test/parallel/test-process-hrtime.js <ide> function validateTuple(tuple) { <ide> assert(isFinite(v)); <ide> }); <ide> } <add> <add>const diff = process.hrtime([0, 1e9 - 1]); <add>assert(diff[1] >= 0); // https://github.com/nodejs/node/issues/4751
2
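The Node.js patch above fixes `process.hrtime(prev)` by borrowing one second whenever the nanosecond difference goes negative. A small usage sketch of that borrow logic (plain Node.js, no extra dependencies assumed):

```js
// Compute an elapsed [seconds, nanoseconds] tuple from two hrtime readings,
// borrowing from the seconds when the nanosecond difference is negative,
// which is what the patched process.hrtime(prev) does internally.
const start = process.hrtime();

setTimeout(() => {
  const end = process.hrtime();
  let sec = end[0] - start[0];
  let nsec = end[1] - start[1];
  if (nsec < 0) {        // e.g. start nsec near 1e9, end nsec near 0
    sec -= 1;
    nsec += 1e9;
  }
  console.log('manual diff:', [sec, nsec]);              // nsec is always >= 0
  console.log('built-in diff:', process.hrtime(start));  // same shape after the fix
}, 10);
```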
Ruby
Ruby
fix path to dev-cmd/irb and add test
db615a419afb99f186fb6df7c859f7538a7524ba
<ide><path>Library/Homebrew/dev-cmd/ruby.rb <ide> module Homebrew <ide> module_function <ide> <ide> def ruby <del> exec ENV["HOMEBREW_RUBY_PATH"], "-I#{HOMEBREW_LIBRARY_PATH}", "-rglobal", "-rcmd/irb", *ARGV <add> exec ENV["HOMEBREW_RUBY_PATH"], "-I#{HOMEBREW_LIBRARY_PATH}", "-rglobal", "-rdev-cmd/irb", *ARGV <ide> end <ide> end <ide><path>Library/Homebrew/test/dev-cmd/ruby_spec.rb <add>describe "brew ruby", :integration_test do <add> it "executes ruby code with Homebrew's libraries loaded" do <add> expect { brew "ruby", "-e", "exit 0" } <add> .to be_a_success <add> .and not_to_output.to_stdout <add> .and not_to_output.to_stderr <add> <add> expect { brew "ruby", "-e", "exit 1" } <add> .to be_a_failure <add> .and not_to_output.to_stdout <add> .and not_to_output.to_stderr <add> end <add>end
2
Javascript
Javascript
add inline dependency annotation
6d23591c31f2b41097ceaa380af09998e4a62f09
<ide><path>src/ngMock/angular-mocks.js <ide> angular.module('ngMock', ['ng']).provider({ <ide> $interval: angular.mock.$IntervalProvider, <ide> $httpBackend: angular.mock.$HttpBackendProvider, <ide> $rootElement: angular.mock.$RootElementProvider <del>}).config(function($provide) { <add>}).config(['$provide', function($provide) { <ide> $provide.decorator('$timeout', angular.mock.$TimeoutDecorator); <del>}); <add>}]); <ide> <ide> /** <ide> * @ngdoc overview <ide> angular.module('ngMock', ['ng']).provider({ <ide> * Currently there is only one mock present in this module - <ide> * the {@link ngMockE2E.$httpBackend e2e $httpBackend} mock. <ide> */ <del>angular.module('ngMockE2E', ['ng']).config(function($provide) { <add>angular.module('ngMockE2E', ['ng']).config(['$provide', function($provide) { <ide> $provide.decorator('$httpBackend', angular.mock.e2e.$httpBackendDecorator); <del>}); <add>}]); <ide> <ide> /** <ide> * @ngdoc object
1
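The angular-mocks patch above adds AngularJS inline array annotations so the injected name survives minification. A minimal sketch (assuming the AngularJS global `angular` is loaded; module names are illustrative, not from the patch):

```js
// Implicit annotation: AngularJS reads the parameter name '$provide' from the
// function source, which breaks once a minifier renames the parameter.
angular.module('exampleImplicit', []).config(function ($provide) {
  $provide.constant('answer', 42);
});

// Inline array annotation: the dependency name travels as a string literal,
// so minification cannot break it. This is the form the patch adopts.
angular.module('exampleInline', []).config(['$provide', function ($provide) {
  $provide.constant('answer', 42);
}]);
```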
Javascript
Javascript
fix ios9 typedarray.subarray(from, undefined) bug
49eeb97c60a783377be842e2e2a1132a124f42e6
<ide><path>src/animation/AnimationUtils.js <ide> var AnimationUtils = { <ide> <ide> if ( AnimationUtils.isTypedArray( array ) ) { <ide> <del> return new array.constructor( array.subarray( from, to ) ); <add> // in ios9 array.subarray(from, undefined) will return empty array <add> // but array.subarray(from) or array.subarray(from, len) is correct <add> return new array.constructor( array.subarray( from, to || array.length ) ); <ide> <ide> } <ide>
1
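The three.js patch above works around an iOS 9 Safari bug where `typedArray.subarray(from, undefined)` returns an empty view. A small sketch of the same guard (assuming `to` is either omitted or a positive index, as in the patch):

```js
// Clone a slice of a typed array without ever passing an explicit `undefined`
// end index, mirroring the workaround in the patch.
function cloneSlice(array, from, to) {
  // On iOS 9, array.subarray(from, undefined) came back empty, so fall back
  // to the full length when `to` is not given.
  return new array.constructor(array.subarray(from, to || array.length));
}

const source = new Float32Array([1, 2, 3, 4, 5]);
console.log(cloneSlice(source, 1));    // Float32Array [2, 3, 4, 5]
console.log(cloneSlice(source, 1, 3)); // Float32Array [2, 3]
```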
Text
Text
add advertise address, clarify join token
1e7219d40fcd3b9e3ef6d224e2926d5562fb7b74
<ide><path>docs/reference/commandline/swarm_join_token.md <ide> Options: <ide> --rotate Rotate join token <ide> ``` <ide> <del>Join tokens are secrets that determine whether or not a node will join the swarm as a manager node <del>or a worker node. You pass the token using the `--token flag` when you run <del>[swarm join](swarm_join.md). You can access the current tokens or rotate the tokens using <del>`swarm join-token`. <add>Join tokens are secrets that allow a node to join the swarm. There are two <add>different join tokens available, one for the worker role and one for the manager <add>role. You pass the token using the `--token` flag when you run <add>[swarm join](swarm_join.md). Nodes use the join token only when they join the <add>swarm. <ide> <del>Run with only a single `worker` or `manager` argument, it will print a command for joining a new <del>node to the swarm, including the necessary token: <add>You can view or rotate the join tokens using `swarm join-token`. <add> <add>As a convenience, you can pass `worker` or `manager` as an argument to <add>`join-token` to print the full `docker swarm join` command to join a new node to <add>the swarm: <ide> <ide> ```bash <ide> $ docker swarm join-token worker <ide> SWMTKN-1-3pu6hszjas19xyp7ghgosyx9k8atbfcr8p2is99znpy26u2lkl-b30ljddcqhef9b9v4rs7 <ide> <ide> ### `--rotate` <ide> <del>Update the join token for a specified role with a new token and print the token. <add>Because tokens allow new nodes to join the swarm, you should keep them secret. <add>Be particularly careful with manager tokens since they allow new manager nodes <add>to join the swarm. A rogue manager has the potential to disrupt the operation of <add>your swarm. <add> <add>Rotate your swarm's join token if a token gets checked-in to version control, <add>stolen, or a node is compromised. You may also want to periodically rotate the <add>token to ensure any unknown token leaks do not allow a rogue node to join <add>the swarm. <add> <add>To rotate the join token and print the newly generated token, run <add>`docker swarm join-token --rotate` and pass the role: `manager` or `worker`. <add> <add>Rotating a join-token means that no new nodes will be able to join the swarm <add>using the old token. Rotation does not affect existing nodes in the swarm <add>because the join token is only used for authorizing new nodes joining the swarm. <ide> <ide> ### `--quiet` <ide> <ide><path>docs/swarm/join-nodes.md <ide> also run tasks. <ide> Before you add nodes to a swarm you must install Docker Engine 1.12 or later on <ide> the host machine. <ide> <add>The Docker Engine joins the swarm depending on the **join-token** you provide to <add>the `docker swarm join` command. The node only uses the token at join time. If <add>you subsequently rotate the token, it doesn't affect existing swarm nodes. Refer <add>to [Run Docker Engine in swarm mode](swarm-mode.md#view-the-join-command-or-update-a-swarm-join-token). <add> <ide> ## Join as a worker node <ide> <ide> To retrieve the join command including the join token for worker nodes, run the <ide> $ docker swarm join \ <ide> This node joined a swarm as a manager. <ide> ``` <ide> <del><!--TODO WIP <del>Manager nodes use the listen address for cluster management communications. The <del>other nodes on the swarm must be able to access the manager node on the <del>IP address and port you specify for the listen address. 
<del> <del>Especially when there are multiple active network interfaces, you should <del>you explicitly define the listen address when you add a manager node to the a <del>swarm: <del> <del> ```bash <del> docker swarm join \ <del> --token <MANAGER-TOKEN> \ <del> --listen-addr <NODE-IP>:<PORT> \ <del> <MANAGER-IP>:<PORT> <del> ``` <del>this will change for https://github.com/docker/docker/pull/24237 ->> <del>Replace <NODE-IP> with the IP address of the node that is joining the swarm. <del>Replace <MANAGER-IP> with the address of the swarm manager. <del> <del>Only manager nodes use the listen address. If you specify `--listen-addr` for a <del>worker node, the node only uses the listen address if it is promoted to a <del>manager. <del>--> <del> <ide> ## Learn More <ide> <ide> * `swarm join`[command line reference](../reference/commandline/swarm_join.md) <ide><path>docs/swarm/swarm-mode.md <ide> To add a manager to this swarm, run the following command: <ide> 192.168.99.100:2377 <ide> ``` <ide> <add>### Configure the advertise address <add> <add>Manager nodes use an advertise address to allow other nodes in the swarm access <add>to the Swarmkit API and overlay networking. The other nodes on the swarm must be <add>able to access the manager node on its advertise address IP address. <add> <add>If you don't specify an advertise address, Docker checks if the system has a <add>single IP address. If so, Docker uses the IP address with with the listening <add>port `2377` by default. If the system has multiple IP addresses, you must <add>specify the correct `--advertise-addr` to enable inter-manager communication <add>and overlay networking: <add> <add>```bash <add>$ docker swarm init --advertise-addr <MANAGER-IP> <add>``` <add> <add>You must also specify the `--advertise-addr` if the address where other nodes <add>reach the first manager node is not the same address the manager sees as its <add>own. For instance, in a cloud setup that spans different regions, hosts have <add>both internal addresses for access within the region and external addresses that <add>you use for access from outside that region. In this case, specify the external <add>address with `--advertise-addr` so that the node can propogate that information <add>to other nodes that subsequently connect to it. <add> <add>Refer to the `docker swarm init` [CLI reference](../reference/commandline/swarm_init.md) <add>for more detail on the advertise address. <add> <ide> ### View the join command or update a swarm join token <ide> <del>The manager node requires a secret token for a new node to join the swarm. The <del>token for worker nodes is different from the token for manager nodes. <add>Nodes require a secret token to join the swarm. The token for worker nodes is <add>different from the token for manager nodes. Nodes only use the join-token at the <add>moment they join the swarm. Rotating the join token after a node has already <add>joined a swarm does not affect the node's swarm membership. Token rotation <add>ensures an old token cannot be used by any new nodes attempting to join the <add>swarm. <ide> <ide> To retrieve the join command including the join token for worker nodes, run: <ide> <ide> $ docker swarm join-token --quiet worker <ide> SWMTKN-1-49nj1cmql0jkz5s954yi3oex3nedyz0fb0xx14ie39trti4wxv-8vxv8rssmk743ojnwacrr2e7c <ide> ``` <ide> <del>Pass the `--rotate` for `swarm join-token` to the token for a worker or manager <add>Be careful with the join tokens because they are the secrets necessary to join <add>the swarm. 
In particular, checking a secret into version control is a bad <add>practice because it would allow anyone with access to the the application source <add>code to add new nodes to the swarm. Manager tokens are especially sensitive <add>because they allow a new manager node to join and gain control over the whole <add>swarm. <add> <add>We recommend that you rotate the join tokens in the following circumstances: <add> <add>* If a token was checked-in by accident into a version control system, group <add>chat or accidentally printed to your logs. <add>* If you suspect a node has been compromised. <add>* If you wish to guarantee that no new nodes can join the swarm. <add> <add>Additionally, it is a best practice to implement a regular rotation schedule for <add>any secret including swarm join tokens. We recommend that you rotate your tokens <add>at least every 6 months. <add> <add>Run `swarm join-token --rotate` to invalidate the old token and generate a new <add>token. Specify whether you want to rotate the token for `worker` or `manager` <ide> nodes: <ide> <del>``` <add>```bash <ide> $docker swarm join-token --rotate worker <ide> <ide> To add a worker to this swarm, run the following command:
3
Javascript
Javascript
use stable identities for snapshot iterables
751e123e959df300750d817ae3a5009a422eb07f
<ide><path>lib/FileSystemInfo.js <ide> class SnapshotIterable { <ide> class Snapshot { <ide> constructor() { <ide> this._flags = 0; <add> /** @type {Iterable<string> | undefined} */ <add> this._cachedFileIterable = undefined; <add> /** @type {Iterable<string> | undefined} */ <add> this._cachedContextIterable = undefined; <add> /** @type {Iterable<string> | undefined} */ <add> this._cachedMissingIterable = undefined; <ide> /** @type {number | undefined} */ <ide> this.startTime = undefined; <ide> /** @type {Map<string, FileSystemInfoEntry | null> | undefined} */ <ide> class Snapshot { <ide> * @returns {Iterable<string>} iterable <ide> */ <ide> getFileIterable() { <del> return this._createIterable(s => [ <del> s.fileTimestamps, <del> s.fileHashes, <del> s.fileTshs, <del> s.managedFiles <del> ]); <add> if (this._cachedFileIterable === undefined) { <add> this._cachedFileIterable = this._createIterable(s => [ <add> s.fileTimestamps, <add> s.fileHashes, <add> s.fileTshs, <add> s.managedFiles <add> ]); <add> } <add> return this._cachedFileIterable; <ide> } <ide> <ide> /** <ide> * @returns {Iterable<string>} iterable <ide> */ <ide> getContextIterable() { <del> return this._createIterable(s => [ <del> s.contextTimestamps, <del> s.contextHashes, <del> s.contextTshs, <del> s.managedContexts <del> ]); <add> if (this._cachedContextIterable === undefined) { <add> this._cachedContextIterable = this._createIterable(s => [ <add> s.contextTimestamps, <add> s.contextHashes, <add> s.contextTshs, <add> s.managedContexts <add> ]); <add> } <add> return this._cachedContextIterable; <ide> } <ide> <ide> /** <ide> * @returns {Iterable<string>} iterable <ide> */ <ide> getMissingIterable() { <del> return this._createIterable(s => [s.missingExistence, s.managedMissing]); <add> if (this._cachedMissingIterable === undefined) { <add> this._cachedMissingIterable = this._createIterable(s => [ <add> s.missingExistence, <add> s.managedMissing <add> ]); <add> } <add> return this._cachedMissingIterable; <ide> } <ide> } <ide>
1
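The webpack patch above memoizes each lazily built iterable on the snapshot so repeated getter calls return the same object identity. A stripped-down sketch of that pattern (hypothetical class, not webpack's actual Snapshot):

```js
// Cache a lazily created iterable so callers that rely on object identity
// (for example, using it as a Map or WeakMap key) always get the same instance.
class Snapshot {
  constructor(files) {
    this._files = files;
    this._cachedFileIterable = undefined;
  }

  getFileIterable() {
    if (this._cachedFileIterable === undefined) {
      const files = this._files;
      this._cachedFileIterable = {
        *[Symbol.iterator]() {
          yield* files;
        },
      };
    }
    return this._cachedFileIterable;
  }
}

const snapshot = new Snapshot(['a.js', 'b.js']);
console.log(snapshot.getFileIterable() === snapshot.getFileIterable()); // true
console.log([...snapshot.getFileIterable()]); // [ 'a.js', 'b.js' ]
```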
Javascript
Javascript
move reactnativetesttools-test to github
168ec03b0f99c0f8044b696815e477eec7493763
<ide><path>Libraries/Utilities/__tests__/ReactNativeTestTools-test.js <add>/** <add> * Copyright (c) Meta Platforms, Inc. and affiliates. <add> * <add> * This source code is licensed under the MIT license found in the <add> * LICENSE file in the root directory of this source tree. <add> * <add> * @flow <add> * @format <add> * @emails oncall+react_native <add> */ <add> <add>import * as React from 'react'; <add>import {byTestID, byTextMatching, enter, tap} from '../ReactNativeTestTools'; <add> <add>import {Text, TextInput, TouchableWithoutFeedback, View} from 'react-native'; <add> <add>const ReactTestRenderer = require('react-test-renderer'); <add> <add>describe('ReactNativeTestTools', () => { <add> const ExampleNull = () => null; <add> <add> it('matches byTestID()', () => { <add> const renderSimple = ReactTestRenderer.create(<ExampleNull testID="foo" />); <add> const foo = renderSimple.root.find(byTestID('foo')); <add> expect(foo).toEqual(renderSimple.root); <add> <add> const renderNested = ReactTestRenderer.create( <add> <View testID="bar"> <add> <View testID="baz" /> <add> <ExampleNull testID="bing"> <add> <View testID="impossible" /> <add> </ExampleNull> <add> </View>, <add> ); <add> const bar = renderNested.root.find(byTestID('bar')); <add> const baz = renderNested.root.find(byTestID('baz')); <add> const bing = renderNested.root.find(byTestID('bing')); <add> expect(bar).toEqual(renderNested.root); <add> expect(baz.type).toEqual(View); <add> expect(bing.type).toEqual(ExampleNull); <add> expect(renderNested.root.findAll(byTestID('impossible'))).toHaveLength(0); <add> }); <add> <add> it('matches byTextMatching()', () => { <add> const hasFooText = byTextMatching(/foo/); <add> const hasBarText = byTextMatching(/bar/); <add> const renderSimple = ReactTestRenderer.create(<Text>foobarbaz</Text>); <add> const foo = renderSimple.root.find(hasFooText); <add> expect(foo).toEqual(renderSimple.root); <add> expect(foo.type).toEqual(Text); <add> expect(foo.props.children).toEqual('foobarbaz'); <add> <add> const renderNested = ReactTestRenderer.create( <add> <Text> <add> foozy <add> <Text>woobar</Text> <add> fobarof <add> <Text>barwoo</Text> <add> woofoo <add> </Text>, <add> ); <add> <add> const bar = renderNested.root.find(hasBarText); <add> const barAll = renderNested.root.findAll(hasBarText); <add> const barAllShallow = renderNested.root.findAll(hasBarText, {deep: false}); <add> expect(bar.props.children.toString()).toEqual( <add> 'foozy,[object Object],fobarof,[object Object],woofoo', <add> ); <add> expect(barAll).toHaveLength(6); <add> expect(barAllShallow).toHaveLength(1); <add> }); <add> <add> it('interacts via tap()', () => { <add> const touchFn = jest.fn(); <add> const renderTouch = ReactTestRenderer.create( <add> <TouchableWithoutFeedback onPress={touchFn}> <add> <ExampleNull /> <add> </TouchableWithoutFeedback>, <add> ); <add> tap(renderTouch.root); <add> expect(touchFn).toBeCalled(); <add> <add> // example of tapping <Text /> <add> const textFn = jest.fn(); <add> const renderText = ReactTestRenderer.create(<Text onPress={textFn} />); <add> tap(renderText.root); <add> expect(textFn).toBeCalled(); <add> }); <add> <add> it('interacts via enter()', () => { <add> const changeFn = jest.fn(); <add> const changeTextFn = jest.fn(); <add> const renderInput = ReactTestRenderer.create( <add> <View> <add> <TextInput onChange={changeFn} onChangeText={changeTextFn} /> <add> </View>, <add> ); <add> const text = 'test message text'; <add> enter(renderInput.root, text); <add> expect(changeFn).toBeCalled(); 
<add> expect(changeTextFn).toBeCalledWith(text); <add> }); <add>});
1
Ruby
Ruby
remove deprecated --use-{gcc,llvm,clang}
f6fda5651bb08a60cd943c14aa08fadaebdd9a3f
<ide><path>Library/Homebrew/cmd/install.rb <ide> module Homebrew extend self <ide> def install <ide> raise FormulaUnspecifiedError if ARGV.named.empty? <ide> <del> { <del> 'gcc' => 'gcc-4.2', <del> 'llvm' => 'llvm-gcc', <del> 'clang' => 'clang' <del> }.each_pair do |old, new| <del> opt = "--use-#{old}" <del> if ARGV.include? opt then opoo <<-EOS.undent <del> #{opt.inspect} is deprecated and will be removed in a future version. <del> Please use "--cc=#{new}" instead. <del> EOS <del> end <del> end <del> <ide> if ARGV.include? '--head' <ide> raise "Specify `--HEAD` in uppercase to build from trunk." <ide> end <ide><path>Library/Homebrew/extend/ENV/shared.rb <ide> def compiler <ide> raise "Invalid value for --cc: #{other}" <ide> end <ide> end <del> elsif ARGV.include? '--use-gcc' <del> if MacOS.locate("gcc-4.2") || HOMEBREW_PREFIX.join("opt/apple-gcc42/bin/gcc-4.2").exist? <del> :gcc <del> else <del> raise "gcc-4.2 not found!" <del> end <del> elsif ARGV.include? '--use-llvm' <del> :llvm <del> elsif ARGV.include? '--use-clang' <del> :clang <ide> elsif homebrew_cc <ide> cc = COMPILER_ALIASES.fetch(homebrew_cc, homebrew_cc) <ide> COMPILER_SYMBOL_MAP.fetch(cc) { MacOS.default_compiler }
2
Javascript
Javascript
simplify accessor code a bit
495002207212709f47af6612a5ec3467ddba45ac
<ide><path>packages/ember-metal/lib/accessors.js <ide> Ember.getPath = function(root, path) { <ide> Ember.setPath = function(root, path, value, tolerant) { <ide> var keyName; <ide> <del> if (arguments.length===2 && 'string' === typeof root) { <add> if (IS_GLOBAL.test(root)) { <ide> value = path; <ide> path = root; <ide> root = null; <ide> } <ide> <del> <ide> if (path.indexOf('.') > 0) { <del> keyName = path.slice(path.lastIndexOf('.')+1); <add> // get the last part of the path <add> keyName = path.slice(path.lastIndexOf('.') + 1); <add> <add> // get the first part of the part <ide> path = path.slice(0, path.length-(keyName.length+1)); <add> <add> // unless the path is this, look up the first part to <add> // get the root <ide> if (path !== 'this') { <ide> root = Ember.getPath(root, path); <ide> } <del> <ide> } else { <del> if (IS_GLOBAL.test(path)) throw new Error('Invalid Path'); <add> Ember.assert("A global path passed to setPath must have at least one period", !IS_GLOBAL.test(path) || path.indexOf(".") > -1); <ide> keyName = path; <ide> } <ide> <del> if (!keyName || keyName.length===0 || keyName==='*') { <del> throw new Error('Invalid Path'); <add> if (!keyName || keyName.length === 0) { <add> throw new Error('You passed an empty path'); <ide> } <ide> <ide> if (!root) { <ide> Ember.setPath = function(root, path, value, tolerant) { <ide> an object has been destroyed. <ide> */ <ide> Ember.trySetPath = function(root, path, value) { <del> if (arguments.length===2 && 'string' === typeof root) { <del> value = path; <del> path = root; <del> root = null; <del> } <del> <ide> return Ember.setPath(root, path, value, true); <ide> }; <ide> <ide><path>packages/ember-metal/tests/accessors/setPath_test.js <ide> test('[obj, foo] -> obj.foo', function() { <ide> equal(Ember.getPath(obj, 'foo'), "BAM"); <ide> }); <ide> <del>test('[obj, *] -> EXCEPTION [cannot set *]', function() { <del> raises(function() { <del> Ember.setPath(obj, '*', "BAM"); <del> }, Error); <del>}); <del> <ide> test('[obj, foo.bar] -> obj.foo.bar', function() { <ide> Ember.setPath(obj, 'foo.bar', "BAM"); <ide> equal(Ember.getPath(obj, 'foo.bar'), "BAM");
2
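The Ember patch above splits a dotted path into a parent path and a final key before resolving the target object. A plain-JavaScript sketch of that splitting step (hypothetical helper, not Ember's public API):

```js
// Split 'foo.bar.baz' into the parent path and the last key, the way the
// patched setPath does before looking up the parent object.
function splitPath(path) {
  const lastDot = path.lastIndexOf('.');
  if (lastDot === -1) {
    return { parentPath: null, keyName: path };
  }
  return {
    parentPath: path.slice(0, lastDot),
    keyName: path.slice(lastDot + 1),
  };
}

console.log(splitPath('foo.bar.baz')); // { parentPath: 'foo.bar', keyName: 'baz' }
console.log(splitPath('foo'));         // { parentPath: null, keyName: 'foo' }
```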
PHP
PHP
show members causing failure in array validation
732686785d16968964e0216379eca4b4d781b0a1
<ide><path>src/Illuminate/Validation/Validator.php <ide> public function invalid() <ide> $this->passes(); <ide> } <ide> <del> return array_intersect_key( <add> $invalid = array_intersect_key( <ide> $this->data, $this->attributesThatHaveMessages() <ide> ); <add> <add> $result = []; <add> $failed = Arr::only(Arr::dot($invalid), array_keys($this->failed())); <add> foreach ($failed as $key => $failure) { <add> Arr::set($result, $key, $failure); <add> } <add> <add> return $result; <ide> } <ide> <ide> /** <ide><path>tests/Validation/ValidationValidatorTest.php <ide> public function testValidMethod() <ide> ]); <ide> } <ide> <add> public function testNestedInvalidMethod() <add> { <add> $trans = $this->getIlluminateArrayTranslator(); <add> $v = new Validator($trans, [ <add> 'testvalid' => 'filled', <add> 'testinvalid' => '', <add> 'records' => [ <add> 'ABC123', <add> 'ABC122', <add> 'ABB132', <add> 'ADCD23', <add> ], <add> ], [ <add> 'testvalid' => 'filled', <add> 'testinvalid' => 'filled', <add> 'records.*' => [ <add> 'required', <add> 'regex:/[A-F]{3}[0-9]{3}/', <add> ], <add> ]); <add> $this->assertEquals($v->invalid(), [ <add> 'testinvalid' => '', <add> 'records' => [ <add> 3 => 'ADCD23', <add> ], <add> ]); <add> } <add> <ide> public function testMultipleFileUploads() <ide> { <ide> $trans = $this->getIlluminateArrayTranslator();
2
Ruby
Ruby
convert `argv` test to spec
46a1e2f22d866275da68d11b871883f34669705f
<ide><path>Library/Homebrew/test/ARGV_spec.rb <add>require "extend/ARGV" <add> <add>describe HomebrewArgvExtension do <add> subject { argv.extend(HomebrewArgvExtension) } <add> let(:argv) { ["mxcl"] } <add> <add> describe "#formulae" do <add> it "raises an error when a Formula is unavailable" do <add> expect { subject.formulae }.to raise_error FormulaUnavailableError <add> end <add> <add> context "when there are no Formulae" do <add> let(:argv) { [] } <add> <add> it "returns an empty array" do <add> expect(subject.formulae).to be_empty <add> end <add> end <add> end <add> <add> describe "#casks" do <add> it "returns an empty array if there is no match" do <add> expect(subject.casks).to eq [] <add> end <add> end <add> <add> describe "#kegs" do <add> context "when there are matching Kegs" do <add> before(:each) do <add> keg = HOMEBREW_CELLAR + "mxcl/10.0" <add> keg.mkpath <add> end <add> <add> it "returns an array of Kegs" do <add> expect(subject.kegs.length).to eq 1 <add> end <add> end <add> <add> context "when there are no matching Kegs" do <add> let(:argv) { [] } <add> <add> it "returns an empty array" do <add> expect(subject.kegs).to be_empty <add> end <add> end <add> end <add> <add> describe "#named" do <add> let(:argv) { ["foo", "--debug", "-v"] } <add> <add> it "returns an array of non-option arguments" do <add> expect(subject.named).to eq ["foo"] <add> end <add> <add> context "when there are no named arguments" do <add> let(:argv) { [] } <add> <add> it "returns an empty array" do <add> expect(subject.named).to be_empty <add> end <add> end <add> end <add> <add> describe "#options_only" do <add> let(:argv) { ["--foo", "-vds", "a", "b", "cdefg"] } <add> <add> it "returns an array of option arguments" do <add> expect(subject.options_only).to eq ["--foo", "-vds"] <add> end <add> end <add> <add> describe "#flags_only" do <add> let(:argv) { ["--foo", "-vds", "a", "b", "cdefg"] } <add> <add> it "returns an array of flags" do <add> expect(subject.flags_only).to eq ["--foo"] <add> end <add> end <add> <add> describe "#empty?" do <add> let(:argv) { [] } <add> <add> it "returns true if it is empty" do <add> expect(subject).to be_empty <add> end <add> end <add> <add> describe "#switch?" do <add> let(:argv) { ["-ns", "-i", "--bar", "-a-bad-arg"] } <add> <add> it "returns true if the given string is a switch" do <add> %w[n s i].each do |s| <add> expect(subject.switch?(s)).to be true <add> end <add> end <add> <add> it "returns false if the given string is not a switch" do <add> %w[b ns bar --bar -n a bad arg].each do |s| <add> expect(subject.switch?(s)).to be false <add> end <add> end <add> end <add> <add> describe "#flag?" 
do <add> let(:argv) { ["--foo", "-bq", "--bar"] } <add> <add> it "returns true if the given string is a flag" do <add> expect(subject.flag?("--foo")).to eq true <add> expect(subject.flag?("--bar")).to eq true <add> end <add> <add> it "returns true if there is a switch with the same initial character" do <add> expect(subject.flag?("--baz")).to eq true <add> expect(subject.flag?("--qux")).to eq true <add> end <add> <add> it "returns false if there is no matching flag" do <add> expect(subject.flag?("--frotz")).to eq false <add> expect(subject.flag?("--debug")).to eq false <add> end <add> end <add> <add> describe "#value" do <add> let(:argv) { ["--foo=", "--bar=ab"] } <add> <add> it "returns the value for a given string" do <add> expect(subject.value("foo")).to eq "" <add> expect(subject.value("bar")).to eq "ab" <add> end <add> <add> it "returns nil if there is no matching argument" do <add> expect(subject.value("baz")).to be nil <add> end <add> end <add> <add> describe "#values" do <add> let(:argv) { ["--foo=", "--bar=a", "--baz=b,c"] } <add> <add> it "returns the value for a given argument" do <add> expect(subject.values("foo")).to eq [] <add> expect(subject.values("bar")).to eq ["a"] <add> expect(subject.values("baz")).to eq ["b", "c"] <add> end <add> <add> it "returns nil if there is no matching argument" do <add> expect(subject.values("qux")).to be nil <add> end <add> end <add>end <ide><path>Library/Homebrew/test/ARGV_test.rb <del>require "testing_env" <del>require "extend/ARGV" <del> <del>class ArgvExtensionTests < Homebrew::TestCase <del> def setup <del> super <del> @argv = [].extend(HomebrewArgvExtension) <del> end <del> <del> def test_argv_formulae <del> @argv.unshift "mxcl" <del> assert_raises(FormulaUnavailableError) { @argv.formulae } <del> end <del> <del> def test_argv_casks <del> @argv.unshift "mxcl" <del> assert_equal [], @argv.casks <del> end <del> <del> def test_argv_kegs <del> keg = HOMEBREW_CELLAR + "mxcl/10.0" <del> keg.mkpath <del> @argv << "mxcl" <del> assert_equal 1, @argv.kegs.length <del> end <del> <del> def test_argv_named <del> @argv << "foo" << "--debug" << "-v" <del> assert_equal %w[foo], @argv.named <del> end <del> <del> def test_options_only <del> @argv << "--foo" << "-vds" << "a" << "b" << "cdefg" <del> assert_equal %w[--foo -vds], @argv.options_only <del> end <del> <del> def test_flags_only <del> @argv << "--foo" << "-vds" << "a" << "b" << "cdefg" <del> assert_equal %w[--foo], @argv.flags_only <del> end <del> <del> def test_empty_argv <del> assert_empty @argv.named <del> assert_empty @argv.kegs <del> assert_empty @argv.formulae <del> assert_empty @argv <del> end <del> <del> def test_switch? <del> @argv << "-ns" << "-i" << "--bar" << "-a-bad-arg" <del> %w[n s i].each { |s| assert @argv.switch?(s) } <del> %w[b ns bar --bar -n a bad arg].each { |s| assert [email protected]?(s) } <del> end <del> <del> def test_flag? 
<del> @argv << "--foo" << "-bq" << "--bar" <del> assert @argv.flag?("--foo") <del> assert @argv.flag?("--bar") <del> assert @argv.flag?("--baz") <del> assert @argv.flag?("--qux") <del> assert [email protected]?("--frotz") <del> assert [email protected]?("--debug") <del> end <del> <del> def test_value <del> @argv << "--foo=" << "--bar=ab" <del> assert_equal "", @argv.value("foo") <del> assert_equal "ab", @argv.value("bar") <del> assert_nil @argv.value("baz") <del> end <del> <del> def test_values <del> @argv << "--foo=" << "--bar=a" << "--baz=b,c" <del> assert_equal [], @argv.values("foo") <del> assert_equal ["a"], @argv.values("bar") <del> assert_equal ["b", "c"], @argv.values("baz") <del> assert_nil @argv.values("qux") <del> end <del>end
2
Javascript
Javascript
enable possibility to add flow annotations
bc9bbb5533f223a200f8bea65a8c1873a7084e14
<ide><path>packager/react-packager/src/JSTransformer/worker/index.js <ide> * LICENSE file in the root directory of this source tree. An additional grant <ide> * of patent rights can be found in the PATENTS file in the same directory. <ide> */ <del>'use strict'; <del> <del>const constantFolding = require('./constant-folding'); <del>const extractDependencies = require('./extract-dependencies'); <del>const inline = require('./inline'); <del>const minify = require('./minify'); <del> <del>function makeTransformParams(filename, sourceCode, options) { <del> if (filename.endsWith('.json')) { <del> sourceCode = 'module.exports=' + sourceCode; <del> } <del> return {filename, sourceCode, options}; <del>} <del> <del>function transformCode(transform, filename, sourceCode, options, callback) { <del> const params = makeTransformParams(filename, sourceCode, options.transform); <del> const isJson = filename.endsWith('.json'); <del> <del> const transformFileStartLogEntry = { <del> action_name: 'Transforming file', <del> action_phase: 'start', <del> file_name: filename, <del> log_entry_label: 'Transforming file', <del> start_timestamp: process.hrtime(), <del> }; <del> <del> transform(params, (error, transformed) => { <del> if (error) { <del> callback(error); <del> return; <del> } <del> <del> var code, map; <del> if (options.minify) { <del> const optimized = <del> constantFolding(filename, inline(filename, transformed, options)); <del> code = optimized.code; <del> map = optimized.map; <del> } else { <del> code = transformed.code; <del> map = transformed.map; <del> } <ide> <del> if (isJson) { <del> code = code.replace(/^\w+\.exports=/, ''); <del> } else { <del> // Remove shebang <del> code = code.replace(/^#!.*/, ''); <del> } <del> <del> const result = isJson || options.extern <del> ? {dependencies: [], dependencyOffsets: []} <del> : extractDependencies(code); <del> <del> const timeDelta = process.hrtime(transformFileStartLogEntry.start_timestamp); <del> const duration_ms = Math.round((timeDelta[0] * 1e9 + timeDelta[1]) / 1e6); <del> const transformFileEndLogEntry = { <del> action_name: 'Transforming file', <del> action_phase: 'end', <del> file_name: filename, <del> duration_ms: duration_ms, <del> log_entry_label: 'Transforming file', <del> }; <del> <del> result.code = code; <del> result.map = map; <del> <del> return callback(null, { <del> result, <del> transformFileStartLogEntry, <del> transformFileEndLogEntry, <del> }); <del> }); <del>} <del> <del>exports.transformAndExtractDependencies = ( <del> transform, <del> filename, <del> sourceCode, <del> options, <del> callback <del>) => { <del> transformCode(require(transform), filename, sourceCode, options || {}, callback); <del>}; <del> <del>exports.minify = (filename, code, sourceMap, callback) => { <del> var result; <del> try { <del> result = minify(filename, code, sourceMap); <del> } catch (error) { <del> callback(error); <del> } <del> callback(null, result); <del>}; <add>'use strict'; <ide> <del>exports.transformCode = transformCode; // for easier testing <add>const path = require('path'); <add>require('../../../../babelRegisterOnly')([ <add> path.resolve(path.join(__dirname, '../../**/*')), <add>]); <add>module.exports = require('./worker'); <ide><path>packager/react-packager/src/JSTransformer/worker/worker.js <add>/** <add> * Copyright (c) 2016-present, Facebook, Inc. <add> * All rights reserved. <add> * <add> * This source code is licensed under the BSD-style license found in the <add> * LICENSE file in the root directory of this source tree. 
An additional grant <add> * of patent rights can be found in the PATENTS file in the same directory. <add> * <add> * @flow <add> */ <add> <add>'use strict'; <add> <add>const constantFolding = require('./constant-folding'); <add>const extractDependencies = require('./extract-dependencies'); <add>const inline = require('./inline'); <add>const minify = require('./minify'); <add> <add>function makeTransformParams(filename, sourceCode, options) { <add> if (filename.endsWith('.json')) { <add> sourceCode = 'module.exports=' + sourceCode; <add> } <add> return {filename, sourceCode, options}; <add>} <add> <add>export type TransformedCode = { <add> code: string, <add> dependencies: Array<string>, <add> dependencyOffsets: Array<number>, <add> map?: ?{}, <add>}; <add> <add>type Transform = (params: { <add> filename: string, <add> sourceCode: string, <add> options: ?{}, <add>}) => mixed; <add> <add>type Options = {transform?: {}}; <add> <add>type Callback = ( <add> error: ?Error, <add> data: ?{ <add> result: TransformedCode, <add> transformFileStartLogEntry: {}, <add> transformFileEndLogEntry: {}, <add> }, <add>) => mixed; <add> <add>function transformCode( <add> transform: Transform, <add> filename: string, <add> sourceCode: string, <add> options: Options, <add> callback: Callback, <add>) { <add> const params = makeTransformParams(filename, sourceCode, options.transform); <add> const isJson = filename.endsWith('.json'); <add> <add> const transformFileStartLogEntry = { <add> action_name: 'Transforming file', <add> action_phase: 'start', <add> file_name: filename, <add> log_entry_label: 'Transforming file', <add> start_timestamp: process.hrtime(), <add> }; <add> <add> transform(params, (error, transformed) => { <add> if (error) { <add> callback(error); <add> return; <add> } <add> <add> var code, map; <add> if (options.minify) { <add> const optimized = <add> constantFolding(filename, inline(filename, transformed, options)); <add> code = optimized.code; <add> map = optimized.map; <add> } else { <add> code = transformed.code; <add> map = transformed.map; <add> } <add> <add> if (isJson) { <add> code = code.replace(/^\w+\.exports=/, ''); <add> } else { <add> // Remove shebang <add> code = code.replace(/^#!.*/, ''); <add> } <add> <add> const depsResult = isJson || options.extern <add> ? 
{dependencies: [], dependencyOffsets: []} <add> : extractDependencies(code); <add> <add> const timeDelta = process.hrtime(transformFileStartLogEntry.start_timestamp); <add> const duration_ms = Math.round((timeDelta[0] * 1e9 + timeDelta[1]) / 1e6); <add> const transformFileEndLogEntry = { <add> action_name: 'Transforming file', <add> action_phase: 'end', <add> file_name: filename, <add> duration_ms: duration_ms, <add> log_entry_label: 'Transforming file', <add> }; <add> <add> return callback(null, { <add> result: {...depsResult, code, map}, <add> transformFileStartLogEntry, <add> transformFileEndLogEntry, <add> }); <add> }); <add>} <add> <add>exports.transformAndExtractDependencies = ( <add> transform: string, <add> filename: string, <add> sourceCode: string, <add> options: ?Options, <add> callback: Callback, <add>) => { <add> /* $FlowFixMe: impossible to type a dynamic require */ <add> const transformModule = require(transform); <add> transformCode(transformModule, filename, sourceCode, options || {}, callback); <add>}; <add> <add>exports.minify = ( <add> filename: string, <add> code: string, <add> sourceMap: string, <add> callback: (error: ?Error, result: mixed) => mixed, <add>) => { <add> var result; <add> try { <add> result = minify(filename, code, sourceMap); <add> } catch (error) { <add> callback(error); <add> } <add> callback(null, result); <add>}; <add> <add>exports.transformCode = transformCode; // for easier testing <ide><path>packager/react-packager/src/node-haste/Module.js <ide> const jsonStableStringify = require('json-stable-stringify'); <ide> <ide> const {join: joinPath, relative: relativePath, extname} = require('path'); <ide> <add>import type {TransformedCode} from '../JSTransformer/worker/worker'; <ide> import type Cache from './Cache'; <ide> import type ModuleCache from './ModuleCache'; <ide> import type FastFs from './fastfs'; <ide> <del>type TransformedCode = { <del> code: string, <del> dependencies: Array<string>, <del> dependencyOffsets: Array<number>, <del> map?: ?{}, <del>}; <del> <ide> type ReadResult = { <ide> code?: string, <ide> dependencies?: ?Array<string>,
3
PHP
PHP
improve messages logged for exceptions
d8551c49e57b6a724fd5e4515555551392583dca
<ide><path>lib/Cake/Error/ErrorHandler.php <ide> App::uses('Debugger', 'Utility'); <ide> App::uses('CakeLog', 'Log'); <ide> App::uses('ExceptionRenderer', 'Error'); <add>App::uses('Router', 'Routing'); <ide> <ide> /** <ide> * <ide> class ErrorHandler { <ide> public static function handleException(Exception $exception) { <ide> $config = Configure::read('Exception'); <ide> if (!empty($config['log'])) { <del> $message = sprintf("[%s] %s\n%s", <del> get_class($exception), <del> $exception->getMessage(), <del> $exception->getTraceAsString() <del> ); <del> CakeLog::write(LOG_ERR, $message); <add> CakeLog::write(LOG_ERR, self::_getMessage($exception)); <ide> } <ide> $renderer = $config['renderer']; <ide> if ($renderer !== 'ExceptionRenderer') { <ide> public static function handleException(Exception $exception) { <ide> } <ide> } <ide> <add>/** <add> * Generates a formatted error message <add> * @param Exception $exception Exception instance <add> * @return string Formatted message <add> */ <add> protected function _getMessage($exception) { <add> $message = sprintf("[%s] %s", <add> get_class($exception), <add> $exception->getMessage() <add> ); <add> if (method_exists($exception, 'getAttributes')) { <add> $attributes = $exception->getAttributes(); <add> if ($attributes) { <add> $message .= "\nException Attributes: " . var_export($exception->getAttributes(), true); <add> } <add> } <add> if (php_sapi_name() != 'cli') { <add> $request = Router::getRequest(); <add> if ($request) { <add> $message .= "\nRequest URL: " . $request->here(); <add> } <add> } <add> $message .= "\nStack Trace:\n" . $exception->getTraceAsString(); <add> return $message; <add> } <add> <ide> /** <ide> * Set as the default error handler by CakePHP. Use Configure::write('Error.handler', $callback), to use your own <ide> * error handling methods. This function will use Debugger to display errors when debug > 0. And <ide><path>lib/Cake/Test/Case/Error/ErrorHandlerTest.php <ide> public function testHandleExceptionLog() { <ide> <ide> $log = file(LOGS . 'error.log'); <ide> $this->assertRegExp('/\[NotFoundException\] Kaboom!/', $log[0], 'message missing.'); <del> $this->assertRegExp('/\#0.*ErrorHandlerTest->testHandleExceptionLog/', $log[1], 'Stack trace missing.'); <add> $this->assertRegExp('/\#0.*ErrorHandlerTest->testHandleExceptionLog/', $log[2], 'Stack trace missing.'); <ide> } <ide> <ide> /**
2
PHP
PHP
add basic foreign key generation to postgresschema
853fcb44b8c7f43fdbafe66b504c69f3604fc7f9
<ide><path>lib/Cake/Database/Schema/PostgresSchema.php <ide> public function constraintSql(Table $table, $name) { <ide> $data = $table->constraint($name); <ide> $out = 'CONSTRAINT ' . $this->_driver->quoteIdentifier($name); <ide> if ($data['type'] === Table::CONSTRAINT_PRIMARY) { <del> $out = 'PRIMARY KEY '; <add> $out = 'PRIMARY KEY'; <ide> } <ide> if ($data['type'] === Table::CONSTRAINT_UNIQUE) { <del> $out .= ' UNIQUE '; <add> $out .= ' UNIQUE'; <ide> } <add> return $this->_keySql($out, $data); <add> } <add> <add>/** <add> * Helper method for generating key SQL snippets. <add> * <add> * @param string $prefix The key prefix <add> * @param array $data Key data. <add> * @return string <add> */ <add> protected function _keySql($prefix, $data) { <ide> $columns = array_map( <ide> [$this->_driver, 'quoteIdentifier'], <ide> $data['columns'] <ide> ); <del> return $out . '(' . implode(', ', $columns) . ')'; <add> if ($data['type'] === Table::CONSTRAINT_FOREIGN) { <add> return $prefix . sprintf( <add> ' FOREIGN KEY (%s) REFERENCES %s (%s) ON UPDATE %s ON DELETE %s', <add> implode(', ', $columns), <add> $this->_driver->quoteIdentifier($data['references'][0]), <add> $this->_driver->quoteIdentifier($data['references'][1]), <add> $this->_foreignOnClause($data['update']), <add> $this->_foreignOnClause($data['delete']) <add> ); <add> } <add> return $prefix . ' (' . implode(', ', $columns) . ')'; <ide> } <ide> <add>/** <add> * Generate an ON clause for a foreign key. <add> * <add> * @param string|null $on The on clause <add> * @return string <add> */ <add> protected function _foreignOnClause($on) { <add> if ($on === Table::ACTION_SET_NULL) { <add> return 'SET NULL'; <add> } <add> if ($on === Table::ACTION_CASCADE) { <add> return 'CASCADE'; <add> } <add> if ($on === Table::ACTION_RESTRICT) { <add> return 'RESTRICT'; <add> } <add> if ($on === Table::ACTION_NO_ACTION) { <add> return 'NO ACTION'; <add> } <add> } <add> <add> <ide> /** <ide> * Generate the SQL to create a table. 
<ide> * <ide><path>lib/Cake/Test/TestCase/Database/Schema/PostgresSchemaTest.php <ide> protected function _needsConnection() { <ide> protected function _createTables($connection) { <ide> $this->_needsConnection(); <ide> <del> $connection->execute('DROP TABLE IF EXISTS articles'); <del> $connection->execute('DROP TABLE IF EXISTS authors'); <add> $connection->execute('DROP TABLE IF EXISTS schema_articles'); <add> $connection->execute('DROP TABLE IF EXISTS schema_authors'); <ide> <ide> $table = <<<SQL <del>CREATE TABLE authors( <add>CREATE TABLE schema_authors ( <ide> id SERIAL, <ide> name VARCHAR(50), <ide> bio DATE, <del>created TIMESTAMP <add>created TIMESTAMP, <add>PRIMARY KEY (id) <ide> ) <ide> SQL; <ide> $connection->execute($table); <ide> <ide> $table = <<<SQL <del>CREATE TABLE articles( <add>CREATE TABLE schema_articles ( <ide> id BIGINT PRIMARY KEY, <ide> title VARCHAR(20), <ide> body TEXT, <ide> author_id INTEGER NOT NULL, <ide> published BOOLEAN DEFAULT false, <ide> views SMALLINT DEFAULT 0, <ide> created TIMESTAMP, <del>CONSTRAINT "content_idx" UNIQUE ("title", "body") <add>CONSTRAINT "content_idx" UNIQUE ("title", "body"), <add>CONSTRAINT "author_idx" FOREIGN KEY ("author_id") REFERENCES "schema_authors" ("id") ON DELETE RESTRICT ON UPDATE CASCADE <ide> ) <ide> SQL; <ide> $connection->execute($table); <del> $connection->execute('COMMENT ON COLUMN "articles"."title" IS \'a title\''); <del> $connection->execute('CREATE INDEX "author_idx" ON "articles" ("author_id")'); <add> $connection->execute('COMMENT ON COLUMN "schema_articles"."title" IS \'a title\''); <add> $connection->execute('CREATE INDEX "author_idx" ON "schema_articles" ("author_id")'); <ide> } <ide> <ide> /** <ide> public function testListTables() { <ide> $result = $schema->listTables(); <ide> $this->assertInternalType('array', $result); <ide> $this->assertCount(2, $result); <del> $this->assertEquals('articles', $result[0]); <del> $this->assertEquals('authors', $result[1]); <add> $this->assertEquals('schema_articles', $result[0]); <add> $this->assertEquals('schema_authors', $result[1]); <ide> } <ide> <ide> /** <ide> public function testDescribeTable() { <ide> $this->_createTables($connection); <ide> <ide> $schema = new SchemaCollection($connection); <del> $result = $schema->describe('articles'); <add> $result = $schema->describe('schema_articles'); <ide> $expected = [ <ide> 'id' => [ <ide> 'type' => 'biginteger', <ide> public function testDescribeTableIndexes() { <ide> $this->_createTables($connection); <ide> <ide> $schema = new SchemaCollection($connection); <del> $result = $schema->describe('articles'); <add> $result = $schema->describe('schema_articles'); <ide> $this->assertInstanceOf('Cake\Database\Schema\Table', $result); <ide> $expected = [ <ide> 'primary' => [ <ide> public function testDescribeTableIndexes() { <ide> 'length' => [] <ide> ] <ide> ]; <del> $this->assertCount(2, $result->constraints()); <add> $this->assertCount(3, $result->constraints()); <add> $expected = [ <add> 'primary' => [ <add> 'type' => 'primary', <add> 'columns' => ['id'], <add> 'length' => [] <add> ], <add> 'content_idx' => [ <add> 'type' => 'unique', <add> 'columns' => ['title', 'body'], <add> 'length' => [] <add> ], <add> 'schema_articles_fk_1' => [ <add> 'type' => 'foreign', <add> 'columns' => ['author_id'], <add> 'references' => ['schema_authors', 'id'], <add> 'length' => [], <add> 'update' => 'cascade', <add> 'delete' => 'restrict', <add> ] <add> ]; <ide> $this->assertEquals($expected['primary'], $result->constraint('primary')); <ide> 
$this->assertEquals($expected['content_idx'], $result->constraint('content_idx')); <add> $this->assertEquals($expected['schema_articles_fk_1'], $result->constraint('schema_articles_fk_1')); <ide> <ide> $this->assertCount(1, $result->indexes()); <ide> $expected = [ <ide> public function testColumnSql($name, $data, $expected) { <ide> $driver = $this->_getMockedDriver(); <ide> $schema = new PostgresSchema($driver); <ide> <del> $table = (new Table('articles'))->addColumn($name, $data); <add> $table = (new Table('schema_articles'))->addColumn($name, $data); <ide> $this->assertEquals($expected, $schema->columnSql($table, $name)); <ide> } <ide> <ide> public function testColumnSqlPrimaryKey() { <ide> $driver = $this->_getMockedDriver(); <ide> $schema = new PostgresSchema($driver); <ide> <del> $table = new Table('articles'); <add> $table = new Table('schema_articles'); <ide> $table->addColumn('id', [ <ide> 'type' => 'integer', <ide> 'null' => false <ide> public static function constraintSqlProvider() { <ide> ['type' => 'unique', 'columns' => ['title', 'author_id']], <ide> 'CONSTRAINT "unique_idx" UNIQUE ("title", "author_id")' <ide> ], <add> [ <add> 'author_id_idx', <add> ['type' => 'foreign', 'columns' => ['author_id'], 'references' => ['authors', 'id']], <add> 'CONSTRAINT "author_id_idx" FOREIGN KEY ("author_id") ' . <add> 'REFERENCES "authors" ("id") ON UPDATE RESTRICT ON DELETE RESTRICT' <add> ], <add> [ <add> 'author_id_idx', <add> ['type' => 'foreign', 'columns' => ['author_id'], 'references' => ['authors', 'id'], 'update' => 'cascade'], <add> 'CONSTRAINT "author_id_idx" FOREIGN KEY ("author_id") ' . <add> 'REFERENCES "authors" ("id") ON UPDATE CASCADE ON DELETE RESTRICT' <add> ], <add> [ <add> 'author_id_idx', <add> ['type' => 'foreign', 'columns' => ['author_id'], 'references' => ['authors', 'id'], 'update' => 'restrict'], <add> 'CONSTRAINT "author_id_idx" FOREIGN KEY ("author_id") ' . <add> 'REFERENCES "authors" ("id") ON UPDATE RESTRICT ON DELETE RESTRICT' <add> ], <add> [ <add> 'author_id_idx', <add> ['type' => 'foreign', 'columns' => ['author_id'], 'references' => ['authors', 'id'], 'update' => 'setNull'], <add> 'CONSTRAINT "author_id_idx" FOREIGN KEY ("author_id") ' . <add> 'REFERENCES "authors" ("id") ON UPDATE SET NULL ON DELETE RESTRICT' <add> ], <add> [ <add> 'author_id_idx', <add> ['type' => 'foreign', 'columns' => ['author_id'], 'references' => ['authors', 'id'], 'update' => 'noAction'], <add> 'CONSTRAINT "author_id_idx" FOREIGN KEY ("author_id") ' . 
<add> 'REFERENCES "authors" ("id") ON UPDATE NO ACTION ON DELETE RESTRICT' <add> ], <ide> ]; <ide> } <ide> <ide> public function testConstraintSql($name, $data, $expected) { <ide> $driver = $this->_getMockedDriver(); <ide> $schema = new PostgresSchema($driver); <ide> <del> $table = (new Table('articles'))->addColumn('title', [ <add> $table = (new Table('schema_articles'))->addColumn('title', [ <ide> 'type' => 'string', <ide> 'length' => 255 <ide> ])->addColumn('author_id', [ <ide> public function testCreateSql() { <ide> $connection->expects($this->any())->method('driver') <ide> ->will($this->returnValue($driver)); <ide> <del> $table = (new Table('articles'))->addColumn('id', [ <add> $table = (new Table('schema_articles'))->addColumn('id', [ <ide> 'type' => 'integer', <ide> 'null' => false <ide> ]) <ide> public function testCreateSql() { <ide> ]); <ide> <ide> $expected = <<<SQL <del>CREATE TABLE "articles" ( <add>CREATE TABLE "schema_articles" ( <ide> "id" SERIAL, <ide> "title" VARCHAR NOT NULL, <ide> "body" TEXT, <ide> public function testCreateSql() { <ide> $this->assertCount(3, $result); <ide> $this->assertEquals($expected, $result[0]); <ide> $this->assertEquals( <del> 'CREATE INDEX "title_idx" ON "articles" ("title")', <add> 'CREATE INDEX "title_idx" ON "schema_articles" ("title")', <ide> $result[1] <ide> ); <ide> $this->assertEquals( <del> 'COMMENT ON COLUMN "articles"."title" IS "This is the title"', <add> 'COMMENT ON COLUMN "schema_articles"."title" IS "This is the title"', <ide> $result[2] <ide> ); <ide> } <ide> public function testDropSql() { <ide> $connection->expects($this->any())->method('driver') <ide> ->will($this->returnValue($driver)); <ide> <del> $table = new Table('articles'); <add> $table = new Table('schema_articles'); <ide> $result = $table->dropSql($connection); <ide> $this->assertCount(1, $result); <del> $this->assertEquals('DROP TABLE "articles"', $result[0]); <add> $this->assertEquals('DROP TABLE "schema_articles"', $result[0]); <ide> } <ide> <ide> /** <ide> public function testTruncateSql() { <ide> $connection->expects($this->any())->method('driver') <ide> ->will($this->returnValue($driver)); <ide> <del> $table = new Table('articles'); <add> $table = new Table('schema_articles'); <ide> $table->addColumn('id', 'integer') <ide> ->addConstraint('primary', [ <ide> 'type' => 'primary', <ide> 'columns' => ['id'] <ide> ]); <ide> $result = $table->truncateSql($connection); <ide> $this->assertCount(1, $result); <del> $this->assertEquals('TRUNCATE "articles" RESTART IDENTITY', $result[0]); <add> $this->assertEquals('TRUNCATE "schema_articles" RESTART IDENTITY', $result[0]); <ide> } <ide> <ide> /**
2
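The test changes above cover CakePHP's Postgres schema layer: constraintSqlProvider() now feeds foreign key definitions through constraintSql() and checks that the 'update' option (cascade, restrict, setNull, noAction) is rendered as the matching ON UPDATE clause, with ON DELETE left at RESTRICT. The snippet below is a hypothetical usage sketch of that same schema API outside the test suite; the helper function, table, and column names are invented, and it assumes a CakePHP 3 setup where Cake\Database\Schema\Table and Cake\Database\Connection are available as they are in the tests.

<?php
use Cake\Database\Connection;
use Cake\Database\Schema\Table;

// Hypothetical helper: describe a table with a foreign key and print the
// DDL statements the Postgres dialect generates for it.
function dumpCreateSql(Connection $connection)
{
    $table = new Table('schema_articles');
    $table
        ->addColumn('id', ['type' => 'integer', 'null' => false])
        ->addColumn('author_id', ['type' => 'integer', 'null' => false])
        ->addConstraint('primary', ['type' => 'primary', 'columns' => ['id']])
        ->addConstraint('author_id_idx', [
            'type' => 'foreign',
            'columns' => ['author_id'],
            'references' => ['authors', 'id'],
            'update' => 'cascade', // rendered as ON UPDATE CASCADE; ON DELETE stays RESTRICT by default
        ]);

    // createSql() returns one SQL statement per required DDL command.
    foreach ($table->createSql($connection) as $statement) {
        echo $statement . ";\n";
    }
}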
PHP
PHP
use base64 encoding for csrf tokens
f02d8b9b0d12bfd78d22cdb1dc0d5f7d28774948
<ide><path>src/Http/Middleware/SessionCsrfProtectionMiddleware.php <ide> public function skipCheckCallback(callable $callback) <ide> */ <ide> public function saltToken(string $token): string <ide> { <del> $length = strlen($token); <del> $salt = Security::randomString($length); <add> $decoded = base64_decode($token); <add> $length = strlen($decoded); <add> $salt = Security::randomBytes($length); <ide> $salted = ''; <ide> for ($i = 0; $i < $length; $i++) { <ide> // XOR the token and salt together so that we can reverse it later. <del> $salted .= chr(ord($token[$i]) ^ ord($salt[$i])); <add> $salted .= chr(ord($decoded[$i]) ^ ord($salt[$i])); <ide> } <ide> <del> return $salted . $salt; <add> return base64_encode($salted . $salt); <ide> } <ide> <ide> /** <ide> public function saltToken(string $token): string <ide> */ <ide> protected function unsaltToken(string $token): string <ide> { <del> if (strlen($token) != static::TOKEN_VALUE_LENGTH * 2) { <add> $decoded = base64_decode($token); <add> if (strlen($decoded) != static::TOKEN_VALUE_LENGTH * 2) { <ide> return $token; <ide> } <del> $salted = substr($token, 0, static::TOKEN_VALUE_LENGTH); <del> $salt = substr($token, static::TOKEN_VALUE_LENGTH); <add> $salted = substr($decoded, 0, static::TOKEN_VALUE_LENGTH); <add> $salt = substr($decoded, static::TOKEN_VALUE_LENGTH); <ide> <ide> $unsalted = ''; <ide> for ($i = 0; $i < static::TOKEN_VALUE_LENGTH; $i++) { <ide> // Reverse the the XOR to desalt. <ide> $unsalted .= chr(ord($salted[$i]) ^ ord($salt[$i])); <ide> } <ide> <del> return $unsalted; <add> return base64_encode($unsalted); <ide> } <ide> <ide> /** <ide> protected function unsetTokenField(ServerRequestInterface $request): ServerReque <ide> */ <ide> public function createToken(): string <ide> { <del> return Security::randomString(static::TOKEN_VALUE_LENGTH); <add> return base64_encode(Security::randomBytes(static::TOKEN_VALUE_LENGTH)); <ide> } <ide> <ide> /** <ide><path>tests/TestCase/Http/Middleware/SessionCsrfProtectionMiddlewareTest.php <ide> public function testSettingTokenInSession() <ide> $this->assertInstanceOf(Response::class, $response); <ide> $token = $updatedRequest->getSession()->read('csrfToken'); <ide> $this->assertNotEmpty($token, 'Should set a token.'); <del> $this->assertMatchesRegularExpression('/^[a-f0-9]+$/', $token, 'Should look like a hash.'); <add> $this->assertMatchesRegularExpression('/^[A-Z0-9+\/]+=*$/i', $token, 'Should look like base64 data.'); <ide> $requestAttr = $updatedRequest->getAttribute('csrfToken'); <ide> $this->assertNotEquals($token, $requestAttr); <ide> $this->assertEquals(strlen($token) * 2, strlen($requestAttr)); <add> $this->assertMatchesRegularExpression('/^[A-Z0-9\/+]+=*$/i', $requestAttr); <ide> } <ide> <ide> /** <ide> public function testConfigurationCookieCreate() <ide> $this->assertEmpty($session->read('csrfToken')); <ide> $token = $session->read('csrf'); <ide> $this->assertNotEmpty($token, 'Should set a token.'); <del> $this->assertMatchesRegularExpression('/^[a-f0-9]+$/', $token, 'Should look like a hash.'); <add> $this->assertMatchesRegularExpression('/^[A-Z0-9\/+]+=*$/i', $token, 'Should look like base64 data.'); <ide> } <ide> <ide> /**
2
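The middleware patch above makes two related changes: tokens are now raw random bytes exposed as base64 text, and each rendered token is XOR-masked with a fresh salt of the same length, so the value a browser sees changes on every request while still unmasking to the same session token (a common mitigation against compression-based attacks such as BREACH). The following is a small, self-contained sketch of that salt/unsalt round trip in plain PHP; it is illustrative only, not the middleware's actual code path, and the token length constant is an assumption.

<?php
// Illustrative sketch of the XOR salting scheme; not the middleware's real code.
const TOKEN_VALUE_LENGTH = 16; // assumed token length in bytes

function createToken()
{
    return base64_encode(random_bytes(TOKEN_VALUE_LENGTH));
}

function saltToken($token)
{
    $decoded = base64_decode($token);
    $length = strlen($decoded);
    $salt = random_bytes($length);
    $salted = '';
    for ($i = 0; $i < $length; $i++) {
        // XOR with the salt; XOR-ing again with the same salt restores the byte.
        $salted .= chr(ord($decoded[$i]) ^ ord($salt[$i]));
    }

    return base64_encode($salted . $salt);
}

function unsaltToken($salted)
{
    $decoded = base64_decode($salted);
    $half = intdiv(strlen($decoded), 2);
    $masked = substr($decoded, 0, $half);
    $salt = substr($decoded, $half);
    $token = '';
    for ($i = 0; $i < $half; $i++) {
        $token .= chr(ord($masked[$i]) ^ ord($salt[$i]));
    }

    return base64_encode($token);
}

// Two salted copies of one token differ (with overwhelming probability),
// but both unsalt back to the original token.
$token = createToken();
$a = saltToken($token);
$b = saltToken($token);
var_dump($a !== $b);                  // bool(true)
var_dump(unsaltToken($a) === $token); // bool(true)
var_dump(unsaltToken($b) === $token); // bool(true)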
PHP
PHP
add more group annotations
304848d9d7a190f6c02b97303892a1cb069f1597
<ide><path>tests/TestCase/Controller/ControllerTest.php <ide> public function testDeprecatedControllerPropertySetterMessage($property, $getter <ide> /** <ide> * Tests deprecated controller properties message <ide> * <add> * @group deprecated <ide> * @param $property Deprecated property name <ide> * @param $getter Getter name <ide> * @param $setter Setter name <ide> public function testDeprecatedViewPropertySetterMessage($property, $getter, $set <ide> /** <ide> * Tests deprecated view properties message <ide> * <add> * @group deprecated <ide> * @param $property Deprecated property name <ide> * @param $getter Getter name <ide> * @param $setter Setter name
1
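The annotation patch above tags deprecation tests with PHPUnit's @group feature so they can be filtered as a set, for example excluded once the deprecated code paths are removed. A minimal hypothetical test class (not taken from the patch) showing the same annotation:

<?php
use PHPUnit\Framework\TestCase;

class LegacyApiTest extends TestCase
{
    /**
     * Exercises a deprecated code path.
     *
     * @group deprecated
     */
    public function testDeprecatedSetterStillWorks()
    {
        $this->assertTrue(true); // placeholder assertion
    }
}

Tests grouped this way can be run in isolation with "phpunit --group deprecated" or skipped with "phpunit --exclude-group deprecated".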
Ruby
Ruby
specify return value of `save!`
5573b2148469abc730a041a0e06c1afc68a2c5ca
<ide><path>activerecord/lib/active_record/persistence.rb <ide> def save(*args) <ide> # <ide> # Attributes marked as readonly are silently ignored if the record is <ide> # being updated. <add> # <add> # Unless an error is raised, returns true. <ide> def save!(*args) <ide> create_or_update(*args) || raise(RecordNotSaved.new("Failed to save the record", self)) <ide> end
1
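The documentation patch above pins down the contract of save!: it either persists the record or raises, and otherwise returns true, so callers do not need to inspect its return value the way they do with save. A short hypothetical Active Record sketch; the Post model and its validation are invented for illustration and assume a matching posts table exists.

# Hypothetical model, used only to contrast save and save!.
class Post < ActiveRecord::Base
  validates :title, presence: true
end

post = Post.new(title: nil)
post.save    # => false; details are available on post.errors
post.save!   # raises ActiveRecord::RecordInvalid because validation failed

valid_post = Post.new(title: "Hello")
valid_post.save!   # => true; unless an error is raised, save! returns true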
Ruby
Ruby
fix warning in ac flash
1c52bca2664457af5c004545f90c1eb3d47c487c
<ide><path>actionpack/lib/action_controller/metal/flash.rb <ide> def flash #:doc: <ide> <ide> protected <ide> def process_action(method_name) <add> @_flash = nil <ide> super <ide> @_flash.store(session) if @_flash <ide> @_flash = nil
1
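The one-line patch above assigns @_flash = nil before running the action, so the later "if @_flash" check never reads an instance variable that was never set; on older Rubies run with warnings enabled (ruby -w), such a read prints an "instance variable @_flash not initialized" warning. The same pattern in isolation, with hypothetical class names:

# ruby -w example.rb  (pre-3.0 Rubies emit the warning; Ruby 3.0 dropped it)
class NoisyProcessor
  def process
    # Reading @_flash warns here when it was never assigned.
    @_flash.clear if @_flash
  end
end

class QuietProcessor
  def process
    @_flash = nil          # pre-initialize so the conditional read cannot warn
    @_flash.clear if @_flash
  end
end

NoisyProcessor.new.process
QuietProcessor.new.process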
Java
Java
improve shutdown responsiveness of subprotocolwsh
ace6bd2418cba892f793e9e3666ac02a541074c7
<ide><path>spring-websocket/src/main/java/org/springframework/web/socket/handler/ConcurrentWebSocketSessionDecorator.java <ide> <ide> import org.apache.commons.logging.Log; <ide> import org.apache.commons.logging.LogFactory; <del>import org.springframework.util.Assert; <ide> import org.springframework.web.socket.CloseStatus; <ide> import org.springframework.web.socket.WebSocketMessage; <ide> import org.springframework.web.socket.WebSocketSession; <ide> public class ConcurrentWebSocketSessionDecorator extends WebSocketSessionDecorat <ide> <ide> private volatile boolean limitExceeded; <ide> <add> private volatile boolean shutDownInProgress; <add> <ide> <ide> private final Lock flushLock = new ReentrantLock(); <ide> <ide> public long getTimeSinceSendStarted() { <ide> <ide> public void sendMessage(WebSocketMessage<?> message) throws IOException { <ide> <del> if (this.limitExceeded) { <add> if (isDisabled()) { <ide> return; <ide> } <ide> <ide> public void sendMessage(WebSocketMessage<?> message) throws IOException { <ide> break; <ide> } <ide> } <del> while (!this.buffer.isEmpty()); <add> while (!this.buffer.isEmpty() && !isDisabled()); <add> } <add> <add> private boolean isDisabled() { <add> return (this.limitExceeded || this.shutDownInProgress); <ide> } <ide> <ide> private boolean tryFlushMessageBuffer() throws IOException { <del> if (this.flushLock.tryLock() && !this.limitExceeded) { <add> if (this.flushLock.tryLock()) { <ide> try { <ide> while (true) { <ide> WebSocketMessage<?> messageToSend = this.buffer.poll(); <del> if (messageToSend == null) { <add> if (messageToSend == null || isDisabled()) { <ide> break; <ide> } <ide> this.bufferSize.addAndGet(messageToSend.getPayloadLength() * -1); <ide> private boolean tryFlushMessageBuffer() throws IOException { <ide> } <ide> <ide> private void checkSessionLimits() throws IOException { <del> if (this.closeLock.tryLock() && !this.limitExceeded) { <add> if (!isDisabled() && this.closeLock.tryLock()) { <ide> try { <ide> if (getTimeSinceSendStarted() > this.sendTimeLimit) { <ide> <ide> private void sessionLimitReached(String reason, CloseStatus status) { <ide> throw new SessionLimitExceededException(reason, status); <ide> } <ide> <add> @Override <add> public void close(CloseStatus status) throws IOException { <add> this.shutDownInProgress = true; <add> super.close(status); <add> } <ide> } <ide><path>spring-websocket/src/main/java/org/springframework/web/socket/messaging/SubProtocolWebSocketHandler.java <ide> <ide> package org.springframework.web.socket.messaging; <ide> <add>import java.io.IOException; <ide> import java.util.ArrayList; <ide> import java.util.Arrays; <ide> import java.util.HashSet; <ide> public final void start() { <ide> @Override <ide> public final void stop() { <ide> synchronized (this.lifecycleMonitor) { <add> <ide> this.running = false; <ide> this.clientOutboundChannel.unsubscribe(this); <add> <add> // Notify sessions to stop flushing messages <add> for (WebSocketSession session : this.sessions.values()) { <add> try { <add> session.close(CloseStatus.GOING_AWAY); <add> } <add> catch (Throwable t) { <add> logger.error("Failed to close session id '" + session.getId() + "': " + t.getMessage()); <add> } <add> } <ide> } <ide> } <ide> <ide> public void handleMessage(Message<?> message) throws MessagingException { <ide> <ide> WebSocketSession session = this.sessions.get(sessionId); <ide> if (session == null) { <del> logger.error("Session not found for session with id " + sessionId); <add> logger.error("Session not found for session with id '" 
+ sessionId + "', ignoring message " + message); <ide> return; <ide> } <ide>
2
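The Java patch above lets SubProtocolWebSocketHandler.stop() close every open session and gives ConcurrentWebSocketSessionDecorator a shut-down flag, so once close(...) is called the decorator stops draining its send buffer instead of holding shutdown hostage to slow clients. The sketch below shows the typical way such a decorator wraps a raw session; the limit values and message are made up and error handling is omitted.

import java.io.IOException;

import org.springframework.web.socket.CloseStatus;
import org.springframework.web.socket.TextMessage;
import org.springframework.web.socket.WebSocketSession;
import org.springframework.web.socket.handler.ConcurrentWebSocketSessionDecorator;

public class SessionWrappingExample {

    // Assumed limits: at most 10 seconds per send, at most 512 KB buffered.
    private static final int SEND_TIME_LIMIT_MS = 10 * 1000;
    private static final int BUFFER_SIZE_LIMIT_BYTES = 512 * 1024;

    public WebSocketSession wrap(WebSocketSession rawSession) {
        // Serializes concurrent sends and buffers messages while a send is in flight.
        return new ConcurrentWebSocketSessionDecorator(
                rawSession, SEND_TIME_LIMIT_MS, BUFFER_SIZE_LIMIT_BYTES);
    }

    public void send(WebSocketSession decoratedSession) throws IOException {
        decoratedSession.sendMessage(new TextMessage("hello"));
    }

    public void shutDown(WebSocketSession decoratedSession) throws IOException {
        // With the patch applied, closing also stops the decorator from
        // flushing whatever is still queued in its buffer.
        decoratedSession.close(CloseStatus.GOING_AWAY);
    }
}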
Javascript
Javascript
remove unused function
3fc9ab8ffab3345c995d7327fe75ef824349ec63
<ide><path>static/index.js <ide> }, false) <ide> } <ide> <del> function isRunningCoreSpecs (loadSettings) { <del> return !!(loadSettings && <del> loadSettings.isSpec && <del> loadSettings.specDirectory && <del> loadSettings.resourcePath && <del> path.dirname(loadSettings.specDirectory) === loadSettings.resourcePath) <del> } <del> <ide> parseLoadSettings() <ide> setupWindowBackground() <ide> })()
1