content_type (stringclasses, 8 values) | main_lang (stringclasses, 7 values) | message (stringlengths 1-50) | sha (stringlengths 40) | patch (stringlengths 52-962k) | file_count (int64, 1-300)
---|---|---|---|---|---
Ruby | Ruby | use tap inside reporter | 849e62c7368cf9d927448548b52387dc0e96c9ce | <ide><path>Library/Homebrew/cmd/update-report.rb
<ide> def load_formula_renames
<ide> end
<ide>
<ide> class Reporter
<del> attr_reader :initial_revision, :current_revision, :repository
<del>
<del> def self.repository_variable(repository)
<del> if repository == HOMEBREW_REPOSITORY
<del> ""
<del> else
<del> repository.to_s.
<del> strip_prefix(Tap::TAP_DIRECTORY.to_s).
<del> tr("^A-Za-z0-9", "_").
<del> upcase
<add> class ReporterRevisionUnsetError < RuntimeError
<add> def initialize(var_name)
<add> super "#{var_name} is unset!"
<ide> end
<ide> end
<ide>
<del> def initialize(repository)
<del> @repository = repository
<add> attr_reader :tap, :initial_revision, :current_revision
<add>
<add> def initialize(tap)
<add> @tap = tap
<ide>
<del> repo_var = Reporter.repository_variable(@repository)
<ide> initial_revision_var = "HOMEBREW_UPDATE_BEFORE#{repo_var}"
<ide> @initial_revision = ENV[initial_revision_var].to_s
<del> if @initial_revision.empty?
<del> raise "#{initial_revision_var} is unset!" if ARGV.homebrew_developer?
<del> raise "update-report should not be called directly!"
<del> end
<add> raise ReporterRevisionUnsetError, initial_revision_var if @initial_revision.empty?
<ide>
<ide> current_revision_var = "HOMEBREW_UPDATE_AFTER#{repo_var}"
<ide> @current_revision = ENV[current_revision_var].to_s
<del> if @current_revision.empty?
<del> raise "#{current_revision_var} is unset!" if ARGV.homebrew_developer?
<del> raise "update-report should not be called directly!"
<del> end
<add> raise ReporterRevisionUnsetError, current_revision_var if @current_revision.empty?
<ide> end
<ide>
<ide> def report
<del> map = Hash.new { |h, k| h[k] = [] }
<del>
<del> if initial_revision && initial_revision != current_revision
<del> wc_revision = read_current_revision
<del>
<del> diff.each_line do |line|
<del> status, *paths = line.split
<del> src = paths.first
<del> dst = paths.last
<del>
<del> next unless File.extname(dst) == ".rb"
<del> next unless paths.any? { |p| File.dirname(p) == formula_directory }
<del>
<del> case status
<del> when "A", "D"
<del> map[status.to_sym] << repository.join(src)
<del> when "M"
<del> file = repository.join(src)
<del> begin
<del> formula = Formulary.factory(file)
<del> new_version = if wc_revision == current_revision
<del> formula.pkg_version
<del> else
<del> FormulaVersions.new(formula).formula_at_revision(@current_revision, &:pkg_version)
<del> end
<del> old_version = FormulaVersions.new(formula).formula_at_revision(@initial_revision, &:pkg_version)
<del> next if new_version == old_version
<del> # short term fix to prevent situation like https://github.com/Homebrew/homebrew/issues/45616
<del> rescue Exception => e
<del> onoe e if ARGV.homebrew_developer?
<del> end
<del> map[:M] << file
<del> when /^R\d{0,3}/
<del> map[:D] << repository.join(src) if File.dirname(src) == formula_directory
<del> map[:A] << repository.join(dst) if File.dirname(dst) == formula_directory
<add> return @report if @report
<add>
<add> @report = Hash.new { |h, k| h[k] = [] }
<add> return @report unless updated?
<add>
<add> diff.each_line do |line|
<add> status, *paths = line.split
<add> src = Pathname.new paths.first
<add> dst = Pathname.new paths.last
<add>
<add> next unless dst.extname == ".rb"
<add> next unless paths.any? { |p| tap.formula_file?(p) }
<add>
<add> case status
<add> when "A", "D"
<add> @report[status.to_sym] << tap.formula_file_to_name(src)
<add> when "M"
<add> begin
<add> formula = Formulary.factory(tap.path/src)
<add> new_version = formula.pkg_version
<add> old_version = FormulaVersions.new(formula).formula_at_revision(@initial_revision, &:pkg_version)
<add> next if new_version == old_version
<add> rescue Exception => e
<add> onoe e if ARGV.homebrew_developer?
<ide> end
<add> @report[:M] << tap.formula_file_to_name(src)
<add> when /^R\d{0,3}/
<add> @report[:D] << tap.formula_file_to_name(src) if tap.formula_file?(src)
<add> @report[:A] << tap.formula_file_to_name(dst) if tap.formula_file?(dst)
<ide> end
<ide> end
<ide>
<del> map
<add> renamed_formulae = []
<add> @report[:D].each do |old_full_name|
<add> old_name = old_full_name.split("/").last
<add> new_name = tap.formula_renames[old_name]
<add> next unless new_name
<add>
<add> if tap.core_formula_repository?
<add> new_full_name = new_name
<add> else
<add> new_full_name = "#{tap}/#{new_name}"
<add> end
<add>
<add> renamed_formulae << [old_full_name, new_full_name] if @report[:A].include? new_full_name
<add> end
<add>
<add> unless renamed_formulae.empty?
<add> @report[:A] -= renamed_formulae.map(&:last)
<add> @report[:D] -= renamed_formulae.map(&:first)
<add> @report[:R] = renamed_formulae
<add> end
<add>
<add> @report
<ide> end
<ide>
<ide> def updated?
<del> initial_revision && initial_revision != current_revision
<add> initial_revision != current_revision
<ide> end
<ide>
<del> private
<add> def migrate_tap_migration
<add> report[:D].each do |full_name|
<add> name = full_name.split("/").last
<add> next unless (dir = HOMEBREW_CELLAR/name).exist? # skip if formula is not installed.
<add> next unless new_tap_name = tap.tap_migrations[name] # skip if formula is not in tap_migrations list.
<add> tabs = dir.subdirs.map { |d| Tab.for_keg(Keg.new(d)) }
<add> next unless tabs.first.tap == tap # skip if installed formula is not from this tap.
<add> new_tap = Tap.fetch(new_tap_name)
<add> new_tap.install unless new_tap.installed?
<add> # update tap for each Tab
<add> tabs.each { |tab| tab.tap = new_tap }
<add> tabs.each(&:write)
<add> end
<add> end
<ide>
<del> def formula_directory
<del> if repository == HOMEBREW_REPOSITORY
<del> "Library/Formula"
<del> elsif repository.join("Formula").directory?
<del> "Formula"
<del> elsif repository.join("HomebrewFormula").directory?
<del> "HomebrewFormula"
<del> else
<del> "."
<add> def migrate_formula_rename
<add> report[:R].each do |old_full_name, new_full_name|
<add> old_name = old_full_name.split("/").last
<add> next unless (dir = HOMEBREW_CELLAR/old_name).directory? && !dir.subdirs.empty?
<add>
<add> begin
<add> f = Formulary.factory(new_full_name)
<add> rescue Exception => e
<add> onoe e if ARGV.homebrew_developer?
<add> next
<add> end
<add>
<add> begin
<add> migrator = Migrator.new(f)
<add> migrator.migrate
<add> rescue Migrator::MigratorDifferentTapsError
<add> rescue Exception => e
<add> onoe e
<add> end
<ide> end
<ide> end
<ide>
<del> def read_current_revision
<del> `git rev-parse -q --verify HEAD`.chomp
<add> private
<add>
<add> def repo_var
<add> @repo_var ||= if tap.path == HOMEBREW_REPOSITORY
<add> ""
<add> else
<add> tap.path.to_s.
<add> strip_prefix(Tap::TAP_DIRECTORY.to_s).
<add> tr("^A-Za-z0-9", "_").
<add> upcase
<add> end
<ide> end
<ide>
<ide> def diff
<ide> Utils.popen_read(
<del> "git", "diff-tree", "-r", "--name-status", "--diff-filter=AMDR",
<add> "git", "-C", tap.path, "diff-tree", "-r", "--name-status", "--diff-filter=AMDR",
<ide> "-M85%", initial_revision, current_revision
<ide> )
<ide> end
<del>
<del> def `(cmd)
<del> out = super
<del> unless $?.success?
<del> $stderr.puts(out) unless out.empty?
<del> raise ErrorDuringExecution.new(cmd)
<del> end
<del> ohai(cmd, out) if ARGV.verbose?
<del> out
<del> end
<ide> end
<ide>
<ide> class Report | 1 |
Java | Java | introduce base exception class for arg resolution | 2c1d5efbb021d7eed196bad550bcd675d43035b9 | <ide><path>spring-messaging/src/main/java/org/springframework/messaging/handler/annotation/support/AbstractMethodArgumentResolutionException.java
<add>/*
<add> * Copyright 2002-2014 the original author or authors.
<add> *
<add> * Licensed under the Apache License, Version 2.0 (the "License");
<add> * you may not use this file except in compliance with the License.
<add> * You may obtain a copy of the License at
<add> *
<add> * http://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing, software
<add> * distributed under the License is distributed on an "AS IS" BASIS,
<add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<add> * See the License for the specific language governing permissions and
<add> * limitations under the License.
<add> */
<add>
<add>package org.springframework.messaging.handler.annotation.support;
<add>
<add>import org.springframework.core.MethodParameter;
<add>import org.springframework.messaging.Message;
<add>import org.springframework.messaging.MessagingException;
<add>
<add>/**
<add> * Base class for exceptions resulting from the invocation of
<add> * {@link org.springframework.messaging.handler.invocation.HandlerMethodArgumentResolver}.
<add> *
<add> * @author Rossen Stoyanchev
<add> * @since 4.0.3
<add> */
<add>@SuppressWarnings("serial")
<add>public abstract class AbstractMethodArgumentResolutionException extends MessagingException {
<add>
<add> private final MethodParameter parameter;
<add>
<add>
<add> /**
<add> * Create a new instance providing the invalid {@code MethodParameter}.
<add> */
<add> protected AbstractMethodArgumentResolutionException(Message<?> message, MethodParameter parameter) {
<add> this(message, parameter, getMethodParamMessage(parameter));
<add> }
<add>
<add> /**
<add> * Create a new instance providing the invalid {@code MethodParameter} and
<add> * a prepared description. Sub-classes should prepend the description with
<add> * the help of {@link #getMethodParamMessage(org.springframework.core.MethodParameter)}.
<add> */
<add> protected AbstractMethodArgumentResolutionException(Message<?> message,
<add> MethodParameter parameter, String description) {
<add>
<add> super(message, description);
<add> this.parameter = parameter;
<add> }
<add>
<add> /**
<add> * Return the MethodParameter that was rejected.
<add> */
<add> public MethodParameter getMethodParameter() {
<add> return this.parameter;
<add> }
<add>
<add> protected static String getMethodParamMessage(MethodParameter param) {
<add> return new StringBuilder("Could not resolve method parameter at index ")
<add> .append(param.getParameterIndex()).append(" in method: ")
<add> .append(param.getMethod().toGenericString()).toString();
<add> }
<add>
<add>}
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/handler/annotation/support/MessageMethodArgumentResolver.java
<ide> /*
<del> * Copyright 2002-2013 the original author or authors.
<add> * Copyright 2002-2014 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide> import org.springframework.messaging.Message;
<ide> import org.springframework.messaging.handler.invocation.HandlerMethodArgumentResolver;
<ide>
<add>import java.lang.reflect.Type;
<add>
<ide> /**
<ide> * A {@link HandlerMethodArgumentResolver} for {@link Message} parameters. Validates
<ide> * that the generic type of the payload matches with the message value.
<ide> public boolean supportsParameter(MethodParameter parameter) {
<ide>
<ide> @Override
<ide> public Object resolveArgument(MethodParameter parameter, Message<?> message) throws Exception {
<del> // Validate the message type is assignable
<del> if (!parameter.getParameterType().isAssignableFrom(message.getClass())) {
<del> throw new MethodArgumentTypeMismatchException(message,
<del> "Could not resolve Message parameter: invalid message type:"
<del> + "expected [" + message.getClass().getName() + "] but got ["
<del> + parameter.getParameterType().getName() + "]");
<add>
<add> Class<?> paramType = parameter.getParameterType();
<add>
<add> if (!paramType.isAssignableFrom(message.getClass())) {
<add> throw new MethodArgumentTypeMismatchException(message, parameter,
<add> "The actual message type [" + message.getClass().getName() + "] " +
<add> "does not match the expected type [" + paramType.getName() + "]");
<ide> }
<ide>
<del> // validate that the payload type matches
<del> Class<?> effectivePayloadType = getPayloadType(parameter);
<del> if (effectivePayloadType != null && !effectivePayloadType.isInstance(message.getPayload())) {
<del> throw new MethodArgumentTypeMismatchException(message,
<del> "Could not resolve Message parameter: invalid payload type: "
<del> + "expected [" + effectivePayloadType.getName() + "] but got ["
<del> + message.getPayload().getClass().getName() + "]");
<add> Class<?> expectedPayloadType = getPayloadType(parameter);
<add> Object payload = message.getPayload();
<add>
<add> if (expectedPayloadType != null && !expectedPayloadType.isInstance(payload)) {
<add> throw new MethodArgumentTypeMismatchException(message, parameter,
<add> "The expected Message<?> payload type [" + expectedPayloadType.getName() +
<add> "] does not match the actual payload type [" + payload.getClass().getName() + "]");
<ide> }
<add>
<ide> return message;
<ide> }
<ide>
<ide> private Class<?> getPayloadType(MethodParameter parameter) {
<del> ResolvableType resolvableType = ResolvableType
<del> .forType(parameter.getGenericParameterType()).as(Message.class);
<add> Type genericParamType = parameter.getGenericParameterType();
<add> ResolvableType resolvableType = ResolvableType.forType(genericParamType).as(Message.class);
<ide> return resolvableType.getGeneric(0).resolve(Object.class);
<ide> }
<ide>
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/handler/annotation/support/MethodArgumentNotValidException.java
<ide>
<ide> import org.springframework.core.MethodParameter;
<ide> import org.springframework.messaging.Message;
<del>import org.springframework.messaging.MessagingException;
<ide> import org.springframework.validation.BindingResult;
<ide> import org.springframework.validation.ObjectError;
<ide>
<ide> /**
<del> * Exception to be thrown when a method argument is not valid. For instance, this
<del> * can be issued if a validation on a method parameter annotated with
<del> * {@code @Valid} fails.
<add> * Exception to be thrown when a method argument fails validation perhaps as a
<add> * result of {@code @Valid} style validation, or perhaps because it is required.
<ide> *
<ide> * @author Brian Clozel
<add> * @author Rossen Stoyanchev
<ide> * @since 4.0.1
<ide> */
<ide> @SuppressWarnings("serial")
<del>public class MethodArgumentNotValidException extends MessagingException {
<del>
<del> private final MethodParameter parameter;
<add>public class MethodArgumentNotValidException extends AbstractMethodArgumentResolutionException {
<ide>
<ide> private final BindingResult bindingResult;
<ide>
<ide>
<ide> /**
<ide> * Create a new instance with the invalid {@code MethodParameter}.
<ide> */
<del>
<ide> public MethodArgumentNotValidException(Message<?> message, MethodParameter parameter) {
<ide> this(message, parameter, null);
<ide> }
<ide> public MethodArgumentNotValidException(Message<?> message, MethodParameter param
<ide> public MethodArgumentNotValidException(Message<?> message, MethodParameter parameter,
<ide> BindingResult bindingResult) {
<ide>
<del> super(message, generateMessage(parameter, bindingResult));
<del> this.parameter = parameter;
<add> super(message, parameter, getMethodParamMessage(parameter) +
<add> getValidationErrorMessage(parameter, bindingResult));
<add>
<ide> this.bindingResult = bindingResult;
<ide> }
<ide>
<ide>
<del> /**
<del> * Return the MethodParameter that was rejected.
<del> */
<del> public MethodParameter getMethodParameter() {
<del> return this.parameter;
<del> }
<del>
<ide> /**
<ide> * Return the BindingResult if the failure is validation-related or {@code null}.
<ide> */
<ide> public BindingResult getBindingResult() {
<ide> }
<ide>
<ide>
<del> private static String generateMessage(MethodParameter parameter, BindingResult bindingResult) {
<del>
<del> StringBuilder sb = new StringBuilder("Invalid parameter at index ")
<del> .append(parameter.getParameterIndex()).append(" in method: ")
<del> .append(parameter.getMethod().toGenericString());
<del>
<del>
<add> private static String getValidationErrorMessage(MethodParameter parameter, BindingResult bindingResult) {
<ide> if (bindingResult != null) {
<add> StringBuilder sb = new StringBuilder();
<ide> sb.append(", with ").append(bindingResult.getErrorCount()).append(" error(s): ");
<ide> for (ObjectError error : bindingResult.getAllErrors()) {
<ide> sb.append("[").append(error).append("] ");
<ide> }
<add> return sb.toString();
<add> }
<add> else {
<add> return "";
<ide> }
<del>
<del> return sb.toString();
<ide> }
<ide>
<ide> }
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/handler/annotation/support/MethodArgumentTypeMismatchException.java
<ide>
<ide> package org.springframework.messaging.handler.annotation.support;
<ide>
<add>import org.springframework.core.MethodParameter;
<ide> import org.springframework.messaging.Message;
<ide> import org.springframework.messaging.MessagingException;
<ide>
<ide> * @author Stephane Nicoll
<ide> * @since 4.0.3
<ide> */
<del>public class MethodArgumentTypeMismatchException extends MessagingException {
<add>@SuppressWarnings("serial")
<add>public class MethodArgumentTypeMismatchException extends AbstractMethodArgumentResolutionException {
<ide>
<del> public MethodArgumentTypeMismatchException(Message<?> message, String description) {
<del> super(message, description);
<add>
<add> /**
<add> * Create a new instance with the invalid {@code MethodParameter}.
<add> */
<add> public MethodArgumentTypeMismatchException(Message<?> message, MethodParameter parameter, String description) {
<add> super(message, parameter, getMethodParamMessage(parameter) + description);
<ide> }
<ide> }
<ide><path>spring-messaging/src/test/java/org/springframework/messaging/handler/annotation/support/MessageMethodArgumentResolverTests.java
<ide> import org.springframework.messaging.support.MessageBuilder;
<ide>
<ide> /**
<add> * Unit tests for
<add> * {@link org.springframework.messaging.handler.annotation.support.MessageMethodArgumentResolver}.
<ide> *
<ide> * @author Stephane Nicoll
<ide> */
<ide> public class MessageMethodArgumentResolverTests {
<ide>
<ide> private Method method;
<ide>
<add>
<ide> @Before
<ide> public void setup() throws Exception {
<del> method = MessageMethodArgumentResolverTests.class.getDeclaredMethod("handleMessage",
<add> this.method = MessageMethodArgumentResolverTests.class.getDeclaredMethod("handleMessage",
<ide> Message.class, Message.class, Message.class, Message.class, ErrorMessage.class);
<ide> }
<ide>
<add>
<ide> @Test
<ide> public void resolveAnyPayloadType() throws Exception {
<ide> Message<String> message = MessageBuilder.withPayload("test").build();
<del> MethodParameter parameter = new MethodParameter(method, 0);
<add> MethodParameter parameter = new MethodParameter(this.method, 0);
<ide>
<del> assertTrue("Parameter '" + parameter + "' should be supported", resolver.supportsParameter(parameter));
<del> assertSame(message, resolver.resolveArgument(parameter, message));
<add> assertTrue("Parameter '" + parameter + "' should be supported", this.resolver.supportsParameter(parameter));
<add> assertSame(message, this.resolver.resolveArgument(parameter, message));
<ide> }
<ide>
<ide> @Test
<ide> public void resolvePayloadTypeExactType() throws Exception {
<ide> Message<Integer> message = MessageBuilder.withPayload(123).build();
<del> MethodParameter parameter = new MethodParameter(method, 1);
<add> MethodParameter parameter = new MethodParameter(this.method, 1);
<ide>
<del> assertTrue("Parameter '" + parameter + "' should be supported", resolver.supportsParameter(parameter));
<del> assertSame(message, resolver.resolveArgument(parameter, message));
<add> assertTrue("Parameter '" + parameter + "' should be supported", this.resolver.supportsParameter(parameter));
<add> assertSame(message, this.resolver.resolveArgument(parameter, message));
<ide> }
<ide>
<ide> @Test
<ide> public void resolvePayloadTypeSubClass() throws Exception {
<ide> Message<Integer> message = MessageBuilder.withPayload(123).build();
<del> MethodParameter parameter = new MethodParameter(method, 2);
<add> MethodParameter parameter = new MethodParameter(this.method, 2);
<ide>
<del> assertTrue("Parameter '" + parameter + "' should be supported", resolver.supportsParameter(parameter));
<del> assertSame(message, resolver.resolveArgument(parameter, message));
<add> assertTrue("Parameter '" + parameter + "' should be supported", this.resolver.supportsParameter(parameter));
<add> assertSame(message, this.resolver.resolveArgument(parameter, message));
<ide> }
<ide>
<ide> @Test
<ide> public void resolveInvalidPayloadType() throws Exception {
<ide> Message<String> message = MessageBuilder.withPayload("test").build();
<del> MethodParameter parameter = new MethodParameter(method, 1);
<add> MethodParameter parameter = new MethodParameter(this.method, 1);
<ide>
<del> assertTrue("Parameter '" + parameter + "' should be supported", resolver.supportsParameter(parameter));
<add> assertTrue("Parameter '" + parameter + "' should be supported", this.resolver.supportsParameter(parameter));
<ide> thrown.expect(MethodArgumentTypeMismatchException.class);
<ide> thrown.expectMessage(Integer.class.getName());
<ide> thrown.expectMessage(String.class.getName());
<del> resolver.resolveArgument(parameter, message);
<add> this.resolver.resolveArgument(parameter, message);
<ide> }
<ide>
<ide> @Test
<ide> public void resolveUpperBoundPayloadType() throws Exception {
<ide> Message<Integer> message = MessageBuilder.withPayload(123).build();
<del> MethodParameter parameter = new MethodParameter(method, 3);
<add> MethodParameter parameter = new MethodParameter(this.method, 3);
<ide>
<del> assertTrue("Parameter '" + parameter + "' should be supported", resolver.supportsParameter(parameter));
<del> assertSame(message, resolver.resolveArgument(parameter, message));
<add> assertTrue("Parameter '" + parameter + "' should be supported", this.resolver.supportsParameter(parameter));
<add> assertSame(message, this.resolver.resolveArgument(parameter, message));
<ide> }
<ide>
<ide> @Test
<ide> public void resolveOutOfBoundPayloadType() throws Exception {
<ide> Message<Locale> message = MessageBuilder.withPayload(Locale.getDefault()).build();
<del> MethodParameter parameter = new MethodParameter(method, 3);
<add> MethodParameter parameter = new MethodParameter(this.method, 3);
<ide>
<del> assertTrue("Parameter '" + parameter + "' should be supported", resolver.supportsParameter(parameter));
<add> assertTrue("Parameter '" + parameter + "' should be supported", this.resolver.supportsParameter(parameter));
<ide> thrown.expect(MethodArgumentTypeMismatchException.class);
<ide> thrown.expectMessage(Number.class.getName());
<ide> thrown.expectMessage(Locale.class.getName());
<del> resolver.resolveArgument(parameter, message);
<add> this.resolver.resolveArgument(parameter, message);
<ide> }
<ide>
<ide> @Test
<ide> public void resolveMessageSubTypeExactMatch() throws Exception {
<ide> ErrorMessage message = new ErrorMessage(new UnsupportedOperationException());
<del> MethodParameter parameter = new MethodParameter(method, 4);
<add> MethodParameter parameter = new MethodParameter(this.method, 4);
<ide>
<del> assertTrue("Parameter '" + parameter + "' should be supported", resolver.supportsParameter(parameter));
<del> assertSame(message, resolver.resolveArgument(parameter, message));
<add> assertTrue("Parameter '" + parameter + "' should be supported", this.resolver.supportsParameter(parameter));
<add> assertSame(message, this.resolver.resolveArgument(parameter, message));
<ide> }
<ide>
<ide> @Test
<ide> public void resolveMessageSubTypeSubClass() throws Exception {
<ide> ErrorMessage message = new ErrorMessage(new UnsupportedOperationException());
<del> MethodParameter parameter = new MethodParameter(method, 0);
<add> MethodParameter parameter = new MethodParameter(this.method, 0);
<ide>
<del> assertTrue("Parameter '" + parameter + "' should be supported", resolver.supportsParameter(parameter));
<del> assertSame(message, resolver.resolveArgument(parameter, message));
<add> assertTrue("Parameter '" + parameter + "' should be supported", this.resolver.supportsParameter(parameter));
<add> assertSame(message, this.resolver.resolveArgument(parameter, message));
<ide> }
<ide>
<ide> @Test
<ide> public void resolveWrongMessageType() throws Exception {
<del> Message<? extends Throwable> message = new GenericMessage<Throwable>(
<del> new UnsupportedOperationException());
<del> MethodParameter parameter = new MethodParameter(method, 4);
<add> Message<? extends Throwable> message = new GenericMessage<Throwable>(new UnsupportedOperationException());
<add> MethodParameter parameter = new MethodParameter(this.method, 4);
<ide>
<del> assertTrue("Parameter '" + parameter + "' should be supported", resolver.supportsParameter(parameter));
<add> assertTrue("Parameter '" + parameter + "' should be supported", this.resolver.supportsParameter(parameter));
<ide> thrown.expect(MethodArgumentTypeMismatchException.class);
<ide> thrown.expectMessage(ErrorMessage.class.getName());
<ide> thrown.expectMessage(GenericMessage.class.getName());
<del> assertSame(message, resolver.resolveArgument(parameter, message));
<add> assertSame(message, this.resolver.resolveArgument(parameter, message));
<ide> }
<ide>
<ide> @SuppressWarnings("unused") | 5 |
Text | Text | add overview to debugging guide | 286c7e8f18a1742766701b3ee6ed7095e04620ae | <ide><path>docs/debugging.md
<ide> # Debugging
<ide>
<del>Atom provides several tools to help you understand unexpected behavior and debug problems. This guide describes some of those tools and a few approaches to help you debug and provide more helpful information when [submitting issues].
<add>Atom provides several tools to help you understand unexpected behavior and debug problems. This guide describes some of those tools and a few approaches to help you debug and provide more helpful information when [submitting issues]:
<add>
<add>* [Update to the latest version](#update-to-the-latest-version)
<add>* [Check Atom and package settings](#check-atom-and-package-settings)
<add>* [Check the keybindings](#check-the-keybindings)
<add>* [Check if the problem shows up in safe mode](#check-if-the-problem-shows-up-in-safe-mode)
<add>* [Check your config files](#check-your-config-files)
<add>* [Check for errors in the developer tools](#check-for-errors-in-the-developer-tools)
<ide>
<ide> ## Update to the latest version
<ide> | 1 |
PHP | PHP | remove default values for vanilla behavior | 48bd343eb570f207f6e4d9bbaa6bc7dc8386c100 | <ide><path>src/View/Helper/FormHelper.php
<ide> public function create($model = null, array $options = [])
<ide> 'encoding' => strtolower(Configure::read('App.encoding')),
<ide> 'templates' => null,
<ide> 'idPrefix' => null,
<del> 'values' => ['data', 'context'],
<add> 'valuesSources' => [],
<ide> ];
<ide>
<ide> if (isset($options['action'])) {
<ide> trigger_error('Using key `action` is deprecated, use `url` directly instead.', E_USER_DEPRECATED);
<ide> }
<ide>
<del> $this->setValuesSources($options['values']);
<add> $this->setValuesSources($options['valuesSources']);
<ide>
<ide> if ($options['idPrefix'] !== null) {
<ide> $this->_idPrefix = $options['idPrefix']; | 1 |
Mixed | Python | add multiplechoice to tftrainer [wip] | e4512aab3bffcbaa28d65e4eca7ab73d8fdd7889 | <ide><path>examples/README.md
<ide> This is still a work-in-progress – in particular documentation is still sparse
<ide>
<ide> ## Tasks built on Trainer
<ide>
<del>| Task | Example datasets | Trainer support | TFTrainer support | pytorch-lightning | Colab | One-click Deploy to Azure (wip) |
<add>| Task | Example datasets | Trainer support | TFTrainer support | pytorch-lightning | Colab | One-click Deploy to Azure (wip) |
<ide> |---|---|:---:|:---:|:---:|:---:|:---:|
<ide> | [`language-modeling`](./language-modeling) | Raw text | ✅ | - | - | - | - |
<ide> | [`text-classification`](./text-classification) | GLUE, XNLI | ✅ | ✅ | ✅ | [](https://colab.research.google.com/github/huggingface/blog/blob/master/notebooks/trainer/01_text_classification.ipynb) | [](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FAzure%2Fazure-quickstart-templates%2Fmaster%2F101-storage-account-create%2Fazuredeploy.json) |
<ide> | [`token-classification`](./token-classification) | CoNLL NER | ✅ | ✅ | ✅ | - | - |
<del>| [`multiple-choice`](./multiple-choice) | SWAG, RACE, ARC | ✅ | - | - | - | - |
<add>| [`multiple-choice`](./multiple-choice) | SWAG, RACE, ARC | ✅ | ✅ | - | [](https://colab.research.google.com/github/ViktorAlm/notebooks/blob/master/MPC_GPU_Demo_for_TF_and_PT.ipynb) | - |
<ide>
<ide>
<ide>
<ide><path>examples/multiple-choice/README.md
<ide> Training with the defined hyper-parameters yields the following results:
<ide> eval_acc = 0.8338998300509847
<ide> eval_loss = 0.44457291918821606
<ide> ```
<add>
<add>
<add>## Tensorflow
<add>
<add>```bash
<add>export SWAG_DIR=/path/to/swag_data_dir
<add>python ./examples/multiple-choice/run_tf_multiple_choice.py \
<add>--task_name swag \
<add>--model_name_or_path bert-base-cased \
<add>--do_train \
<add>--do_eval \
<add>--data_dir $SWAG_DIR \
<add>--learning_rate 5e-5 \
<add>--num_train_epochs 3 \
<add>--max_seq_length 80 \
<add>--output_dir models_bert/swag_base \
<add>--per_gpu_eval_batch_size=16 \
<add>--per_gpu_train_batch_size=16 \
<add>--logging_dir logs \
<add>--gradient_accumulation_steps 2 \
<add>--overwrite_output_dir
<add>```
<add>
<add>## Run it in Colab
<add>[](https://colab.research.google.com/github/ViktorAlm/notebooks/blob/master/MPC_GPU_Demo_for_TF_and_PT.ipynb)
<ide><path>examples/multiple-choice/run_tf_multiple_choice.py
<add># coding=utf-8
<add># Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team.
<add># Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
<add>#
<add># Licensed under the Apache License, Version 2.0 (the "License");
<add># you may not use this file except in compliance with the License.
<add># You may obtain a copy of the License at
<add>#
<add># http://www.apache.org/licenses/LICENSE-2.0
<add>#
<add># Unless required by applicable law or agreed to in writing, software
<add># distributed under the License is distributed on an "AS IS" BASIS,
<add># WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<add># See the License for the specific language governing permissions and
<add># limitations under the License.
<add>""" Finetuning the library models for multiple choice (Bert, Roberta, XLNet)."""
<add>
<add>
<add>import logging
<add>import os
<add>from dataclasses import dataclass, field
<add>from typing import Dict, Optional
<add>
<add>import numpy as np
<add>
<add>from transformers import (
<add> AutoConfig,
<add> AutoTokenizer,
<add> EvalPrediction,
<add> HfArgumentParser,
<add> TFAutoModelForMultipleChoice,
<add> TFTrainer,
<add> TFTrainingArguments,
<add> set_seed,
<add>)
<add>from utils_multiple_choice import Split, TFMultipleChoiceDataset, processors
<add>
<add>
<add>logger = logging.getLogger(__name__)
<add>
<add>
<add>def simple_accuracy(preds, labels):
<add> return (preds == labels).mean()
<add>
<add>
<add>@dataclass
<add>class ModelArguments:
<add> """
<add> Arguments pertaining to which model/config/tokenizer we are going to fine-tune from.
<add> """
<add>
<add> model_name_or_path: str = field(
<add> metadata={"help": "Path to pretrained model or model identifier from huggingface.co/models"}
<add> )
<add> config_name: Optional[str] = field(
<add> default=None, metadata={"help": "Pretrained config name or path if not the same as model_name"}
<add> )
<add> tokenizer_name: Optional[str] = field(
<add> default=None, metadata={"help": "Pretrained tokenizer name or path if not the same as model_name"}
<add> )
<add> cache_dir: Optional[str] = field(
<add> default=None, metadata={"help": "Where do you want to store the pretrained models downloaded from s3"}
<add> )
<add>
<add>
<add>@dataclass
<add>class DataTrainingArguments:
<add> """
<add> Arguments pertaining to what data we are going to input our model for training and eval.
<add> """
<add>
<add> task_name: str = field(metadata={"help": "The name of the task to train on: " + ", ".join(processors.keys())})
<add> data_dir: str = field(metadata={"help": "Should contain the data files for the task."})
<add> max_seq_length: int = field(
<add> default=128,
<add> metadata={
<add> "help": "The maximum total input sequence length after tokenization. Sequences longer "
<add> "than this will be truncated, sequences shorter will be padded."
<add> },
<add> )
<add> overwrite_cache: bool = field(
<add> default=False, metadata={"help": "Overwrite the cached training and evaluation sets"}
<add> )
<add>
<add>
<add>def main():
<add> # See all possible arguments in src/transformers/training_args.py
<add> # or by passing the --help flag to this script.
<add> # We now keep distinct sets of args, for a cleaner separation of concerns.
<add>
<add> parser = HfArgumentParser((ModelArguments, DataTrainingArguments, TFTrainingArguments))
<add> model_args, data_args, training_args = parser.parse_args_into_dataclasses()
<add>
<add> if (
<add> os.path.exists(training_args.output_dir)
<add> and os.listdir(training_args.output_dir)
<add> and training_args.do_train
<add> and not training_args.overwrite_output_dir
<add> ):
<add> raise ValueError(
<add> f"Output directory ({training_args.output_dir}) already exists and is not empty. Use --overwrite_output_dir to overcome."
<add> )
<add>
<add> # Setup logging
<add> logging.basicConfig(
<add> format="%(asctime)s - %(levelname)s - %(name)s - %(message)s",
<add> datefmt="%m/%d/%Y %H:%M:%S",
<add> level=logging.INFO,
<add> )
<add> logger.warning(
<add> "device: %s, n_gpu: %s, 16-bits training: %s", training_args.device, training_args.n_gpu, training_args.fp16,
<add> )
<add> logger.info("Training/evaluation parameters %s", training_args)
<add>
<add> # Set seed
<add> set_seed(training_args.seed)
<add>
<add> try:
<add> processor = processors[data_args.task_name]()
<add> label_list = processor.get_labels()
<add> num_labels = len(label_list)
<add> except KeyError:
<add> raise ValueError("Task not found: %s" % (data_args.task_name))
<add>
<add> # Load pretrained model and tokenizer
<add> #
<add> # Distributed training:
<add> # The .from_pretrained methods guarantee that only one local process can concurrently
<add> # download model & vocab.
<add> config = AutoConfig.from_pretrained(
<add> model_args.config_name if model_args.config_name else model_args.model_name_or_path,
<add> num_labels=num_labels,
<add> finetuning_task=data_args.task_name,
<add> cache_dir=model_args.cache_dir,
<add> )
<add> tokenizer = AutoTokenizer.from_pretrained(
<add> model_args.tokenizer_name if model_args.tokenizer_name else model_args.model_name_or_path,
<add> cache_dir=model_args.cache_dir,
<add> )
<add> with training_args.strategy.scope():
<add> model = TFAutoModelForMultipleChoice.from_pretrained(
<add> model_args.model_name_or_path,
<add> from_pt=bool(".bin" in model_args.model_name_or_path),
<add> config=config,
<add> cache_dir=model_args.cache_dir,
<add> )
<add> # Get datasets
<add> train_dataset = (
<add> TFMultipleChoiceDataset(
<add> data_dir=data_args.data_dir,
<add> tokenizer=tokenizer,
<add> task=data_args.task_name,
<add> max_seq_length=data_args.max_seq_length,
<add> overwrite_cache=data_args.overwrite_cache,
<add> mode=Split.train,
<add> )
<add> if training_args.do_train
<add> else None
<add> )
<add> eval_dataset = (
<add> TFMultipleChoiceDataset(
<add> data_dir=data_args.data_dir,
<add> tokenizer=tokenizer,
<add> task=data_args.task_name,
<add> max_seq_length=data_args.max_seq_length,
<add> overwrite_cache=data_args.overwrite_cache,
<add> mode=Split.dev,
<add> )
<add> if training_args.do_eval
<add> else None
<add> )
<add>
<add> def compute_metrics(p: EvalPrediction) -> Dict:
<add> preds = np.argmax(p.predictions, axis=1)
<add> return {"acc": simple_accuracy(preds, p.label_ids)}
<add>
<add> # Initialize our Trainer
<add> trainer = TFTrainer(
<add> model=model,
<add> args=training_args,
<add> train_dataset=train_dataset.get_dataset() if train_dataset else None,
<add> eval_dataset=eval_dataset.get_dataset() if eval_dataset else None,
<add> compute_metrics=compute_metrics,
<add> )
<add>
<add> # Training
<add> if training_args.do_train:
<add> trainer.train()
<add> trainer.save_model()
<add> tokenizer.save_pretrained(training_args.output_dir)
<add> # Evaluation
<add> results = {}
<add> if training_args.do_eval:
<add> logger.info("*** Evaluate ***")
<add>
<add> result = trainer.evaluate()
<add>
<add> output_eval_file = os.path.join(training_args.output_dir, "eval_results.txt")
<add> with open(output_eval_file, "w") as writer:
<add> logger.info("***** Eval results *****")
<add> for key, value in result.items():
<add> logger.info(" %s = %s", key, value)
<add> writer.write("%s = %s\n" % (key, value))
<add>
<add> results.update(result)
<add>
<add> return results
<add>
<add>
<add>if __name__ == "__main__":
<add> main()
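
As an editorial aside (not part of the commit), the metric wiring above is easy to check in isolation: `EvalPrediction` is just a named tuple of logits and label ids, and `compute_metrics` reduces it to an accuracy dict via an argmax over the choice dimension. All numbers below are made up.

```python
# Editorial sketch: exercising the compute_metrics logic with a dummy EvalPrediction.
import numpy as np
from transformers import EvalPrediction

# Fake logits for two examples with four answer choices each, plus gold labels.
predictions = np.array([[0.1, 2.3, -0.5, 0.0],
                        [1.7, 0.2, 0.1, -1.0]])
label_ids = np.array([1, 2])

p = EvalPrediction(predictions=predictions, label_ids=label_ids)
preds = np.argmax(p.predictions, axis=1)        # -> array([1, 0])
print({"acc": (preds == p.label_ids).mean()})   # -> {'acc': 0.5}
```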
<ide><path>examples/multiple-choice/utils_multiple_choice.py
<ide> from enum import Enum
<ide> from typing import List, Optional
<ide>
<del>import torch
<ide> import tqdm
<del>from torch.utils.data.dataset import Dataset
<ide>
<del>from transformers import PreTrainedTokenizer, torch_distributed_zero_first
<add>from transformers import PreTrainedTokenizer, is_tf_available, is_torch_available
<ide>
<ide>
<ide> logger = logging.getLogger(__name__)
<ide> class Split(Enum):
<ide> test = "test"
<ide>
<ide>
<del>class MultipleChoiceDataset(Dataset):
<del> """
<del> This will be superseded by a framework-agnostic approach
<del> soon.
<del> """
<del>
<del> features: List[InputFeatures]
<del>
<del> def __init__(
<del> self,
<del> data_dir: str,
<del> tokenizer: PreTrainedTokenizer,
<del> task: str,
<del> max_seq_length: Optional[int] = None,
<del> overwrite_cache=False,
<del> mode: Split = Split.train,
<del> local_rank=-1,
<del> ):
<del> processor = processors[task]()
<del>
<del> cached_features_file = os.path.join(
<del> data_dir,
<del> "cached_{}_{}_{}_{}".format(mode.value, tokenizer.__class__.__name__, str(max_seq_length), task,),
<del> )
<del> with torch_distributed_zero_first(local_rank):
<del> # Make sure only the first process in distributed training processes the dataset,
<del> # and the others will use the cache.
<add>if is_torch_available():
<add> import torch
<add> from torch.utils.data.dataset import Dataset
<add> from transformers import torch_distributed_zero_first
<add>
<add> class MultipleChoiceDataset(Dataset):
<add> """
<add> This will be superseded by a framework-agnostic approach
<add> soon.
<add> """
<add>
<add> features: List[InputFeatures]
<add>
<add> def __init__(
<add> self,
<add> data_dir: str,
<add> tokenizer: PreTrainedTokenizer,
<add> task: str,
<add> max_seq_length: Optional[int] = None,
<add> overwrite_cache=False,
<add> mode: Split = Split.train,
<add> local_rank=-1,
<add> ):
<add> processor = processors[task]()
<add>
<add> cached_features_file = os.path.join(
<add> data_dir,
<add> "cached_{}_{}_{}_{}".format(mode.value, tokenizer.__class__.__name__, str(max_seq_length), task,),
<add> )
<add> with torch_distributed_zero_first(local_rank):
<add> # Make sure only the first process in distributed training processes the dataset,
<add> # and the others will use the cache.
<ide>
<del> if os.path.exists(cached_features_file) and not overwrite_cache:
<del> logger.info(f"Loading features from cached file {cached_features_file}")
<del> self.features = torch.load(cached_features_file)
<del> else:
<del> logger.info(f"Creating features from dataset file at {data_dir}")
<del> label_list = processor.get_labels()
<del> if mode == Split.dev:
<del> examples = processor.get_dev_examples(data_dir)
<del> elif mode == Split.test:
<del> examples = processor.get_test_examples(data_dir)
<add> if os.path.exists(cached_features_file) and not overwrite_cache:
<add> logger.info(f"Loading features from cached file {cached_features_file}")
<add> self.features = torch.load(cached_features_file)
<ide> else:
<del> examples = processor.get_train_examples(data_dir)
<del> logger.info("Training examples: %s", len(examples))
<del> # TODO clean up all this to leverage built-in features of tokenizers
<del> self.features = convert_examples_to_features(
<del> examples,
<del> label_list,
<del> max_seq_length,
<del> tokenizer,
<del> pad_on_left=bool(tokenizer.padding_side == "left"),
<del> pad_token=tokenizer.pad_token_id,
<del> pad_token_segment_id=tokenizer.pad_token_type_id,
<del> )
<del> if local_rank in [-1, 0]:
<del> logger.info("Saving features into cached file %s", cached_features_file)
<del> torch.save(self.features, cached_features_file)
<add> logger.info(f"Creating features from dataset file at {data_dir}")
<add> label_list = processor.get_labels()
<add> if mode == Split.dev:
<add> examples = processor.get_dev_examples(data_dir)
<add> elif mode == Split.test:
<add> examples = processor.get_test_examples(data_dir)
<add> else:
<add> examples = processor.get_train_examples(data_dir)
<add> logger.info("Training examples: %s", len(examples))
<add> # TODO clean up all this to leverage built-in features of tokenizers
<add> self.features = convert_examples_to_features(
<add> examples,
<add> label_list,
<add> max_seq_length,
<add> tokenizer,
<add> pad_on_left=bool(tokenizer.padding_side == "left"),
<add> pad_token=tokenizer.pad_token_id,
<add> pad_token_segment_id=tokenizer.pad_token_type_id,
<add> )
<add> if local_rank in [-1, 0]:
<add> logger.info("Saving features into cached file %s", cached_features_file)
<add> torch.save(self.features, cached_features_file)
<add>
<add> def __len__(self):
<add> return len(self.features)
<add>
<add> def __getitem__(self, i) -> InputFeatures:
<add> return self.features[i]
<add>
<add>
<add>if is_tf_available():
<add> import tensorflow as tf
<add>
<add> class TFMultipleChoiceDataset:
<add> """
<add> This will be superseded by a framework-agnostic approach
<add> soon.
<add> """
<add>
<add> features: List[InputFeatures]
<add>
<add> def __init__(
<add> self,
<add> data_dir: str,
<add> tokenizer: PreTrainedTokenizer,
<add> task: str,
<add> max_seq_length: Optional[int] = 128,
<add> overwrite_cache=False,
<add> mode: Split = Split.train,
<add> ):
<add> processor = processors[task]()
<add>
<add> logger.info(f"Creating features from dataset file at {data_dir}")
<add> label_list = processor.get_labels()
<add> if mode == Split.dev:
<add> examples = processor.get_dev_examples(data_dir)
<add> elif mode == Split.test:
<add> examples = processor.get_test_examples(data_dir)
<add> else:
<add> examples = processor.get_train_examples(data_dir)
<add> logger.info("Training examples: %s", len(examples))
<add> # TODO clean up all this to leverage built-in features of tokenizers
<add> self.features = convert_examples_to_features(
<add> examples,
<add> label_list,
<add> max_seq_length,
<add> tokenizer,
<add> pad_on_left=bool(tokenizer.padding_side == "left"),
<add> pad_token=tokenizer.pad_token_id,
<add> pad_token_segment_id=tokenizer.pad_token_type_id,
<add> )
<add>
<add> def gen():
<add> for (ex_index, ex) in tqdm.tqdm(enumerate(self.features), desc="convert examples to features"):
<add> if ex_index % 10000 == 0:
<add> logger.info("Writing example %d of %d" % (ex_index, len(examples)))
<add>
<add> yield (
<add> {
<add> "example_id": 0,
<add> "input_ids": ex.input_ids,
<add> "attention_mask": ex.attention_mask,
<add> "token_type_ids": ex.token_type_ids,
<add> },
<add> ex.label,
<add> )
<add>
<add> self.dataset = tf.data.Dataset.from_generator(
<add> gen,
<add> (
<add> {
<add> "example_id": tf.int32,
<add> "input_ids": tf.int32,
<add> "attention_mask": tf.int32,
<add> "token_type_ids": tf.int32,
<add> },
<add> tf.int64,
<add> ),
<add> (
<add> {
<add> "example_id": tf.TensorShape([]),
<add> "input_ids": tf.TensorShape([None, None]),
<add> "attention_mask": tf.TensorShape([None, None]),
<add> "token_type_ids": tf.TensorShape([None, None]),
<add> },
<add> tf.TensorShape([]),
<add> ),
<add> )
<add>
<add> def get_dataset(self):
<add> return self.dataset
<ide>
<del> def __len__(self):
<del> return len(self.features)
<add> def __len__(self):
<add> return len(self.features)
<ide>
<del> def __getitem__(self, i) -> InputFeatures:
<del> return self.features[i]
<add> def __getitem__(self, i) -> InputFeatures:
<add> return self.features[i]
<ide>
<ide>
<ide> class DataProcessor:
<ide> def _create_examples(self, lines, set_type):
<ide> return examples
<ide>
<ide>
<add>class SynonymProcessor(DataProcessor):
<add> """Processor for the Synonym data set."""
<add>
<add> def get_train_examples(self, data_dir):
<add> """See base class."""
<add> logger.info("LOOKING AT {} train".format(data_dir))
<add> return self._create_examples(self._read_csv(os.path.join(data_dir, "mctrain.csv")), "train")
<add>
<add> def get_dev_examples(self, data_dir):
<add> """See base class."""
<add> logger.info("LOOKING AT {} dev".format(data_dir))
<add> return self._create_examples(self._read_csv(os.path.join(data_dir, "mchp.csv")), "dev")
<add>
<add> def get_test_examples(self, data_dir):
<add> """See base class."""
<add> logger.info("LOOKING AT {} test".format(data_dir))
<add>
<add> return self._create_examples(self._read_csv(os.path.join(data_dir, "mctest.csv")), "test")
<add>
<add> def get_labels(self):
<add> """See base class."""
<add> return ["0", "1", "2", "3", "4"]
<add>
<add> def _read_csv(self, input_file):
<add> with open(input_file, "r", encoding="utf-8") as f:
<add> return list(csv.reader(f))
<add>
<add> def _create_examples(self, lines: List[List[str]], type: str):
<add> """Creates examples for the training and dev sets."""
<add>
<add> examples = [
<add> InputExample(
<add> example_id=line[0],
<add> question="", # in the swag dataset, the
<add> # common beginning of each
<add> # choice is stored in "sent2".
<add> contexts=[line[1], line[1], line[1], line[1], line[1]],
<add> endings=[line[2], line[3], line[4], line[5], line[6]],
<add> label=line[7],
<add> )
<add> for line in lines # we skip the line with the column names
<add> ]
<add>
<add> return examples
<add>
<add>
<ide> class SwagProcessor(DataProcessor):
<ide> """Processor for the SWAG data set."""
<ide>
<ide> def convert_examples_to_features(
<ide> return features
<ide>
<ide>
<del>processors = {"race": RaceProcessor, "swag": SwagProcessor, "arc": ArcProcessor}
<del>
<del>
<del>MULTIPLE_CHOICE_TASKS_NUM_LABELS = {"race", 4, "swag", 4, "arc", 4}
<add>processors = {"race": RaceProcessor, "swag": SwagProcessor, "arc": ArcProcessor, "syn": SynonymProcessor}
<add>MULTIPLE_CHOICE_TASKS_NUM_LABELS = {"race": 4, "swag": 4, "arc": 4, "syn": 5}
<ide><path>src/transformers/__init__.py
<ide> from .modeling_tf_auto import (
<ide> TFAutoModel,
<ide> TFAutoModelForPreTraining,
<add> TFAutoModelForMultipleChoice,
<ide> TFAutoModelForSequenceClassification,
<ide> TFAutoModelForQuestionAnswering,
<ide> TFAutoModelWithLMHead,
<ide> TFAlbertModel,
<ide> TFAlbertForPreTraining,
<ide> TFAlbertForMaskedLM,
<add> TFAlbertForMultipleChoice,
<ide> TFAlbertForSequenceClassification,
<ide> TFAlbertForQuestionAnswering,
<ide> TF_ALBERT_PRETRAINED_MODEL_ARCHIVE_MAP,
<ide><path>src/transformers/modeling_tf_albert.py
<ide> import tensorflow as tf
<ide>
<ide> from .configuration_albert import AlbertConfig
<del>from .file_utils import add_start_docstrings, add_start_docstrings_to_callable
<add>from .file_utils import MULTIPLE_CHOICE_DUMMY_INPUTS, add_start_docstrings, add_start_docstrings_to_callable
<ide> from .modeling_tf_bert import ACT2FN, TFBertSelfAttention
<ide> from .modeling_tf_utils import TFPreTrainedModel, get_initializer, keras_serializable, shape_list
<ide> from .tokenization_utils import BatchEncoding
<ide> def call(self, inputs, **kwargs):
<ide> outputs = (start_logits, end_logits,) + outputs[2:]
<ide>
<ide> return outputs # start_logits, end_logits, (hidden_states), (attentions)
<add>
<add>
<add>@add_start_docstrings(
<add> """Albert Model with a multiple choice classification head on top (a linear layer on top of
<add> the pooled output and a softmax) e.g. for RocStories/SWAG tasks. """,
<add> ALBERT_START_DOCSTRING,
<add>)
<add>class TFAlbertForMultipleChoice(TFAlbertPreTrainedModel):
<add> def __init__(self, config, *inputs, **kwargs):
<add> super().__init__(config, *inputs, **kwargs)
<add>
<add> self.albert = TFAlbertMainLayer(config, name="albert")
<add> self.dropout = tf.keras.layers.Dropout(config.hidden_dropout_prob)
<add> self.classifier = tf.keras.layers.Dense(
<add> 1, kernel_initializer=get_initializer(config.initializer_range), name="classifier"
<add> )
<add>
<add> @property
<add> def dummy_inputs(self):
<add> """ Dummy inputs to build the network.
<add>
<add> Returns:
<add> tf.Tensor with dummy inputs
<add> """
<add> return {"input_ids": tf.constant(MULTIPLE_CHOICE_DUMMY_INPUTS)}
<add>
<add> @add_start_docstrings_to_callable(ALBERT_INPUTS_DOCSTRING)
<add> def call(
<add> self,
<add> inputs,
<add> attention_mask=None,
<add> token_type_ids=None,
<add> position_ids=None,
<add> head_mask=None,
<add> inputs_embeds=None,
<add> training=False,
<add> ):
<add> r"""
<add> Return:
<add> :obj:`tuple(tf.Tensor)` comprising various elements depending on the configuration (:class:`~transformers.BertConfig`) and inputs:
<add> classification_scores (:obj:`Numpy array` or :obj:`tf.Tensor` of shape :obj:`(batch_size, num_choices)`:
<add> `num_choices` is the size of the second dimension of the input tensors. (see `input_ids` above).
<add>
<add> Classification scores (before SoftMax).
<add> hidden_states (:obj:`tuple(tf.Tensor)`, `optional`, returned when :obj:`config.output_hidden_states=True`):
<add> tuple of :obj:`tf.Tensor` (one for the output of the embeddings + one for the output of each layer)
<add> of shape :obj:`(batch_size, sequence_length, hidden_size)`.
<add>
<add> Hidden-states of the model at the output of each layer plus the initial embedding outputs.
<add> attentions (:obj:`tuple(tf.Tensor)`, `optional`, returned when ``config.output_attentions=True``):
<add> tuple of :obj:`tf.Tensor` (one for each layer) of shape
<add> :obj:`(batch_size, num_heads, sequence_length, sequence_length)`:
<add>
<add> Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads.
<add>
<add> Examples::
<add>
<add> import tensorflow as tf
<add> from transformers import AlbertTokenizer, TFAlbertForMultipleChoice
<add>
<add> tokenizer = AlbertTokenizer.from_pretrained('albert-base-v2')
<add> model = TFAlbertForMultipleChoice.from_pretrained('albert-base-v2')
<add>
<add> example1 = ["This is a context", "Is it a context? Yes"]
<add> example2 = ["This is a context", "Is it a context? No"]
<add> encoding = tokenizer.batch_encode_plus([example1, example2], return_tensors='tf', truncation_strategy="only_first", pad_to_max_length=True, max_length=128)
<add> outputs = model(encoding["input_ids"][None, :])
<add> logits = outputs[0]
<add>
<add> """
<add> if isinstance(inputs, (tuple, list)):
<add> input_ids = inputs[0]
<add> attention_mask = inputs[1] if len(inputs) > 1 else attention_mask
<add> token_type_ids = inputs[2] if len(inputs) > 2 else token_type_ids
<add> position_ids = inputs[3] if len(inputs) > 3 else position_ids
<add> head_mask = inputs[4] if len(inputs) > 4 else head_mask
<add> inputs_embeds = inputs[5] if len(inputs) > 5 else inputs_embeds
<add> assert len(inputs) <= 6, "Too many inputs."
<add> elif isinstance(inputs, dict):
<add> input_ids = inputs.get("input_ids")
<add> attention_mask = inputs.get("attention_mask", attention_mask)
<add> token_type_ids = inputs.get("token_type_ids", token_type_ids)
<add> position_ids = inputs.get("position_ids", position_ids)
<add> head_mask = inputs.get("head_mask", head_mask)
<add> inputs_embeds = inputs.get("inputs_embeds", inputs_embeds)
<add> assert len(inputs) <= 6, "Too many inputs."
<add> else:
<add> input_ids = inputs
<add>
<add> if input_ids is not None:
<add> num_choices = shape_list(input_ids)[1]
<add> seq_length = shape_list(input_ids)[2]
<add> else:
<add> num_choices = shape_list(inputs_embeds)[1]
<add> seq_length = shape_list(inputs_embeds)[2]
<add>
<add> flat_input_ids = tf.reshape(input_ids, (-1, seq_length)) if input_ids is not None else None
<add> flat_attention_mask = tf.reshape(attention_mask, (-1, seq_length)) if attention_mask is not None else None
<add> flat_token_type_ids = tf.reshape(token_type_ids, (-1, seq_length)) if token_type_ids is not None else None
<add> flat_position_ids = tf.reshape(position_ids, (-1, seq_length)) if position_ids is not None else None
<add>
<add> flat_inputs = [
<add> flat_input_ids,
<add> flat_attention_mask,
<add> flat_token_type_ids,
<add> flat_position_ids,
<add> head_mask,
<add> inputs_embeds,
<add> ]
<add>
<add> outputs = self.albert(flat_inputs, training=training)
<add>
<add> pooled_output = outputs[1]
<add>
<add> pooled_output = self.dropout(pooled_output, training=training)
<add> logits = self.classifier(pooled_output)
<add> reshaped_logits = tf.reshape(logits, (-1, num_choices))
<add>
<add> outputs = (reshaped_logits,) + outputs[2:] # add hidden states and attention if they are here
<add>
<add> return outputs # reshaped_logits, (hidden_states), (attentions)
<ide><path>src/transformers/modeling_tf_auto.py
<ide> from .modeling_tf_albert import (
<ide> TF_ALBERT_PRETRAINED_MODEL_ARCHIVE_MAP,
<ide> TFAlbertForMaskedLM,
<add> TFAlbertForMultipleChoice,
<ide> TFAlbertForPreTraining,
<ide> TFAlbertForQuestionAnswering,
<ide> TFAlbertForSequenceClassification,
<ide> from .modeling_tf_bert import (
<ide> TF_BERT_PRETRAINED_MODEL_ARCHIVE_MAP,
<ide> TFBertForMaskedLM,
<add> TFBertForMultipleChoice,
<ide> TFBertForPreTraining,
<ide> TFBertForQuestionAnswering,
<ide> TFBertForSequenceClassification,
<ide> ]
<ide> )
<ide>
<add>TF_MODEL_FOR_MULTIPLE_CHOICE_MAPPING = OrderedDict(
<add> [(BertConfig, TFBertForMultipleChoice), (AlbertConfig, TFAlbertForMultipleChoice)]
<add>)
<add>
<ide> TF_MODEL_FOR_QUESTION_ANSWERING_MAPPING = OrderedDict(
<ide> [
<ide> (DistilBertConfig, TFDistilBertForQuestionAnswering),
<ide> def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs):
<ide> )
<ide>
<ide>
<add>class TFAutoModelForMultipleChoice:
<add> r"""
<add> :class:`~transformers.TFAutoModelForMultipleChoice` is a generic model class
<add> that will be instantiated as one of the multiple choice model classes of the library
<add> when created with the `TFAutoModelForMultipleChoice.from_pretrained(pretrained_model_name_or_path)`
<add> class method.
<add>
<add> The `from_pretrained()` method takes care of returning the correct model class instance
<add> based on the `model_type` property of the config object, or when it's missing,
<add> falling back to using pattern matching on the `pretrained_model_name_or_path` string.
<add>
<add> The model class to instantiate is selected as the first pattern matching
<add> in the `pretrained_model_name_or_path` string (in the following order):
<add> - contains `albert`: TFAlbertForMultipleChoice (Albert model)
<add> - contains `bert`: TFBertForMultipleChoice (Bert model)
<add>
<add> This class cannot be instantiated using `__init__()` (throws an error).
<add> """
<add>
<add> def __init__(self):
<add> raise EnvironmentError(
<add> "TFAutoModelForMultipleChoice is designed to be instantiated "
<add> "using the `TFAutoModelForMultipleChoice.from_pretrained(pretrained_model_name_or_path)` or "
<add> "`TFAutoModelForMultipleChoice.from_config(config)` methods."
<add> )
<add>
<add> @classmethod
<add> def from_config(cls, config):
<add> r""" Instantiates one of the base model classes of the library
<add> from a configuration.
<add>
<add> config: (`optional`) instance of a class derived from :class:`~transformers.PretrainedConfig`:
<add> The model class to instantiate is selected based on the configuration class:
<add> - isInstance of `albert` configuration class: AlbertModel (Albert model)
<add> - isInstance of `bert` configuration class: BertModel (Bert model)
<add>
<add> Examples::
<add>
<add> config = BertConfig.from_pretrained('bert-base-uncased') # Download configuration from S3 and cache.
<add> model = AutoModelForMulitpleChoice.from_config(config) # E.g. model was saved using `save_pretrained('./test/saved_model/')`
<add> """
<add> for config_class, model_class in TF_MODEL_FOR_MULTIPLE_CHOICE_MAPPING.items():
<add> if isinstance(config, config_class):
<add> return model_class(config)
<add> raise ValueError(
<add> "Unrecognized configuration class {} for this kind of TFAutoModel: {}.\n"
<add> "Model type should be one of {}.".format(
<add> config.__class__,
<add> cls.__name__,
<add> ", ".join(c.__name__ for c in TF_MODEL_FOR_MULTIPLE_CHOICE_MAPPING.keys()),
<add> )
<add> )
<add>
<add> @classmethod
<add> def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs):
<add> r""" Instantiates one of the multiple choice model classes of the library
<add> from a pre-trained model configuration.
<add>
<add> The `from_pretrained()` method takes care of returning the correct model class instance
<add> based on the `model_type` property of the config object, or when it's missing,
<add> falling back to using pattern matching on the `pretrained_model_name_or_path` string.
<add>
<add> The model class to instantiate is selected as the first pattern matching
<add> in the `pretrained_model_name_or_path` string (in the following order):
<add>            - contains `albert`: TFAlbertForMultipleChoice (Albert model)
<add> - contains `bert`: TFBertForMultipleChoice (Bert model)
<add>
<add> The model is set in evaluation mode by default using `model.eval()` (Dropout modules are deactivated)
<add> To train the model, you should first set it back in training mode with `model.train()`
<add>
<add> Params:
<add> pretrained_model_name_or_path: either:
<add>
<add> - a string with the `shortcut name` of a pre-trained model to load from cache or download, e.g.: ``bert-base-uncased``.
<add> - a string with the `identifier name` of a pre-trained model that was user-uploaded to our S3, e.g.: ``dbmdz/bert-base-german-cased``.
<add> - a path to a `directory` containing model weights saved using :func:`~transformers.PreTrainedModel.save_pretrained`, e.g.: ``./my_model_directory/``.
<add> - a path or url to a `PyTorch, TF 1.X or TF 2.0 checkpoint file` (e.g. `./tf_model/model.ckpt.index`). In the case of a PyTorch checkpoint, ``from_pt`` should be set to True and a configuration object should be provided as ``config`` argument.
<add>
<add> from_pt: (`Optional`) Boolean
<add> Set to True if the Checkpoint is a PyTorch checkpoint.
<add>
<add> model_args: (`optional`) Sequence of positional arguments:
<add>                All remaining positional arguments will be passed to the underlying model's ``__init__`` method
<add>
<add> config: (`optional`) instance of a class derived from :class:`~transformers.PretrainedConfig`:
<add>                Configuration for the model to use instead of an automatically loaded configuration. Configuration can be automatically loaded when:
<add>
<add> - the model is a model provided by the library (loaded with the ``shortcut-name`` string of a pretrained model), or
<add>                - the model was saved using :func:`~transformers.PreTrainedModel.save_pretrained` and is reloaded by supplying the save directory.
<add>                - the model is loaded by supplying a local directory as ``pretrained_model_name_or_path`` and a configuration JSON file named `config.json` is found in the directory.
<add>
<add> state_dict: (`optional`) dict:
<add>                an optional state dictionary for the model to use instead of a state dictionary loaded from saved weights file.
<add> This option can be used if you want to create a model from a pretrained configuration but load your own weights.
<add> In this case though, you should check if using :func:`~transformers.PreTrainedModel.save_pretrained` and :func:`~transformers.PreTrainedModel.from_pretrained` is not a simpler option.
<add>
<add> cache_dir: (`optional`) string:
<add> Path to a directory in which a downloaded pre-trained model
<add> configuration should be cached if the standard cache should not be used.
<add>
<add> force_download: (`optional`) boolean, default False:
<add> Force to (re-)download the model weights and configuration files and override the cached versions if they exists.
<add>
<add> resume_download: (`optional`) boolean, default False:
<add>                Do not delete incompletely received file. Attempt to resume the download if such a file exists.
<add>
<add> proxies: (`optional`) dict, default None:
<add> A dictionary of proxy servers to use by protocol or endpoint, e.g.: {'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}.
<add> The proxies are used on each request.
<add>
<add> output_loading_info: (`optional`) boolean:
<add>                Set to ``True`` to also return a dictionary containing missing keys, unexpected keys and error messages.
<add>
<add> kwargs: (`optional`) Remaining dictionary of keyword arguments:
<add> Can be used to update the configuration object (after it being loaded) and initiate the model. (e.g. ``output_attention=True``). Behave differently depending on whether a `config` is provided or automatically loaded:
<add>
<add> - If a configuration is provided with ``config``, ``**kwargs`` will be directly passed to the underlying model's ``__init__`` method (we assume all relevant updates to the configuration have already been done)
<add> - If a configuration is not provided, ``kwargs`` will be first passed to the configuration class initialization function (:func:`~transformers.PretrainedConfig.from_pretrained`). Each key of ``kwargs`` that corresponds to a configuration attribute will be used to override said attribute with the supplied ``kwargs`` value. Remaining keys that do not correspond to any configuration attribute will be passed to the underlying model's ``__init__`` function.
<add>
<add> Examples::
<add>
<add>            model = TFAutoModelForMultipleChoice.from_pretrained('bert-base-uncased')    # Download model and configuration from S3 and cache.
<add>            model = TFAutoModelForMultipleChoice.from_pretrained('./test/bert_model/')  # E.g. model was saved using `save_pretrained('./test/saved_model/')`
<add>            model = TFAutoModelForMultipleChoice.from_pretrained('bert-base-uncased', output_attention=True)  # Update configuration during loading
<add> assert model.config.output_attention == True
<add> # Loading from a TF checkpoint file instead of a PyTorch model (slower)
<add> config = AutoConfig.from_json_file('./tf_model/bert_tf_model_config.json')
<add>            model = TFAutoModelForMultipleChoice.from_pretrained('./pt_model/bert_pytorch_model.bin', from_pt=True, config=config)
<add>
<add> """
<add> config = kwargs.pop("config", None)
<add> if not isinstance(config, PretrainedConfig):
<add> config = AutoConfig.from_pretrained(pretrained_model_name_or_path, **kwargs)
<add>
<add> for config_class, model_class in TF_MODEL_FOR_MULTIPLE_CHOICE_MAPPING.items():
<add> if isinstance(config, config_class):
<add> return model_class.from_pretrained(pretrained_model_name_or_path, *model_args, config=config, **kwargs)
<add> raise ValueError(
<add> "Unrecognized configuration class {} for this kind of TFAutoModel: {}.\n"
<add> "Model type should be one of {}.".format(
<add> config.__class__,
<add> cls.__name__,
<add> ", ".join(c.__name__ for c in TF_MODEL_FOR_MULTIPLE_CHOICE_MAPPING.keys()),
<add> )
<add> )
<add>
<add>
<ide> class TFAutoModelForSequenceClassification(object):
<ide> r"""
<ide> :class:`~transformers.TFAutoModelForSequenceClassification` is a generic model class
<ide><path>src/transformers/trainer_tf.py
<ide> def _create_optimizer(self) -> None:
<ide> in the Tensorflow documentation and those contained in the transformers library.
<ide> """
<ide> if self.args.optimizer_name == "adamw":
<del> self.optimizer = create_optimizer(self.args.learning_rate, self.train_steps, self.args.warmup_steps)
<add> self.optimizer = create_optimizer(
<add> self.args.learning_rate, self.train_steps, self.args.warmup_steps, self.args.end_lr
<add> )
<ide> else:
<ide> try:
<ide> self.optimizer = tf.keras.optimizers.get(
<ide> def _create_optimizer(self) -> None:
<ide> self.optimizer = tf.keras.optimizers.get(
<ide> {"class_name": self.args.optimizer_name, "config": {"learning_rate": self.args.learning_rate}}
<ide> )
<add>        logger.info("Created a {} optimizer".format(self.optimizer))
<ide>
<ide> def _create_checkpoint_manager(self, max_to_keep: int = 5, load_model: bool = True) -> None:
<ide> """
<ide> def _create_checkpoint_manager(self, max_to_keep: int = 5, load_model: bool = Tr
<ide> load_model: if we want to start the training from the latest checkpoint.
<ide> """
<ide> ckpt = tf.train.Checkpoint(optimizer=self.optimizer, model=self.model)
<add>
<ide> self.model.ckpt_manager = tf.train.CheckpointManager(ckpt, PREFIX_CHECKPOINT_DIR, max_to_keep=max_to_keep)
<ide>
<ide> if load_model:
<ide> def save_model(self) -> None:
<ide>
<ide> path = os.path.join(self.args.output_dir, "saved_model")
<ide>
<add> logger.info("Saving model in {}".format(path))
<ide> os.makedirs(path, exist_ok=True)
<ide> self.model.save_pretrained(self.args.output_dir)
<ide><path>src/transformers/training_args_tf.py
<ide> class TFTrainingArguments(TrainingArguments):
<ide> "help": "Name of a Tensorflow loss. For the list see: https://www.tensorflow.org/api_docs/python/tf/keras/losses"
<ide> },
<ide> )
<add> tpu_name: str = field(
<add> default=None, metadata={"help": "Name of TPU"},
<add> )
<add> end_lr: float = field(
<add> default=0, metadata={"help": "End learning rate for optimizer"},
<add> )
<ide> eval_steps: int = field(default=1000, metadata={"help": "Run an evaluation every X steps."})
<ide> debug: bool = field(
<ide> default=False, metadata={"help": "Activate the trace to record computation graphs and profiling information"}
<ide> def _setup_strategy(self) -> Tuple["tf.distribute.Strategy", int]:
<ide> strategy = tf.distribute.OneDeviceStrategy(device="/cpu:0")
<ide> else:
<ide> try:
<del> tpu = tf.distribute.cluster_resolver.TPUClusterResolver()
<add> if self.tpu_name:
<add> tpu = tf.distribute.cluster_resolver.TPUClusterResolver(self.tpu_name)
<add> else:
<add> tpu = tf.distribute.cluster_resolver.TPUClusterResolver()
<ide> except ValueError:
<ide> tpu = None
<ide> | 9 |
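The docstrings in the patch above describe how the new `TFAutoModelForMultipleChoice` class resolves a concrete model from either a config object or a checkpoint name. A minimal usage sketch follows; it assumes a transformers release that exports this class together with TensorFlow support, and `bert-base-uncased` is just the example identifier taken from the docstring.

```python
# Illustrative only: assumes `transformers` (with TF support) is installed and
# that TFAutoModelForMultipleChoice is exported, as in the patch above.
from transformers import AutoConfig, TFAutoModelForMultipleChoice

# from_pretrained() picks TFBertForMultipleChoice based on the config's model type
# and loads the pretrained weights.
model = TFAutoModelForMultipleChoice.from_pretrained("bert-base-uncased")

# from_config() only builds the architecture; the weights stay randomly initialised.
config = AutoConfig.from_pretrained("bert-base-uncased")
untrained = TFAutoModelForMultipleChoice.from_config(config)
```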
PHP | PHP | apply fixes from styleci | 247e96b7adb8fb92f32a5420dcf0fbb33e6a2475 | <ide><path>src/Illuminate/Support/Str.php
<ide> class Str
<ide> */
<ide> public static function after($subject, $search)
<ide> {
<del> return $search == "" ? $subject : array_reverse(explode($search, $subject, 2))[0];
<add> return $search == '' ? $subject : array_reverse(explode($search, $subject, 2))[0];
<ide> }
<ide>
<ide> /**
<ide> public static function ascii($value, $language = 'en')
<ide> */
<ide> public static function before($subject, $search)
<ide> {
<del> return $search == "" ? $subject : explode($search, $subject)[0];
<add> return $search == '' ? $subject : explode($search, $subject)[0];
<ide> }
<ide>
<ide> /** | 1 |
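The two helpers touched above are thin wrappers around `explode`. As a neutral illustration (the patch itself is PHP, so these Python functions are only hypothetical stand-ins), the same behaviour, including the empty-search early return, can be sketched as:

```python
def str_after(subject: str, search: str) -> str:
    # Mirror of Str::after: an empty search term returns the subject unchanged.
    if search == "":
        return subject
    # Keep everything after the first occurrence; when there is no match,
    # splitting yields a single element and the subject comes back untouched.
    return subject.split(search, 1)[-1]


def str_before(subject: str, search: str) -> str:
    # Mirror of Str::before: an empty search term returns the subject unchanged.
    if search == "":
        return subject
    return subject.split(search, 1)[0]


assert str_after("hannah@example.com", "@") == "example.com"
assert str_before("hannah@example.com", "@") == "hannah"
assert str_after("no-delimiter-here", "@") == "no-delimiter-here"
```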
PHP | PHP | fix styling issue | e5f217135464a7de2b11d6e11ca7e5940678ecf9 | <ide><path>src/Illuminate/Routing/Router.php
<ide> protected function runRoute(Request $request, Route $route)
<ide> $this->runRouteWithinStack($route, $request)
<ide> );
<ide> } catch (ModelNotFoundException $exception) {
<del> if($route->getElse()) {
<add> if ($route->getElse()) {
<ide> return $route->getElse();
<ide> }
<ide> | 1 |
Python | Python | clarify meaning of mro | 1ffb4361f4b293e9a6b37351f32fed645330bba7 | <ide><path>keras/engine/training.py
<ide> def __reduce__(self):
<ide> # it _may_ be possible to serialize as a plain Python object,
<ide> # as long as the constituent parts (layers, optimizers, losses, etc.)
<ide> # can be serialized as plain Python objects.
<del> # Thus we call up the MRO to get an implementation of __reduce__
<del> # to try to pickle this Model as a plain Python object.
<add> # Thus we call up the superclass hierarchy to get an implementation of
<add> # __reduce__ that can pickle this Model as a plain Python object.
<ide> return super(Model, self).__reduce__()
<ide>
<ide> def __deepcopy__(self, memo): | 1 |
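The comment rewritten above is about delegating to the default `__reduce__` further up the class hierarchy so that pickling falls back to plain-object semantics. A toy sketch of that delegation (not Keras code; the class below is invented purely for illustration) is:

```python
import pickle


class TrackedModel:
    """Stand-in object whose custom __reduce__ simply defers to the default one."""

    def __init__(self, layers):
        self.layers = layers

    def __reduce__(self):
        # A specialised serialisation path could be chosen here; when it does not
        # apply, defer to the implementation provided by the superclass hierarchy,
        # which pickles the instance as a plain Python object (its __dict__).
        return super().__reduce__()


restored = pickle.loads(pickle.dumps(TrackedModel(layers=["dense", "relu"])))
assert restored.layers == ["dense", "relu"]
```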
Javascript | Javascript | fix broken test | 03ef5b041d3120dccfa58ef78c175e5c9b59c892 | <ide><path>src/test/locale/it.js
<ide> test('format', function (assert) {
<ide> ['M Mo MM MMMM MMM', '2 2º 02 febbraio feb'],
<ide> ['YYYY YY', '2010 10'],
<ide> ['D Do DD', '14 14º 14'],
<del> ['d do dddd ddd dd', '0 0º Domenica Dom D'],
<add> ['d do dddd ddd dd', '0 0º Domenica Dom Do'],
<ide> ['DDD DDDo DDDD', '45 45º 045'],
<ide> ['w wo ww', '6 6º 06'],
<ide> ['h hh', '3 03'], | 1 |
Ruby | Ruby | remove explicit builder require | 3c78d0b1ebfa87dfb80b8ac005554ece49cf2e34 | <ide><path>actionpack/lib/action_view.rb
<ide> #++
<ide>
<ide> $:.unshift(File.dirname(__FILE__) + "/action_view/vendor")
<del>require 'action_view/vendor/builder'
<del>
<ide> require 'action_view/base'
<ide> require 'action_view/partials'
<ide> | 1 |
Text | Text | remove shirtstarter as an example application | 3efa02da919224eaa235cd19dacf64aa80d02e21 | <ide><path>docs/docs/examples.md
<ide> prev: complementary-tools.html
<ide> * **[Instagram.com](http://instagram.com/)** is 100% built on React, both public site and internal tools.
<ide> * **[Facebook.com](http://www.facebook.com/)**'s commenting interface, business management tools, [Lookback video editor](http://facebook.com/lookback/edit), page insights, and most, if not all, new JS development.
<ide> * **[Khan Academy](http://khanacademy.org/)** uses React for most new JS development.
<del>* **[Shirtstarter](https://www.shirtstarter.com/)** is 100% built on React.
<ide> * **[Sberbank](http://sberbank.ru/moscow/ru/person/)**, Russia's number one bank, is built with React.
<ide> * **[The New York Times's 2014 Red Carpet Project](http://www.nytimes.com/interactive/2014/02/02/fashion/red-carpet-project.html?_r=0)** is built with React.
<ide> | 1 |
PHP | PHP | remove temporary variable | 5deeb7febb93729cc43a96b3c0218be347af132f | <ide><path>src/Illuminate/Http/Request.php
<ide> public static function createFromBase(SymfonyRequest $request)
<ide> return $request;
<ide> }
<ide>
<del> $content = $request->content;
<del>
<ide> $newRequest = (new static)->duplicate(
<ide> $request->query->all(), $request->request->all(), $request->attributes->all(),
<ide> $request->cookies->all(), $request->files->all(), $request->server->all()
<ide> );
<ide>
<ide> $newRequest->headers->replace($request->headers->all());
<ide>
<del> $newRequest->content = $content;
<add> $newRequest->content = $request->content;
<ide>
<ide> $newRequest->request = $newRequest->getInputSource();
<ide> | 1 |
Javascript | Javascript | move unicode translation cache into font | 0b6c0db9310376f04658a45bb19b2c2791064082 | <ide><path>fonts.js
<ide> var Fonts = (function Fonts() {
<ide> this.properties = properties;
<ide> this.id = fontCount++;
<ide> this.loading = true;
<del> this.charsCache = Object.create(null);
<ide> this.sizes = [];
<ide> }
<ide>
<ide> var current;
<del> var charsCache;
<ide> var measureCache;
<ide>
<ide> return {
<ide> var Fonts = (function Fonts() {
<ide> // |current| can be null is fontName is a built-in font
<ide> // (e.g. "sans-serif")
<ide> if (fontObj && (current = fonts[fontObj.id])) {
<del> charsCache = current.charsCache;
<ide> var sizes = current.sizes;
<ide> if (!(measureCache = sizes[size]))
<ide> measureCache = sizes[size] = Object.create(null);
<ide> } else {
<del> charsCache = null;
<ide> measureCache = null
<ide> }
<ide>
<ide> ctx.font = (size * kScalePrecision) + 'px "' + fontName + '"';
<ide> },
<del> charsToUnicode: function fonts_chars2Unicode(chars) {
<del> if (!charsCache)
<del> return chars;
<del>
<del> // if we translated this string before, just grab it from the cache
<del> var str = charsCache[chars];
<del> if (str)
<del> return str;
<del>
<del> // translate the string using the font's encoding
<del> var encoding = current ? current.properties.encoding : null;
<del> if (!encoding)
<del> return chars;
<del>
<del> str = '';
<del> for (var i = 0; i < chars.length; ++i) {
<del> var charcode = chars.charCodeAt(i);
<del> var unicode = encoding[charcode];
<del>
<del> // Check if the glyph has already been converted
<del> if (!IsNum(unicode))
<del> unicode = encoding[unicode] = GlyphsUnicode[unicode.name];
<del>
<del> // Handle surrogate pairs
<del> if (unicode > 0xFFFF) {
<del> str += String.fromCharCode(unicode & 0xFFFF);
<del> unicode >>= 16;
<del> }
<del> str += String.fromCharCode(unicode);
<del> }
<del>
<del> // Enter the translated string into the cache
<del> return charsCache[chars] = str;
<del> },
<ide> measureText: function fonts_measureText(text) {
<ide> var width;
<ide> if (measureCache && (width = measureCache[text]))
<ide> var Font = (function() {
<ide> styleSheet.insertRule(rule, styleSheet.cssRules.length);
<ide>
<ide> return rule;
<add> },
<add>
<add> charsToUnicode: function fonts_chars2Unicode(chars) {
<add> var charsCache = this.charsCache;
<add>
<add> // if we translated this string before, just grab it from the cache
<add> if (charsCache) {
<add> var str = charsCache[chars];
<add> if (str)
<add> return str;
<add> }
<add>
<add> // translate the string using the font's encoding
<add> var encoding = this.encoding;
<add> if (!encoding)
<add> return chars;
<add>
<add> // lazily create the translation cache
<add> if (!charsCache)
<add> charsCache = this.charsCache = Object.create(null);
<add>
<add> str = '';
<add> for (var i = 0; i < chars.length; ++i) {
<add> var charcode = chars.charCodeAt(i);
<add> var unicode = encoding[charcode];
<add>
<add> // Check if the glyph has already been converted
<add> if (!IsNum(unicode))
<add> unicode = encoding[unicode] = GlyphsUnicode[unicode.name];
<add>
<add> // Handle surrogate pairs
<add> if (unicode > 0xFFFF) {
<add> str += String.fromCharCode(unicode & 0xFFFF);
<add> unicode >>= 16;
<add> }
<add> str += String.fromCharCode(unicode);
<add> }
<add>
<add> // Enter the translated string into the cache
<add> return charsCache[chars] = str;
<ide> }
<ide> };
<ide>
<ide><path>pdf.js
<ide> var CanvasGraphics = (function() {
<ide> ctx.scale(1, -1);
<ide>
<ide> if (this.ctx.$showText) {
<del> ctx.$showText(current.y, Fonts.charsToUnicode(text));
<add> ctx.$showText(current.y, text);
<ide> } else {
<del> text = Fonts.charsToUnicode(text);
<ide> ctx.translate(current.x, -1 * current.y);
<del>
<ide> var font = this.current.font;
<del> if (font)
<add> if (font) {
<ide> ctx.transform.apply(ctx, font.textMatrix);
<add> text = font.charsToUnicode(text);
<add> }
<ide> ctx.fillText(text, 0, 0);
<ide> current.x += Fonts.measureText(text);
<ide> } | 2 |
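The pdf.js change above moves the chars-to-unicode cache off the shared font manager and onto each font, creating it lazily on first use. The per-object memoization pattern it relies on can be sketched in Python (the names below are illustrative, not pdf.js API):

```python
class Font:
    def __init__(self, encoding):
        self.encoding = encoding        # maps char codes to replacement strings
        self.chars_cache = None         # created lazily and owned by this font only

    def chars_to_unicode(self, chars: str) -> str:
        if not self.encoding:
            return chars
        if self.chars_cache is None:
            self.chars_cache = {}       # lazy: only fonts that translate text pay for it
        cached = self.chars_cache.get(chars)
        if cached is not None:
            return cached
        translated = "".join(self.encoding.get(ord(c), c) for c in chars)
        self.chars_cache[chars] = translated
        return translated


font = Font(encoding={0x41: "Å"})
assert font.chars_to_unicode("ABC") == "ÅBC"   # computed and stored on this font
assert font.chars_to_unicode("ABC") == "ÅBC"   # second call is served from the cache
```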
Text | Text | add text shown in example of usage | b48a1f08c1cd92ab275bbf56869bc955ef10defe | <ide><path>model_cards/mrm8488/bert-spanish-cased-finetuned-ner/README.md
<ide> nlp_ner = pipeline(
<ide> {"use_fast": False}
<ide> ))
<ide>
<add>text = 'Mis amigos están pensando viajar a Londres este verano'
<add>
<ide> nlp_ner(text)
<ide>
<ide> #Output: [{'entity': 'B-LOC', 'score': 0.9998720288276672, 'word': 'Londres'}] | 1 |
Javascript | Javascript | fix lint error | 56b2fd487139b6ef59f4d3e3e9ca02a49cc7c202 | <ide><path>src/createStore.js
<ide> export default function createStore(reducer, initialState) {
<ide>
<ide> return function unsubscribe() {
<ide> var index = listeners.indexOf(listener);
<del> if(index === -1)return;
<add> if (index === -1)return;
<ide> listeners.splice(index, 1);
<ide> };
<ide> }
<ide><path>test/createStore.spec.js
<ide> describe('createStore', () => {
<ide> const listenerA = expect.createSpy(() => {});
<ide> const listenerB = expect.createSpy(() => {});
<ide>
<del> let unsubscribeA = store.subscribe(listenerA);
<add> const unsubscribeA = store.subscribe(listenerA);
<ide> store.subscribe(listenerB);
<ide>
<ide> unsubscribeA(); | 2 |
Ruby | Ruby | fix lmod hashcat cert issue | 55e045d23829328827ae8b2ece0187c25fa6c0ef | <ide><path>Library/Homebrew/dev-cmd/audit.rb
<ide> def audit_versioned_keg_only
<ide> end
<ide>
<ide> CERT_ERROR_ALLOWLIST = {
<add> "hashcat" => "https://hashcat.net/hashcat/",
<ide> "jinx" => "https://www.jinx-lang.org/",
<ide> "lmod" => "https://www.tacc.utexas.edu/research-development/tacc-projects/lmod",
<ide> "micropython" => "https://www.micropython.org/", | 1 |
PHP | PHP | adjust the doc blocks instead | d42104a8243bdb2c475afea1ef8a433022ec9b40 | <ide><path>src/View/Helper/TimeHelper.php
<ide> public function toQuarter($dateString, $range = false)
<ide> *
<ide> * @param int|string|\DateTime $dateString UNIX timestamp, strtotime() valid string or DateTime object
<ide> * @param string|\DateTimeZone|null $timezone User's timezone string or DateTimeZone object
<del> * @return int Unix timestamp
<add> * @return string UNIX timestamp
<ide> * @see \Cake\I18n\Time::toUnix()
<ide> */
<ide> public function toUnix($dateString, $timezone = null)
<ide> {
<del> return (int)(new Time($dateString, $timezone))->toUnixString();
<add> return (new Time($dateString, $timezone))->toUnixString();
<ide> }
<ide>
<ide> /**
<ide> public function isWithinNext($timeInterval, $dateString, $timezone = null)
<ide> * Returns gmt as a UNIX timestamp.
<ide> *
<ide> * @param int|string|\DateTime|null $string UNIX timestamp, strtotime() valid string or DateTime object
<del> * @return int UNIX timestamp
<add> * @return string UNIX timestamp
<ide> * @see \Cake\I18n\Time::gmt()
<ide> */
<ide> public function gmt($string = null)
<ide> {
<del> return (int)(new Time($string))->toUnixString();
<add> return (new Time($string))->toUnixString();
<ide> }
<ide>
<ide> /** | 1 |
Javascript | Javascript | hide $browser docs - it's a private service | f158d81d21c5c6d8fe2331abf8e527e8fc45d4a9 | <ide><path>src/ng/browser.js
<ide> 'use strict';
<ide>
<ide> /**
<del> * @ngdoc object
<add> * ! This is a private undocumented service !
<add> *
<ide> * @name ng.$browser
<ide> * @requires $log
<ide> * @description
<ide> function Browser(window, document, $log, $sniffer) {
<ide> pollTimeout;
<ide>
<ide> /**
<del> * @ngdoc method
<ide> * @name ng.$browser#addPollFn
<ide> * @methodOf ng.$browser
<ide> *
<ide> function Browser(window, document, $log, $sniffer) {
<ide> baseElement = document.find('base');
<ide>
<ide> /**
<del> * @ngdoc method
<ide> * @name ng.$browser#url
<ide> * @methodOf ng.$browser
<ide> *
<ide> function Browser(window, document, $log, $sniffer) {
<ide> }
<ide>
<ide> /**
<del> * @ngdoc method
<ide> * @name ng.$browser#onUrlChange
<ide> * @methodOf ng.$browser
<ide> * @TODO(vojta): refactor to use node's syntax for events
<ide> function Browser(window, document, $log, $sniffer) {
<ide> var cookiePath = self.baseHref();
<ide>
<ide> /**
<del> * @ngdoc method
<ide> * @name ng.$browser#cookies
<ide> * @methodOf ng.$browser
<ide> *
<ide> function Browser(window, document, $log, $sniffer) {
<ide>
<ide>
<ide> /**
<del> * @ngdoc method
<ide> * @name ng.$browser#defer
<ide> * @methodOf ng.$browser
<ide>    * @param {function()} fn A function whose execution should be deferred.
<ide> function Browser(window, document, $log, $sniffer) {
<ide>
<ide>
<ide> /**
<del> * THIS DOC IS NOT VISIBLE because ngdocs can't process docs for foo#method.method
<del> *
<ide> * @name ng.$browser#defer.cancel
<ide> * @methodOf ng.$browser.defer
<ide> *
<ide><path>src/ngMock/angular-mocks.js
<ide> angular.mock = {};
<ide>
<ide> /**
<del> * @ngdoc object
<add> * ! This is a private undocumented service !
<add> *
<ide> * @name ngMock.$browser
<ide> *
<ide> * @description
<ide> angular.mock.$Browser = function() {
<ide>
<ide>
<ide> /**
<del> * @ngdoc method
<ide> * @name ngMock.$browser#defer.flush
<ide> * @methodOf ngMock.$browser
<ide> *
<ide> angular.mock.$Browser = function() {
<ide> }
<ide> };
<ide> /**
<del> * @ngdoc property
<ide> * @name ngMock.$browser#defer.now
<ide> * @propertyOf ngMock.$browser
<ide> * | 2 |
Text | Text | reflect name change | 832116ac49d02471449064f3cb1496a5ad36890e | <ide><path>README.md
<del># Action Mailroom
<add># Action Mailbox
<ide>
<del>📬
<ide>\ No newline at end of file
<add>📬 | 1 |
PHP | PHP | fix un-aliased query in marshaller | 0421d5c348fec8224c2bc7e221f672d7a2c02576 | <ide><path>src/ORM/Marshaller.php
<ide> public function mergeMany($entities, array $data, array $options = [])
<ide> return count(array_filter($keys, 'strlen')) === count($primary);
<ide> })
<ide> ->reduce(function ($query, $keys) use ($primary) {
<del> return $query->orWhere($query->newExpr()->and_(array_combine($primary, $keys)));
<add> $fields = array_map([$this->_table, 'aliasField'], $primary);
<add> return $query->orWhere($query->newExpr()->and_(array_combine($fields, $keys)));
<ide> }, $this->_table->find());
<ide>
<ide> if (!empty($indexed) && count($maybeExistentQuery->clause('where'))) {
<ide><path>tests/TestCase/ORM/MarshallerTest.php
<ide> public function testMergeManyCompositeKey()
<ide> $this->assertSame($entities[1], $result[1], 'Should retain object');
<ide> }
<ide>
<add> /**
<add> * Test mergeMany() with forced contain to ensure aliases are used in queries.
<add> *
<add> * @return void
<add> */
<add> public function testMergeManyExistingQueryAliases()
<add> {
<add> $entities = [
<add> new OpenEntity(['id' => 1, 'comment' => 'First post', 'user_id' => 2], ['markClean' => true]),
<add> ];
<add>
<add> $data = [
<add> ['id' => 1, 'comment' => 'Changed 1', 'user_id' => 1],
<add> ['id' => 2, 'comment' => 'Changed 2', 'user_id' => 2],
<add> ];
<add> $this->comments->eventManager()->on('Model.beforeFind', function ($event, $query) {
<add> return $query->contain(['Articles']);
<add> });
<add> $marshall = new Marshaller($this->comments);
<add> $result = $marshall->mergeMany($entities, $data);
<add>
<add> $this->assertSame($entities[0], $result[0]);
<add> }
<add>
<ide> /**
<ide> * Test mergeMany() when the exist check returns nothing.
<ide> * | 2 |
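The marshaller fix above qualifies each primary key with the table alias before building the OR conditions, so the query stays unambiguous once an association such as `Articles` is contained. The aliasing step itself is small enough to sketch in Python (illustrative pseudo-helpers, not CakePHP API):

```python
def aliased_conditions(alias, primary_keys, key_values):
    # Qualify each primary key column with the table alias ("Comments.id" rather
    # than "id") so a contained association cannot make the column ambiguous.
    fields = ["{}.{}".format(alias, pk) for pk in primary_keys]
    return dict(zip(fields, key_values))


assert aliased_conditions("Comments", ["id"], [1]) == {"Comments.id": 1}
assert aliased_conditions("Comments", ["article_id", "user_id"], [2, 3]) == {
    "Comments.article_id": 2,
    "Comments.user_id": 3,
}
```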
Text | Text | replace version with replaceme | 5b4a0cb1494f77565454278ab97166fba25f7e45 | <ide><path>doc/api/stream.md
<ide> the status of the `highWaterMark`.
<ide>
<ide> ##### writable.writableFinished
<ide> <!-- YAML
<del>added: v12.4.0
<add>added: REPLACEME
<ide> -->
<ide>
<ide> * {boolean} | 1 |
Ruby | Ruby | remove nil in nulltype#ref | 2f3fe92f1bdd2ca88dfa2a7aa0253b79a0bf6194 | <ide><path>actionpack/lib/action_dispatch/http/mime_type.rb
<ide> def nil?
<ide> true
<ide> end
<ide>
<del> def ref
<del> nil
<del> end
<add> def ref; end
<ide>
<ide> def respond_to_missing?(method, include_private = false)
<ide> method.to_s.ends_with? '?' | 1 |
Javascript | Javascript | revert unwanted change | 5a7837eb729f1481baadc454671dad933701faab | <ide><path>build/three.js
<ide>
<ide> var n = gl.getProgramParameter( program, gl.ACTIVE_UNIFORMS );
<ide>
<del> for ( var i = 0; i < n; ++ i ) {
<add> for ( var i = 0; i !== n; ++ i ) {
<ide>
<ide> var info = gl.getActiveUniform( program, i ),
<ide> path = info.name,
<ide><path>build/three.module.js
<ide> function WebGLUniforms( gl, program, renderer ) {
<ide>
<ide> var n = gl.getProgramParameter( program, gl.ACTIVE_UNIFORMS );
<ide>
<del> for ( var i = 0; i < n; ++ i ) {
<add> for ( var i = 0; i !== n; ++ i ) {
<ide>
<ide> var info = gl.getActiveUniform( program, i ),
<ide> path = info.name,
<ide><path>src/renderers/webgl/WebGLUniforms.js
<ide> function WebGLUniforms( gl, program, renderer ) {
<ide>
<ide> var n = gl.getProgramParameter( program, gl.ACTIVE_UNIFORMS );
<ide>
<del> for ( var i = 0; i < n; ++ i ) {
<add> for ( var i = 0; i !== n; ++ i ) {
<ide>
<ide> var info = gl.getActiveUniform( program, i ),
<ide> path = info.name, | 3 |
Python | Python | tweak a test to avoid hitting a limit with sqlite | 2875b5dcab23c027d019656b08da8b911bc60711 | <ide><path>tests/regressiontests/multiple_database/tests.py
<ide> class SyncDBTestCase(TestCase):
<ide>
<ide> def test_syncdb_to_other_database(self):
<ide> """Regression test for #16039: syncdb with --database option."""
<del> count = ContentType.objects.count()
<add> cts = ContentType.objects.using('other').filter(app_label='multiple_database')
<add>
<add> count = cts.count()
<ide> self.assertGreater(count, 0)
<ide>
<del> ContentType.objects.using('other').delete()
<add> cts.delete()
<ide> management.call_command('syncdb', verbosity=0, interactive=False,
<ide> load_initial_data=False, database='other')
<del>
<del> self.assertEqual(ContentType.objects.using("other").count(), count)
<add> self.assertEqual(cts.count(), count)
<ide>
<ide> def test_syncdb_to_other_database_with_router(self):
<ide> """Regression test for #16039: syncdb with --database option."""
<del> ContentType.objects.using('other').delete()
<add> cts = ContentType.objects.using('other').filter(app_label='multiple_database')
<add>
<add> cts.delete()
<ide> try:
<ide> old_routers = router.routers
<ide> router.routers = [SyncOnlyDefaultDatabaseRouter()]
<ide> def test_syncdb_to_other_database_with_router(self):
<ide> finally:
<ide> router.routers = old_routers
<ide>
<del> self.assertEqual(ContentType.objects.using("other").count(), 0)
<add> self.assertEqual(cts.count(), 0) | 1 |
Java | Java | resolve minor 4.3.x deprecations in master | 62e530ec94fe2b9a60f15739356ef70065aa4371 | <ide><path>spring-expression/src/main/java/org/springframework/expression/spel/SpelMessage.java
<ide> /*
<del> * Copyright 2002-2016 the original author or authors.
<add> * Copyright 2002-2017 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide> public String formatMessage(Object... inserts) {
<ide> return formattedMessage.toString();
<ide> }
<ide>
<del> /**
<del> * Produce a complete message including the prefix, the position (if known)
<del> * and with the inserts applied to the message.
<del> * @param pos the position (ignored and not included in the message if less than 0)
<del> * @param inserts the inserts to put into the formatted message
<del> * @return a formatted message
<del> * @deprecated as of Spring 4.3.5, in favor of {@link #formatMessage(Object...)}
<del> */
<del> @Deprecated
<del> public String formatMessage(int pos, Object... inserts) {
<del> StringBuilder formattedMessage = new StringBuilder();
<del> formattedMessage.append("EL").append(this.code);
<del> switch (this.kind) {
<del> case ERROR:
<del> formattedMessage.append("E");
<del> break;
<del> }
<del> formattedMessage.append(":");
<del> if (pos >= 0) {
<del> formattedMessage.append("(pos ").append(pos).append("): ");
<del> }
<del> formattedMessage.append(MessageFormat.format(this.message, inserts));
<del> return formattedMessage.toString();
<del> }
<del>
<ide>
<ide> public enum Kind { INFO, WARNING, ERROR }
<ide>
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/handler/annotation/support/AbstractMethodArgumentResolutionException.java
<del>/*
<del> * Copyright 2002-2017 the original author or authors.
<del> *
<del> * Licensed under the Apache License, Version 2.0 (the "License");
<del> * you may not use this file except in compliance with the License.
<del> * You may obtain a copy of the License at
<del> *
<del> * http://www.apache.org/licenses/LICENSE-2.0
<del> *
<del> * Unless required by applicable law or agreed to in writing, software
<del> * distributed under the License is distributed on an "AS IS" BASIS,
<del> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<del> * See the License for the specific language governing permissions and
<del> * limitations under the License.
<del> */
<del>
<del>package org.springframework.messaging.handler.annotation.support;
<del>
<del>import org.springframework.core.MethodParameter;
<del>import org.springframework.messaging.Message;
<del>import org.springframework.messaging.handler.invocation.MethodArgumentResolutionException;
<del>
<del>/**
<del> * Base class for exceptions resulting from the invocation of
<del> * {@link org.springframework.messaging.handler.invocation.HandlerMethodArgumentResolver}.
<del> *
<del> * @author Rossen Stoyanchev
<del> * @since 4.0.3
<del> * @deprecated as of 4.3.6, in favor of the invocation-associated
<del> * {@link MethodArgumentResolutionException}
<del> */
<del>@Deprecated
<del>@SuppressWarnings("serial")
<del>public abstract class AbstractMethodArgumentResolutionException extends MethodArgumentResolutionException {
<del>
<del> protected AbstractMethodArgumentResolutionException(Message<?> message, MethodParameter parameter) {
<del> super(message, parameter);
<del> }
<del>
<del> protected AbstractMethodArgumentResolutionException(Message<?> message, MethodParameter parameter, String description) {
<del> super(message, parameter, description);
<del> }
<del>
<del>
<del> protected static String getMethodParamMessage(MethodParameter param) {
<del> return "";
<del> }
<del>
<del>}
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/handler/annotation/support/MethodArgumentNotValidException.java
<ide>
<ide> import org.springframework.core.MethodParameter;
<ide> import org.springframework.messaging.Message;
<add>import org.springframework.messaging.handler.invocation.MethodArgumentResolutionException;
<ide> import org.springframework.validation.BindingResult;
<ide> import org.springframework.validation.ObjectError;
<ide>
<ide> * @author Rossen Stoyanchev
<ide> * @since 4.0.1
<ide> */
<del>@SuppressWarnings({"serial", "deprecation"})
<del>public class MethodArgumentNotValidException extends AbstractMethodArgumentResolutionException {
<add>@SuppressWarnings("serial")
<add>public class MethodArgumentNotValidException extends MethodArgumentResolutionException {
<ide>
<ide> private BindingResult bindingResult;
<ide>
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/handler/annotation/support/MethodArgumentTypeMismatchException.java
<ide>
<ide> import org.springframework.core.MethodParameter;
<ide> import org.springframework.messaging.Message;
<add>import org.springframework.messaging.handler.invocation.MethodArgumentResolutionException;
<ide>
<ide> /**
<ide> * Exception that indicates that a method argument has not the expected type.
<ide> *
<ide> * @author Stephane Nicoll
<ide> * @since 4.0.3
<ide> */
<del>@SuppressWarnings({"serial", "deprecation"})
<del>public class MethodArgumentTypeMismatchException extends AbstractMethodArgumentResolutionException {
<add>@SuppressWarnings("serial")
<add>public class MethodArgumentTypeMismatchException extends MethodArgumentResolutionException {
<ide>
<ide> public MethodArgumentTypeMismatchException(Message<?> message, MethodParameter parameter, String description) {
<ide> super(message, parameter, description); | 4 |
Ruby | Ruby | convert dmg test to spec | 92e2e7a21665aad705d76b90735aed854b7a09f5 | <add><path>Library/Homebrew/cask/spec/cask/container/dmg_spec.rb
<del><path>Library/Homebrew/cask/test/cask/container/dmg_test.rb
<del>require "test_helper"
<add>require "spec_helper"
<ide>
<ide> describe Hbc::Container::Dmg do
<del> describe "mount!" do
<add> describe "#mount!" do
<ide> it "does not store nil mounts for dmgs with extra data" do
<ide> transmission = Hbc::CaskLoader.load_from_file(TEST_FIXTURE_DIR/"cask/Casks/local-transmission.rb")
<ide>
<ide>
<ide> begin
<ide> dmg.mount!
<del> dmg.mounts.wont_include nil
<add> expect(dmg.mounts).not_to include nil
<ide> ensure
<ide> dmg.eject!
<ide> end | 1 |
Ruby | Ruby | add assertion to polymorphic_routes_test.rb | f1a097733a6923d4c7b6b4413394ce86e17198d6 | <ide><path>actionview/test/activerecord/polymorphic_routes_test.rb
<ide> def assert_url(url, args)
<ide>
<ide> def test_string
<ide> with_test_routes do
<del> # FIXME: why are these different? Symbol case passes through to
<del> # `polymorphic_url`, but the String case doesn't.
<add> assert_equal "/projects", polymorphic_path("projects")
<ide> assert_equal "http://example.com/projects", polymorphic_url("projects")
<ide> assert_equal "projects", url_for("projects")
<ide> end | 1 |
PHP | PHP | allow multiple schemas in pgsql | 7b846be4c4513e129a877f485eb975a9eec0d115 | <ide><path>laravel/database/connectors/postgres.php
<ide> public function connect($config)
<ide> // the database to set the search path.
<ide> if (isset($config['schema']))
<ide> {
<del> $connection->prepare("SET search_path TO '{$config['schema']}'")->execute();
<add> $connection->prepare("SET search_path TO {$config['schema']}")->execute();
<ide> }
<ide>
<ide> return $connection; | 1 |
Text | Text | remove "unreleased" section from changelog | 58b8797b7372c9296e65e08ce8297e4a394b7972 | <ide><path>CHANGELOG.md
<del>## [Unreleased]
<del><details>
<del> <summary>
<del> Changes that have landed in master but are not yet released.
<del> Click to see more.
<del> </summary>
<del></details>
<del>
<ide> ## 16.12.0 (November 14, 2019)
<ide>
<ide> ### React DOM | 1 |
Text | Text | update relation to other libraries.md | ecfd1276a6d680a55c6848766735af396d606381 | <ide><path>docs/Basics/Relation to Other Libraries.md
<ide> Relation to Other Libraries
<ide> --------------------------
<ide>
<del>TODO
<add>### Flux
<add>
<add>Can Redux be considered a [Flux](https://facebook.github.io/flux/) implementation?
<add>[Yes](https://twitter.com/fisherwebdev/status/616278911886884864), and [no](https://twitter.com/andrestaltz/status/616270755605708800).
<add>
<add>(Don’t worry, [Flux creators](https://twitter.com/jingc/status/616608251463909376) [approve of it](https://twitter.com/fisherwebdev/status/616286955693682688), if that’s all you wanted to know.)
<add>
<add>Redux was inspired by several important qualities of Flux. Like Flux, Redux prescribes you to concentrate your model update logic in a certain layer of your application (“stores” in Flux, “reducers” in Redux). Instead of letting the application code directly mutate the data, both tell you to describe every mutation as a plain object called “action”.
<add>
<add>Unlike Flux, **Redux does not have a concept of Dispatcher**. This is because it relies on pure functions instead of event emitters, and pure functions are easy to compose and don’t need an additional entity managing them. Depending on how you view Flux, you may see this as a deviation or an implementation detail. Flux has often been [described as `(state, action) => state`](https://speakerdeck.com/jmorrell/jsconf-uy-flux-those-who-forget-the-past-dot-dot-dot). In this sense, Redux is true to the Flux architecture, but makes it simpler thanks to pure functions.
<add>
<add>Another important difference from Flux is that **Redux assumes you never mutate your data**. You can use plain objects and arrays for your state just fine, but mutating them inside the reducers is severely discouraged. You should always return a new object, which is easy with the [object spread syntax proposed for ES7](https://github.com/sebmarkbage/ecmascript-rest-spread) and implemented in [Babel](http://babeljs.io), or with a library like [Immutable](https://facebook.github.io/immutable-js).
<add>
<add>While it is technically *possible* to [write impure reducers](https://github.com/gaearon/redux/issues/328#issuecomment-125035516) that mutate the data for performance corner cases, we actively discourage you from doing this. Development features like time travel, record/replay, or hot reloading will break. Moreover, it doesn’t seem like immutability poses performance problems in most real apps, because, as [Om](https://github.com/omcljs/om) demonstrates, even if you lose out on object allocation, you still win by avoiding expensive re-renders and re-calculations, as you know exactly what changed thanks to reducer purity.
<add>
<add>### Elm
<add>
<add>[Elm](http://elm-lang.org/) is a functional programming language created by [Evan Czaplicki](https://twitter.com/czaplic). It encourages using [an architecture that can be described as `(state, action) => state`](http://elm-lang.org/guide/architecture). Technically, Elm “updaters” are equivalent to the reducers in Redux.
<add>
<add>Unlike Redux, Elm is a language, so it is able to benefit from static typing for actions, and pattern matching. Even if you don’t plan to use Elm, you should read about the Elm architecture, and play with it. There is an interesting [JavaScript library playground implementing similar ideas](https://github.com/paldepind/noname-functional-frontend-framework). We should look there for inspiration on Redux! One way we can get closer to the static typing of Elm is by [using a gradual typing solution like Flow](https://github.com/gaearon/redux/issues/290).
<add>
<add>### Immutable
<add>
<add>[Immutable](https://facebook.github.io/immutable-js) is a JavaScript library implementing persistent data structures. It is performant and has an idiomatic JavaScript API.
<add>
<add>Immutable and most similar libraries are orthogonal to Redux. Feel free to use them together!
<add>
<add>**Redux doesn’t care *how* you store the state—it can be a plain object, an Immutable object, or anything else.** You’ll probably want a (de)serialization mechanism for writing universal apps and hydrating their state from the server, but other than that, you can use any data storage library *as long as it supports immutability*. For example, it doesn’t make sense to use Backbone for Redux state, because Backbone models are mutable.
<add>
<add>Note that if your immutable library supports cursors, you should not use them in a Redux app. The whole tree should be considered read-only, and you should use Redux for updating the state, and subscribing to the updates. **If you’re happy with cursors, you don’t need Redux.**
<add>
<add>### Baobab
<add>
<add>[Baobab](https://github.com/Yomguithereal/baobab) is another popular library implementing an immutable API for updating plain JavaScript objects. While you can use it with Redux, there is little benefit to using them together.
<add>
<add>Most of the functionality Baobab provides is related to updating the data with cursors, but Redux enforces that the only way to update the data is to dispatch the action. Therefore they solve the same problem differently, and don’t complement each other.
<add>
<add>Unlike Immutable, Baobab doesn’t yet implement any special efficient data structures under the hood, so you don’t really win anything from using it together with Redux. It’s easier to just use plain objects in this case.
<ide>
<ide> --------------------------
<ide> Next: [The Redux Flow](The Redux Flow.md) | 1 |
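The page above keeps returning to the `(state, action) => state` contract: all updates flow through pure reducers that never mutate the previous state. Written in Python only to keep this document's inserted examples in one language, a minimal reducer obeying that contract looks like:

```python
def counter(state, action):
    # Reducers are pure: given the same state and action they return the same
    # result, and they never mutate the state they receive.
    if state is None:
        state = {"count": 0}
    if action["type"] == "INCREMENT":
        return {**state, "count": state["count"] + 1}
    if action["type"] == "DECREMENT":
        return {**state, "count": state["count"] - 1}
    return state


state = counter(None, {"type": "@@INIT"})
state = counter(state, {"type": "INCREMENT"})
state = counter(state, {"type": "INCREMENT"})
assert state == {"count": 2}
```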
Javascript | Javascript | add double side support | 74c27264d7582b0b481d44e3cc673be73110db98 | <ide><path>examples/js/loaders/LDrawLoader.js
<ide> THREE.LDrawLoader = ( function () {
<ide>
<ide> }
<ide>
<del> var bfcEnabled = false;
<add> var bfcCertified = false;
<ide> var bfcCCW = true;
<ide> var bfcInverted = false;
<add> var bfcCull = true;
<ide>
<ide> // Parse all line commands
<ide> for ( lineIndex = 0; lineIndex < numLines; lineIndex ++ ) {
<ide> THREE.LDrawLoader = ( function () {
<ide> currentEmbeddedFileName = lp.getRemainingString();
<ide> currentEmbeddedText = '';
<ide>
<del> bfcEnabled = false;
<add> bfcCertified = false;
<ide> bfcCCW = true;
<ide>
<ide> }
<ide> THREE.LDrawLoader = ( function () {
<ide> case 'CERTIFY':
<ide> case 'NOCERTIFY':
<ide>
<del> bfcEnabled = token === 'CERTIFY';
<add> bfcCertified = token === 'CERTIFY';
<ide> bfcCCW = true;
<ide>
<ide> break;
<ide> THREE.LDrawLoader = ( function () {
<ide> case 'CLIP':
<ide> case 'NOCLIP':
<ide>
<del> console.warn( 'THREE.LDrawLoader: BFC CLIP and NOCLIP directives ignored.' );
<add> bfcCull = token === 'CLIP';
<ide>
<ide> break;
<ide>
<ide> THREE.LDrawLoader = ( function () {
<ide> var material = parseColourCode( lp );
<ide>
<ide> var inverted = currentParseScope.inverted;
<del> var ccw = ! bfcEnabled || ( bfcCCW !== inverted );
<add> var ccw = bfcCCW !== inverted;
<add> var doubleSided = ! bfcCertified || ! bfcCull;
<ide> var v0, v1, v2;
<ide>
<ide> if ( ccw === true ) {
<ide> THREE.LDrawLoader = ( function () {
<ide> v2: v2
<ide> } );
<ide>
<add> if ( doubleSided === true ) {
<add>
<add> triangles.push( {
<add> material: material,
<add> colourCode: material.userData.code,
<add> v0: v0,
<add> v1: v2,
<add> v2: v1
<add> } );
<add>
<add> }
<add>
<ide> break;
<ide>
<ide> // Line type 4: Quadrilateral
<ide> THREE.LDrawLoader = ( function () {
<ide> var material = parseColourCode( lp );
<ide>
<ide> var inverted = currentParseScope.inverted;
<del> var ccw = ! bfcEnabled || ( bfcCCW !== inverted );
<add> var ccw = bfcCCW !== inverted;
<add> var doubleSided = ! bfcCertified || ! bfcCull;
<ide> var v0, v1, v2, v3;
<ide>
<ide> if ( ccw === true ) {
<ide> THREE.LDrawLoader = ( function () {
<ide> v2: v3
<ide> } );
<ide>
<add> if ( doubleSided === true ) {
<add>
<add> triangles.push( {
<add> material: material,
<add> colourCode: material.userData.code,
<add> v0: v0,
<add> v1: v2,
<add> v2: v1
<add> } );
<add>
<add> triangles.push( {
<add> material: material,
<add> colourCode: material.userData.code,
<add> v0: v0,
<add> v1: v3,
<add> v2: v2
<add> } );
<add>
<add> }
<add>
<ide> break;
<ide>
<ide> // Line type 5: Optional line | 1 |
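The loader change above emits a second triangle with reversed winding whenever a face is not BFC-certified or culling is disabled, which is what makes the surface visible from both sides. The bookkeeping can be sketched in Python (plain tuples standing in for the vertex objects used above):

```python
def add_face(triangles, v0, v1, v2, ccw=True, double_sided=False):
    if not ccw:
        # Honour the declared winding order by reversing the vertex order.
        v0, v2 = v2, v0
    triangles.append((v0, v1, v2))
    if double_sided:
        # Back face: same vertices, opposite winding, so it is drawn from behind too.
        triangles.append((v0, v2, v1))


tris = []
add_face(tris, "a", "b", "c", ccw=True, double_sided=True)
assert tris == [("a", "b", "c"), ("a", "c", "b")]
```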
Python | Python | add examples for ufunc.resolve_dtypes | 36d75387ea974daa41667e22b4ac255709cfc61a | <ide><path>numpy/core/_add_newdocs.py
<ide> Similar function to this, but returns additional information which
<ide> give access to the core C functionality of NumPy.
<ide>
<add> Examples
<add> --------
<add> This API requires passing dtypes, define them for convenience:
<add>
<add> >>> int32 = np.dtype("int32")
<add> >>> float32 = np.dtype("float32")
<add>
<add> The typical ufunc call does not pass an output dtype. `np.add` has two
<add> inputs and one output, so leave the output as ``None`` (not provided):
<add>
<add> >>> np.add.resolve_dtypes((int32, float32, None))
<add> (dtype('float64'), dtype('float64'), dtype('float64'))
<add>
<add>    The loop found uses "float64" for all operands (including the output); the
<add>    first input would be cast.
<add>
<add> ``resolve_dtypes`` supports "weak" handling for Python scalars by passing
<add> ``int``, ``float``, or ``complex``:
<add>
<add> >>> np.add.resolve_dtypes((float32, float, None))
<add> (dtype('float32'), dtype('float32'), dtype('float32'))
<add>
<add>    Where the Python ``float`` behaves similarly to a Python value ``0.0``
<add> in a ufunc call. (See :ref:`NEP 50 <NEP50>` for details.)
<add>
<ide> """))
<ide>
<ide> add_newdoc('numpy.core', 'ufunc', ('_resolve_dtypes_and_context', | 1 |
Python | Python | remove redundant blank-line and parenthesis | ee9db04084b0d8adaac46069fa97b63d7d274013 | <ide><path>airflow/configuration.py
<del>
<ide> # Licensed to the Apache Software Foundation (ASF) under one
<ide> # or more contributor license agreements. See the NOTICE file
<ide> # distributed with this work for additional information
<ide><path>tests/providers/amazon/aws/transfers/test_s3_to_sftp.py
<ide> def test_s3_to_sftp_operation(self):
<ide> # Clean up after finishing with test
<ide> conn.delete_object(Bucket=self.s3_bucket, Key=self.s3_key)
<ide> conn.delete_bucket(Bucket=self.s3_bucket)
<del> self.assertFalse((self.s3_hook.check_for_bucket(self.s3_bucket)))
<add> self.assertFalse(self.s3_hook.check_for_bucket(self.s3_bucket))
<ide>
<ide> def delete_remote_resource(self):
<ide> # check the remote file content
<ide><path>tests/test_utils/perf/sql_queries.py
<del>
<ide> # Licensed to the Apache Software Foundation (ASF) under one
<ide> # or more contributor license agreements. See the NOTICE file
<ide> # distributed with this work for additional information | 3 |
Python | Python | remove broken test, fix lua and nsfn tests | 6c2d57c09dfc5285706d26e0d4860cc870d4af2d | <ide><path>libcloud/common/base.py
<ide>
<ide> import libcloud
<ide>
<del>from libcloud.utils.py3 import PY25
<add>from libcloud.utils.py3 import PY25, basestring
<ide> from libcloud.utils.py3 import httplib
<ide> from libcloud.utils.py3 import urlparse
<ide> from libcloud.utils.py3 import urlencode
<ide> def __init__(self, response, connection):
<ide> self.headers = lowercase_keys(dict(response.headers))
<ide> self.error = response.reason
<ide> self.status = response.status_code
<del>
<del> self.body = response.text.strip() if response.text is not None else ''
<add>
<add> if isinstance(response.text, basestring):
<add> self.body = response.text.strip() \
<add> if response.text is not None else ''
<add> else:
<add> self.body = ''
<ide>
<ide> if not self.success():
<ide> raise exception_from_message(code=self.status,
<ide><path>libcloud/test/compute/test_gce.py
<ide> def test_ex_set_machine_type_notstopped(self):
<ide> self.assertRaises(GoogleBaseError, self.driver.ex_set_machine_type,
<ide> node, 'custom-4-61440')
<ide>
<del> def test_ex_set_machine_type_invalid(self):
<del> # get stopped node, change machine type
<del> zone = 'us-central1-a'
<del> node = self.driver.ex_get_node('custom-node', zone)
<del> self.assertRaises(InvalidRequestError, self.driver.ex_set_machine_type,
<del> node, 'custom-1-61440')
<del>
<ide> def test_ex_set_machine_type(self):
<ide> # get stopped node, change machine type
<ide> zone = 'us-central1-a'
<ide><path>libcloud/test/dns/test_luadns.py
<ide> class LuadnsTests(unittest.TestCase):
<ide>
<ide> def setUp(self):
<ide> LuadnsMockHttp.type = None
<del> LuadnsDNSDriver.connectionCls.conn_classes = (
<del> None, LuadnsMockHttp)
<add> LuadnsDNSDriver.connectionCls.conn_class = LuadnsMockHttp
<ide> self.driver = LuadnsDNSDriver(*DNS_PARAMS_LUADNS)
<ide> self.test_zone = Zone(id='11', type='master', ttl=None,
<ide> domain='example.com', extra={},
<ide><path>libcloud/test/dns/test_nfsn.py
<ide> class NFSNTestCase(LibcloudTestCase):
<ide>
<ide> def setUp(self):
<del> NFSNDNSDriver.connectionCls.conn_classes = (None, NFSNMockHttp)
<add> NFSNDNSDriver.connectionCls.conn_class = NFSNMockHttp
<ide> NFSNMockHttp.type = None
<ide> self.driver = NFSNDNSDriver('testid', 'testsecret')
<ide> | 4 |
Ruby | Ruby | add full lib path to audit | f432a59fd0d68a7b17143699fa7e4dc93bd6a5e8 | <ide><path>Library/Homebrew/formula_cellar_checks.rb
<ide> def check_jars
<ide> jars = f.lib.children.select{|g| g.to_s =~ /\.jar$/}
<ide> return if jars.empty?
<ide>
<del> ['JARs were installed to "lib".',
<add> ["JARs were installed to \"#{f.lib}\".",
<ide> <<-EOS.undent
<ide> Installing JARs to "lib" can cause conflicts between packages.
<ide> For Java software, it is typically better for the formula to
<ide> def check_non_libraries
<ide> end
<ide> return if non_libraries.empty?
<ide>
<del> ['Non-libraries were installed to "lib".',
<add> ["Non-libraries were installed to \"#{f.lib}\".",
<ide> <<-EOS.undent
<ide> Installing non-libraries to "lib" is bad practice.
<ide> The offending files are: | 1 |
Python | Python | move lstsq to umath_linalg | 3ef55be846bc8e6e21515787b29608fec7e1fad0 | <ide><path>numpy/linalg/linalg.py
<ide> def _raise_linalgerror_eigenvalues_nonconvergence(err, flag):
<ide> def _raise_linalgerror_svd_nonconvergence(err, flag):
<ide> raise LinAlgError("SVD did not converge")
<ide>
<add>def _raise_linalgerror_lstsq(err, flag):
<add> raise LinAlgError("SVD did not converge in Linear Least Squares")
<add>
<ide> def get_linalg_error_extobj(callback):
<ide> extobj = list(_linalg_error_extobj) # make a copy
<ide> extobj[2] = callback
<ide> def lstsq(a, b, rcond="warn"):
<ide> >>> plt.show()
<ide>
<ide> """
<del> import math
<ide> a, _ = _makearray(a)
<ide> b, wrap = _makearray(b)
<ide> is_1d = b.ndim == 1
<ide> def lstsq(a, b, rcond="warn"):
<ide> m = a.shape[0]
<ide> n = a.shape[1]
<ide> n_rhs = b.shape[1]
<del> ldb = max(n, m)
<ide> if m != b.shape[0]:
<ide> raise LinAlgError('Incompatible dimensions')
<ide>
<ide> def lstsq(a, b, rcond="warn"):
<ide> FutureWarning, stacklevel=2)
<ide> rcond = -1
<ide> if rcond is None:
<del> rcond = finfo(t).eps * ldb
<del>
<del> bstar = zeros((ldb, n_rhs), t)
<del> bstar[:m, :n_rhs] = b
<del> a, bstar = _fastCopyAndTranspose(t, a, bstar)
<del> a, bstar = _to_native_byte_order(a, bstar)
<del> s = zeros((min(m, n),), real_t)
<del> # This line:
<del> # * is incorrect, according to the LAPACK documentation
<del> # * raises a ValueError if min(m,n) == 0
<del> # * should not be calculated here anyway, as LAPACK should calculate
<del> # `liwork` for us. But that only works if our version of lapack does
<del> # not have this bug:
<del> # http://icl.cs.utk.edu/lapack-forum/archives/lapack/msg00899.html
<del> # Lapack_lite does have that bug...
<del> nlvl = max( 0, int( math.log( float(min(m, n))/2. ) ) + 1 )
<del> iwork = zeros((3*min(m, n)*nlvl+11*min(m, n),), fortran_int)
<del> if isComplexType(t):
<del> lapack_routine = lapack_lite.zgelsd
<del> lwork = 1
<del> rwork = zeros((lwork,), real_t)
<del> work = zeros((lwork,), t)
<del> results = lapack_routine(m, n, n_rhs, a, m, bstar, ldb, s, rcond,
<del> 0, work, -1, rwork, iwork, 0)
<del> lrwork = int(rwork[0])
<del> lwork = int(work[0].real)
<del> work = zeros((lwork,), t)
<del> rwork = zeros((lrwork,), real_t)
<del> results = lapack_routine(m, n, n_rhs, a, m, bstar, ldb, s, rcond,
<del> 0, work, lwork, rwork, iwork, 0)
<add> rcond = finfo(t).eps * max(n, m)
<add>
<add> if m <= n:
<add> gufunc = _umath_linalg.lstsq_m
<ide> else:
<del> lapack_routine = lapack_lite.dgelsd
<del> lwork = 1
<del> work = zeros((lwork,), t)
<del> results = lapack_routine(m, n, n_rhs, a, m, bstar, ldb, s, rcond,
<del> 0, work, -1, iwork, 0)
<del> lwork = int(work[0])
<del> work = zeros((lwork,), t)
<del> results = lapack_routine(m, n, n_rhs, a, m, bstar, ldb, s, rcond,
<del> 0, work, lwork, iwork, 0)
<del> if results['info'] > 0:
<del> raise LinAlgError('SVD did not converge in Linear Least Squares')
<del>
<del> # undo transpose imposed by fortran-order arrays
<del> b_out = bstar.T
<add> gufunc = _umath_linalg.lstsq_n
<add>
<add> signature = 'DDd->Did' if isComplexType(t) else 'ddd->did'
<add> extobj = get_linalg_error_extobj(_raise_linalgerror_lstsq)
<add> b_out, rank, s = gufunc(a, b, rcond, signature=signature, extobj=extobj)
<ide>
<ide> # b_out contains both the solution and the components of the residuals
<del> x = b_out[:n,:]
<del> r_parts = b_out[n:,:]
<add> x = b_out[...,:n,:]
<add> r_parts = b_out[...,n:,:]
<ide> if isComplexType(t):
<ide> resids = sum(abs(r_parts)**2, axis=-2)
<ide> else:
<ide> resids = sum(r_parts**2, axis=-2)
<ide>
<del> rank = results['rank']
<del>
<ide> # remove the axis we added
<ide> if is_1d:
<ide> x = x.squeeze(axis=-1) | 1 |
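The refactor above changes how `lstsq` is computed internally (dispatching to a gufunc) without changing the public call. For reference, the stable `numpy.linalg.lstsq` API that this code backs can be exercised as follows; only the sample data is made up:

```python
import numpy as np

# Fit y = m*x + c by least squares: design matrix columns are [x, 1].
x = np.array([0.0, 1.0, 2.0, 3.0])
y = np.array([-1.0, 0.2, 0.9, 2.1])
a = np.vstack([x, np.ones_like(x)]).T

# rcond=None asks for the machine-precision based cutoff mentioned in the diff.
coeffs, residuals, rank, singular_values = np.linalg.lstsq(a, y, rcond=None)
m, c = coeffs
print(m, c, rank)   # slope and intercept of the best fit, plus the rank of `a`
```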
Ruby | Ruby | use native class#subclasses if available | 359240dff6f7ce5c295aa573399937ac76ad8d92 | <ide><path>activesupport/lib/active_support.rb
<ide> def self.utc_to_local_returns_utc_offset_times
<ide> def self.utc_to_local_returns_utc_offset_times=(value)
<ide> DateAndTime::Compatibility.utc_to_local_returns_utc_offset_times = value
<ide> end
<del>
<del> @has_native_class_descendants = Class.method_defined?(:descendants) # RUBY_VERSION >= "3.1"
<ide> end
<ide>
<ide> autoload :I18n, "active_support/i18n"
<ide><path>activesupport/lib/active_support/core_ext/class/subclasses.rb
<ide> # frozen_string_literal: true
<ide>
<add>require "active_support/ruby_features"
<add>
<ide> class Class
<ide> # Returns an array with all classes that are < than its receiver.
<ide> #
<ide> def descendants
<ide> ObjectSpace.each_object(singleton_class).reject do |k|
<ide> k.singleton_class? || k == self
<ide> end
<del> end unless ActiveSupport.instance_variable_get(:@has_native_class_descendants) # RUBY_VERSION >= "3.1"
<add> end unless ActiveSupport::RubyFeatures::CLASS_DESCENDANTS
<ide>
<ide> # Returns an array with the direct children of +self+.
<ide> #
<ide> def descendants
<ide> # Foo.subclasses # => [Bar]
<ide> def subclasses
<ide> descendants.select { |descendant| descendant.superclass == self }
<del> end
<add> end unless ActiveSupport::RubyFeatures::CLASS_SUBCLASSES
<ide> end
<ide><path>activesupport/lib/active_support/descendants_tracker.rb
<ide> # frozen_string_literal: true
<ide>
<ide> require "weakref"
<add>require "active_support/ruby_features"
<ide>
<ide> module ActiveSupport
<ide> # This module provides an internal implementation to track descendants
<ide> def direct_descendants(klass)
<ide> end
<ide> end
<ide>
<del> if ActiveSupport.instance_variable_get(:@has_native_class_descendants) # RUBY_VERSION >= "3.1"
<add> if RubyFeatures::CLASS_DESCENDANTS
<ide> class << self
<ide> def subclasses(klass)
<ide> klass.subclasses
<ide> def native? # :nodoc:
<ide> end
<ide> end
<ide>
<del> def subclasses
<del> descendants.select { |descendant| descendant.superclass == self }
<add> unless RubyFeatures::CLASS_SUBCLASSES
<add> def subclasses
<add> descendants.select { |descendant| descendant.superclass == self }
<add> end
<ide> end
<ide>
<ide> def direct_descendants
<ide><path>activesupport/lib/active_support/ruby_features.rb
<add># frozen_string_literal: true
<add>
<add>module ActiveSupport
<add> module RubyFeatures # :nodoc:
<add> CLASS_DESCENDANTS = Class.method_defined?(:descendants) # RUBY_VERSION >= "3.1"
<add> CLASS_SUBCLASSES = Class.method_defined?(:subclasses) # RUBY_VERSION >= "3.1"
<add> end
<add>end | 4 |
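The ActiveSupport change above probes the running Ruby once (via `Class.method_defined?`) and only defines its fallback implementations when the native methods are missing. The same capability-probing pattern, sketched here in Python with `math.isqrt` as the probed feature, looks like:

```python
import math

# Probe the runtime once, analogous to RubyFeatures::CLASS_SUBCLASSES above.
HAS_NATIVE_ISQRT = hasattr(math, "isqrt")   # math.isqrt exists on Python >= 3.8

if HAS_NATIVE_ISQRT:
    isqrt = math.isqrt
else:
    def isqrt(n: int) -> int:
        # Pure-Python fallback, only installed on interpreters without math.isqrt.
        if n < 0:
            raise ValueError("isqrt() argument must be nonnegative")
        root = int(n ** 0.5)
        while root * root > n:
            root -= 1
        while (root + 1) * (root + 1) <= n:
            root += 1
        return root

assert isqrt(17) == 4
```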
PHP | PHP | fix loss of coverage | 47cf2942fad1a0ae6ac68445f6c37679ee0bb1ec | <ide><path>tests/TestCase/ORM/Association/HasManyTest.php
<ide> public function testSaveAssociatedEmptySetWithReplaceStrategyRemovesAssociatedRe
<ide> *
<ide> * @return void
<ide> */
<del> public function testInvalidStrategy()
<add> public function testInvalidSaveStrategy()
<ide> {
<ide> $this->expectException(\InvalidArgumentException::class);
<ide> $articles = TableRegistry::get('Articles');
<ide>
<ide> $association = $articles->hasMany('Comments');
<del> $association->setStrategy('anotherThing');
<add> $association->setSaveStrategy('anotherThing');
<add> }
<add>
<add> /**
<add> * Tests saveStrategy
<add> *
<add> * @return void
<add> */
<add> public function testSetSaveStrategy()
<add> {
<add> $articles = TableRegistry::get('Articles');
<add>
<add> $association = $articles->hasMany('Comments');
<add> $this->assertSame($association, $association->setSaveStrategy(HasMany::SAVE_REPLACE));
<add> $this->assertSame(HasMany::SAVE_REPLACE, $association->getSaveStrategy());
<add> }
<add>
<add> /**
<add> * Tests saveStrategy
<add> *
<add> * @group deprecated
<add> * @return void
<add> */
<add> public function testSaveStrategy()
<add> {
<add> $this->deprecated(function () {
<add> $articles = TableRegistry::get('Articles');
<add>
<add> $association = $articles->hasMany('Comments');
<add> $this->assertSame(HasMany::SAVE_REPLACE, $association->saveStrategy(HasMany::SAVE_REPLACE));
<add> $this->assertSame(HasMany::SAVE_REPLACE, $association->saveStrategy());
<add> });
<ide> }
<ide> } | 1 |
Java | Java | avoid potential npe (spr-6300) | 613b4d182b23b474db4e43257cf75ef8454e6bdf | <ide><path>org.springframework.test/src/main/java/org/springframework/test/web/ModelAndViewAssert.java
<ide> /*
<del> * Copyright 2002-2008 the original author or authors.
<add> * Copyright 2002-2009 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide> import java.util.Map;
<ide> import java.util.Set;
<ide>
<add>import org.springframework.util.ObjectUtils;
<ide> import org.springframework.web.servlet.ModelAndView;
<ide>
<ide> /**
<ide> public static void assertSortAndCompareListModelAttribute(
<ide> */
<ide> public static void assertViewName(ModelAndView mav, String expectedName) {
<ide> assertCondition(mav != null, "ModelAndView is null");
<del> assertCondition(expectedName.equals(mav.getViewName()), "View name is not equal to '" + expectedName +
<del> "' but was '" + mav.getViewName() + "'");
<add> assertCondition(ObjectUtils.nullSafeEquals(expectedName, mav.getViewName()),
<add> "View name is not equal to '" + expectedName + "' but was '" + mav.getViewName() + "'");
<ide> }
<ide>
<ide> | 1 |
Ruby | Ruby | remove scm directories recursively | 15c3fb32b26228b4cef2bb33863e8072ab270a41 | <ide><path>Library/Homebrew/cmd/cleanup.rb
<ide> def cleanup_cache
<ide> if path.file?
<ide> cleanup_path(path) { path.unlink }
<ide> elsif path.directory? && path.to_s.include?("--")
<del> cleanup_path(path) { path.rmdir }
<add> cleanup_path(path) { FileUtils.rm_rf path }
<ide> end
<ide> next
<ide> end | 1 |
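The one-line change above matters because SCM checkouts in the cache are directories with contents. A small self-contained demonstration of the difference, using a throwaway temp directory:

```ruby
require "fileutils"
require "pathname"
require "tmpdir"

Dir.mktmpdir do |cache|
  checkout = Pathname.new(cache).join("foo--git")
  checkout.join(".git").mkpath                     # non-empty, like a cached clone
  checkout.join(".git", "HEAD").write("ref: refs/heads/master\n")

  begin
    checkout.rmdir                                 # Dir.rmdir only removes empty dirs
  rescue SystemCallError => e
    puts "rmdir failed: #{e.class}"                # Errno::ENOTEMPTY on most platforms
  end

  FileUtils.rm_rf(checkout)                        # removes the tree recursively
  puts "removed? #{!checkout.exist?}"              # => removed? true
end
```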
Text | Text | fix typo in readablestream.locked description | 829a34ac1ddca53dc7f80cd0defc172f9e57d7b7 | <ide><path>doc/api/webstreams.md
<ide> added: v16.5.0
<ide> {ReadableStream}.
<ide>
<ide> The `readableStream.locked` property is `false` by default, and is
<del>switch to `true` while there is an active reader consuming the
<add>switched to `true` while there is an active reader consuming the
<ide> stream's data.
<ide>
<ide> #### `readableStream.cancel([reason])` | 1 |
Javascript | Javascript | fix lint issue | 4bb706fbd6b90e32396ee973180eef89ce43db88 | <ide><path>lib/HotModuleReplacement.runtime.js
<ide> MIT License http://www.opensource.org/licenses/mit-license.php
<ide> Author Tobias Koppers @sokra
<ide> */
<del>/*global $hash$ $requestTimeout$ installedModules $require$ hotDownloadManifest hotDownloadUpdateChunk hotDisposeChunk modules */
<del>
<add>// eslint-disable no-unused-vars
<ide> var $hash$ = undefined;
<ide> var $requestTimeout$ = undefined;
<ide> var installedModules = undefined;
<ide> var $require$ = undefined;
<add>var hotDownloadManifest = undefined;
<add>var hotDownloadUpdateChunk = undefined;
<add>var hotDisposeChunk = undefined;
<add>var modules = undefined;
<ide>
<ide> module.exports = function() {
<ide> var hotApplyOnUpdate = true;
<ide><path>lib/node/NodeMainTemplate.runtime.js
<ide> MIT License http://www.opensource.org/licenses/mit-license.php
<ide> Author Tobias Koppers @sokra
<ide> */
<del>/*global installedChunks $hotChunkFilename$ hotAddUpdateChunk $hotMainFilename$ */
<del>
<add>// eslint-disable-next-line no-unused-vars
<ide> var $hotChunkFilename$ = undefined;
<ide> var hotAddUpdateChunk = undefined;
<ide> var installedChunks = undefined;
<add>var $hotMainFilename$ = undefined;
<ide>
<ide> module.exports = function() {
<ide> // eslint-disable-next-line no-unused-vars
<ide><path>lib/node/NodeMainTemplateAsync.runtime.js
<ide> MIT License http://www.opensource.org/licenses/mit-license.php
<ide> Author Tobias Koppers @sokra
<ide> */
<del>/*global installedChunks $hotChunkFilename$ $require$ hotAddUpdateChunk $hotMainFilename$ */
<del>
<add>// eslint-disable-next-line no-unused-vars
<ide> var $hotChunkFilename$ = undefined;
<ide> var $require$ = undefined;
<ide> var hotAddUpdateChunk = undefined;
<ide><path>lib/web/JsonpMainTemplate.runtime.js
<ide> MIT License http://www.opensource.org/licenses/mit-license.php
<ide> Author Tobias Koppers @sokra
<ide> */
<del>/*globals hotAddUpdateChunk parentHotUpdateCallback document XMLHttpRequest $require$ $hotChunkFilename$ $hotMainFilename$ $crossOriginLoading$ */
<del>
<add>// eslint-disable-next-line no-unused-vars
<ide> var hotAddUpdateChunk = undefined;
<ide> var parentHotUpdateCallback = undefined;
<del>var $require$ = undefined;
<add>var $require$ = undefined;
<ide> var $hotMainFilename$ = undefined;
<ide> var $hotChunkFilename$ = undefined;
<ide> var $crossOriginLoading$ = undefined;
<ide><path>lib/webworker/WebWorkerMainTemplate.runtime.js
<ide> MIT License http://www.opensource.org/licenses/mit-license.php
<ide> Author Tobias Koppers @sokra
<ide> */
<del>/*globals installedChunks hotAddUpdateChunk parentHotUpdateCallback importScripts XMLHttpRequest $require$ $hotChunkFilename$ $hotMainFilename$ */
<del>
<add>// eslint-disable-next-line no-unused-vars
<ide> var hotAddUpdateChunk = undefined;
<ide> var parentHotUpdateCallback = undefined;
<ide> var $require$ = undefined; | 5 |
Text | Text | add note about decorator loading in engines guide | ab28bafc9f1118652f5090fa1260923118930518 | <ide><path>guides/source/engines.md
<ide> Engine model and controller classes can be extended by open classing them in the
<ide>
<ide> For simple class modifications use `Class#class_eval`, and for complex class modifications, consider using `ActiveSupport::Concern`.
<ide>
<add>#### A note on Decorators and loading code
<add>
<add>Because these decorators are not referenced by your Rails application itself,
<add>Rails' autoloading system will not kick in and load your decorators. This
<add>means that you need to require them yourself.
<add>
<add>Here is some sample code to do this:
<add>
<add>```ruby
<add># lib/blorgh/engine.rb
<add>module Blorgh
<add> class Engine < ::Rails::Engine
<add> isolate_namespace Blorgh
<add>
<add> config.to_prepare do
<add> Dir.glob(Rails.root + "app/decorators/**/*_decorator*.rb").each do |c|
<add> require_dependency(c)
<add> end
<add> end
<add> end
<add>end
<add>```
<add>
<add>This doesn't apply to just Decorators, but anything that you add in an engine
<add>that isn't referenced by your main application.
<add>
<ide> #### Implementing Decorator Pattern Using Class#class_eval
<ide>
<ide> **Adding** `Post#time_since_created`, | 1 |
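Since the added guide text mentions `Post#time_since_created` without showing a decorator body, here is a hedged, self-contained toy of the same `class_eval` pattern; the model is a stub and the decorator file path in the comment is hypothetical:

```ruby
require "time"

# Stub standing in for an engine model such as Blorgh::Post.
module Blorgh
  class Post
    attr_reader :created_at

    def initialize(created_at)
      @created_at = created_at
    end
  end
end

# Roughly what app/decorators/models/blorgh/post_decorator.rb could contain.
# Nothing in the app references this file, which is why the engine's
# config.to_prepare block has to require it explicitly.
Blorgh::Post.class_eval do
  def time_since_created
    Time.now - created_at
  end
end

post = Blorgh::Post.new(Time.now - 120)
puts post.time_since_created.round # => roughly 120 (seconds)
```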
Text | Text | remove duplicated word | 3757691c9188b76b246e52ade7c23bffc73bc566 | <ide><path>docs/how-to-translate-the-website.md
<ide> Here are some tips:
<ide> - Only translate the values in the JSON files, the keys need to stay the same
<ide> - The English versions of the JSON files are the "source of truth" for what needs to be translated. If you are unsure what text to translate, find the matching key in the English version of the file and translate that text
<ide> - Be sure to escape double quotes (`\"`) in the strings if you need to use them
<del>- Most of the time, when you see text wrapped in number tags (`<0>`text`</0>`) tags, it's a link. It is okay to change the text that it is wrapped around. Just keep the same tags.
<add>- Most of the time, when you see text wrapped in number tags (`<0>`text`</0>`), it's a link. It is okay to change the text that it is wrapped around. Just keep the same tags.
<ide> - A value that has something like `{{value}}` in it is a variable. Don't change any of those characters. You can move that whole group of characters around though.
<ide>
<ide> There's some [help on how make changes and open a PR here](/how-to-setup-freecodecamp-locally). | 1 |
Text | Text | update response content for volume mounts | 6a04ecf81333d7b26928f0dcbfb416ca788f287c | <ide><path>docs/reference/api/docker_remote_api_v1.22.md
<ide> Create a container
<ide> },
<ide> "Mounts": [
<ide> {
<add> "Name": "fac362...80535",
<ide> "Source": "/data",
<ide> "Destination": "/data",
<add> "Driver": "local",
<ide> "Mode": "ro,Z",
<del> "RW": false
<add> "RW": false,
<add> "Propagation": ""
<ide> }
<ide> ],
<ide> "WorkingDir": "",
<ide> Return low-level information on the container `id`
<ide> },
<ide> "Mounts": [
<ide> {
<add> "Name": "fac362...80535",
<ide> "Source": "/data",
<ide> "Destination": "/data",
<add> "Driver": "local",
<ide> "Mode": "ro,Z",
<del> "RW": false
<add> "RW": false,
<add> "Propagation": ""
<ide> }
<ide> ]
<ide> }
<ide><path>docs/userguide/dockervolumes.md
<ide> volumes. The output should look something similar to the following:
<ide> "Destination": "/webapp",
<ide> "Driver": "local",
<ide> "Mode": "",
<del> "RW": true
<add> "RW": true,
<add> "Propagation": ""
<ide> }
<ide> ]
<ide> ... | 2 |
Javascript | Javascript | provide argument for custom callback | 2e4d623bf3b2974b3873584fb85cf97d9ffb6721 | <ide><path>src/plugins/plugin.tooltip.js
<ide> export class Tooltip extends Element {
<ide> }
<ide>
<ide> if (changed && options.custom) {
<del> options.custom.call(me);
<add> options.custom.call(me, [me]);
<ide> }
<ide> }
<ide> | 1 |
Text | Text | fix a typo in the tutorial | 9ff7bbe0ac4172ea15242218e9ac4668eddc1e7f | <ide><path>docs/sources/examples/nodejs_web_app.md
<ide> Your `Dockerfile` should now look like this:
<ide> ## Building your image
<ide>
<ide> Go to the directory that has your `Dockerfile` and run the following command
<del>to build a Docker image. The `-t` flag let's you tag your image so it's easier
<add>to build a Docker image. The `-t` flag lets you tag your image so it's easier
<ide> to find later using the `docker images` command:
<ide>
<ide> $ sudo docker build -t <your username>/centos-node-hello . | 1 |
Text | Text | add faq section for creating reproducible results | a8f1b28cee7f2b24a57323a0354cdebddd7630f7 | <ide><path>docs/templates/getting-started/faq.md
<ide> - [How can I use pre-trained models in Keras?](#how-can-i-use-pre-trained-models-in-keras)
<ide> - [How can I use HDF5 inputs with Keras?](#how-can-i-use-hdf5-inputs-with-keras)
<ide> - [Where is the Keras configuration file stored?](#where-is-the-keras-configuration-file-stored)
<add>- [How can I obtain reproducible results using Keras during development?](#how-can-i-obtain-reproducible-results-using-keras-during-development)
<ide>
<ide> ---
<ide>
<ide> It contains the following fields:
<ide> - The default backend. See the [backend documentation](/backend).
<ide>
<ide> Likewise, cached dataset files, such as those downloaded with [`get_file()`](/utils/#get_file), are stored by default in `$HOME/.keras/datasets/`.
<add>
<add>---
<add>
<add>### How can I obtain reproducible results using Keras during development?
<add>
<add>During development of a model, sometimes it is useful to be able to obtain reproducible results from run to run in order to determine if a change in performance is due to an actual model or data modification, or merely a result of a new random sample. The below snippet of code provides an example of how to obtain reproducible results - this is geared towards a TensorFlow backend for a Python 3 environment.
<add>
<add>```python
<add>import numpy as np
<add>import tensorflow as tf
<add>import random as rn
<add>
<add># The below is necessary in Python 3.2.3 onwards to
<add># have reproducible behavior for certain hash-based operations.
<add># See these references for further details:
<add># https://docs.python.org/3.4/using/cmdline.html#envvar-PYTHONHASHSEED
<add># https://github.com/fchollet/keras/issues/2280#issuecomment-306959926
<add>
<add>import os
<add>os.environ['PYTHONHASHSEED'] = '0'
<add>
<add># The below is necessary for starting Numpy generated random numbers
<add># in a well-defined initial state.
<add>
<add>np.random.seed(42)
<add>
<add># The below is necessary for starting core Python generated random numbers
<add># in a well-defined state.
<add>
<add>rn.seed(12345)
<add>
<add># Force TensorFlow to use single thread.
<add># Multiple threads are a potential source of
<add># non-reproducible results.
<add># For further details, see: https://stackoverflow.com/questions/42022950/which-seeds-have-to-be-set-where-to-realize-100-reproducibility-of-training-res
<add>
<add>session_conf = tf.ConfigProto(intra_op_parallelism_threads=1, inter_op_parallelism_threads=1)
<add>
<add>from keras import backend as K
<add>
<add># The below tf.set_random_seed() will make random number generation
<add># in the TensorFlow backend have a well-defined initial state.
<add># For further details, see: https://www.tensorflow.org/api_docs/python/tf/set_random_seed
<add>
<add>tf.set_random_seed(1234)
<add>
<add>sess = tf.Session(graph=tf.get_default_graph(), config=session_conf)
<add>K.set_session(sess)
<add>
<add># Rest of code follows ...
<add>```
<ide>\ No newline at end of file | 1 |
Ruby | Ruby | allow multiple option_names in dep/reqs | e5ba31fcdcc4150e5aa8ecf110cfa502dd62a802 | <ide><path>Library/Homebrew/build_options.rb
<ide> def include?(name)
<ide> # args << "--with-example1"
<ide> # end</pre>
<ide> def with?(val)
<del> name = val.respond_to?(:option_name) ? val.option_name : val
<add> option_names = val.respond_to?(:option_names) ? val.option_names : [val]
<ide>
<del> if option_defined? "with-#{name}"
<del> include? "with-#{name}"
<del> elsif option_defined? "without-#{name}"
<del> !include? "without-#{name}"
<del> else
<del> false
<add> option_names.any? do |name|
<add> if option_defined? "with-#{name}"
<add> include? "with-#{name}"
<add> elsif option_defined? "without-#{name}"
<add> !include? "without-#{name}"
<add> else
<add> false
<add> end
<ide> end
<ide> end
<ide>
<ide> # True if a {Formula} is being built without a specific option.
<ide> # <pre>args << "--no-spam-plz" if build.without? "spam"
<del> def without?(name)
<del> !with? name
<add> def without?(val)
<add> !with?(val)
<ide> end
<ide>
<ide> # True if a {Formula} is being built as a bottle (i.e. binary package).
<ide><path>Library/Homebrew/dependency.rb
<ide> class Dependency
<ide> include Dependable
<ide>
<del> attr_reader :name, :tags, :env_proc, :option_name
<add> attr_reader :name, :tags, :env_proc, :option_names
<ide>
<ide> DEFAULT_ENV_PROC = proc {}
<ide>
<del> def initialize(name, tags = [], env_proc = DEFAULT_ENV_PROC, option_name = name)
<add> def initialize(name, tags = [], env_proc = DEFAULT_ENV_PROC, option_names = [name])
<ide> @name = name
<ide> @tags = tags
<ide> @env_proc = env_proc
<del> @option_name = option_name
<add> @option_names = option_names
<ide> end
<ide>
<ide> def to_s
<ide> def merge_repeats(all)
<ide> deps = grouped.fetch(name)
<ide> dep = deps.first
<ide> tags = deps.flat_map(&:tags).uniq
<del> dep.class.new(name, tags, dep.env_proc)
<add> option_names = deps.flat_map(&:option_names).uniq
<add> dep.class.new(name, tags, dep.env_proc, option_names)
<ide> end
<ide> end
<ide> end
<ide> def merge_repeats(all)
<ide> class TapDependency < Dependency
<ide> attr_reader :tap
<ide>
<del> def initialize(name, tags = [], env_proc = DEFAULT_ENV_PROC, option_name = name.split("/").last)
<add> def initialize(name, tags = [], env_proc = DEFAULT_ENV_PROC, option_names = [name.split("/").last])
<ide> @tap = name.rpartition("/").first
<del> super(name, tags, env_proc, option_name)
<add> super(name, tags, env_proc, option_names)
<ide> end
<ide>
<ide> def installed?
<ide><path>Library/Homebrew/requirement.rb
<ide> class Requirement
<ide> include Dependable
<ide>
<ide> attr_reader :tags, :name, :cask, :download, :default_formula
<del> alias_method :option_name, :name
<ide>
<ide> def initialize(tags = [])
<ide> @default_formula = self.class.default_formula
<ide> def initialize(tags = [])
<ide> @name ||= infer_name
<ide> end
<ide>
<add> def option_names
<add> [name]
<add> end
<add>
<ide> # The message to show when the requirement is not met.
<ide> def message
<ide> s = ""
<ide><path>Library/Homebrew/software_spec.rb
<ide> def add_legacy_patches(list)
<ide> end
<ide>
<ide> def add_dep_option(dep)
<del> name = dep.option_name
<del>
<del> if dep.optional? && !option_defined?("with-#{name}")
<del> options << Option.new("with-#{name}", "Build with #{name} support")
<del> elsif dep.recommended? && !option_defined?("without-#{name}")
<del> options << Option.new("without-#{name}", "Build without #{name} support")
<add> dep.option_names.each do |name|
<add> if dep.optional? && !option_defined?("with-#{name}")
<add> options << Option.new("with-#{name}", "Build with #{name} support")
<add> elsif dep.recommended? && !option_defined?("without-#{name}")
<add> options << Option.new("without-#{name}", "Build without #{name} support")
<add> end
<ide> end
<ide> end
<ide> end
<ide><path>Library/Homebrew/tab.rb
<ide> def self.empty
<ide> end
<ide>
<ide> def with?(val)
<del> name = val.respond_to?(:option_name) ? val.option_name : val
<del> include?("with-#{name}") || unused_options.include?("without-#{name}")
<add> option_names = val.respond_to?(:option_names) ? val.option_names : [val]
<add>
<add> option_names.any? do |name|
<add> include?("with-#{name}") || unused_options.include?("without-#{name}")
<add> end
<ide> end
<ide>
<del> def without?(name)
<del> !with? name
<add> def without?(val)
<add> !with?(val)
<ide> end
<ide>
<ide> def include?(opt)
<ide><path>Library/Homebrew/test/test_dependency.rb
<ide> def test_merge_repeats
<ide>
<ide> foo_named_dep = merged.find {|d| d.name == "foo"}
<ide> assert_equal [:build, "bar"], foo_named_dep.tags
<del> assert_includes foo_named_dep.option_name, "foo"
<del> assert_includes foo_named_dep.option_name, "foo2"
<add> assert_includes foo_named_dep.option_names, "foo"
<add> assert_includes foo_named_dep.option_names, "foo2"
<ide>
<ide> xyz_named_dep = merged.find {|d| d.name == "xyz"}
<ide> assert_equal ["abc"], xyz_named_dep.tags
<del> assert_includes xyz_named_dep.option_name, "foo"
<del> refute_includes xyz_named_dep.option_name, "foo2"
<add> assert_includes xyz_named_dep.option_names, "foo"
<add> refute_includes xyz_named_dep.option_names, "foo2"
<ide> end
<ide>
<ide> def test_equality
<ide> def test_equality
<ide> end
<ide>
<ide> class TapDependencyTests < Homebrew::TestCase
<del> def test_option_name
<add> def test_option_names
<ide> dep = TapDependency.new("foo/bar/dog")
<del> assert_equal "dog", dep.option_name
<add> assert_equal %w[dog], dep.option_names
<ide> end
<ide> end
<ide><path>Library/Homebrew/test/test_requirement.rb
<ide> def test_accepts_multiple_tags
<ide> assert_equal %w[bar baz].sort, dep.tags.sort
<ide> end
<ide>
<del> def test_option_name
<add> def test_option_names
<ide> dep = TestRequirement.new
<del> assert_equal "test", dep.option_name
<add> assert_equal %w[test], dep.option_names
<ide> end
<ide>
<ide> def test_preserves_symbol_tags | 7 |
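Reduced to its core, the change lets a dependency carry several option names (merged from repeated declarations) and makes `with?`/`without?` succeed if any of them matches. A simplified sketch with stand-in classes, not the real Homebrew ones:

```ruby
# Stand-ins only; the real classes are Homebrew's Dependency and BuildOptions.
FakeDep = Struct.new(:name, :option_names)

class FakeBuildOptions
  def initialize(args, defined_options)
    @args = args
    @defined = defined_options
  end

  def include?(name)
    @args.include?("--#{name}")
  end

  def option_defined?(name)
    @defined.include?(name)
  end

  # Accepts a plain name or anything responding to #option_names.
  def with?(val)
    names = val.respond_to?(:option_names) ? val.option_names : [val]
    names.any? do |name|
      if option_defined?("with-#{name}")
        include?("with-#{name}")
      elsif option_defined?("without-#{name}")
        !include?("without-#{name}")
      else
        false
      end
    end
  end

  def without?(val)
    !with?(val)
  end
end

dep   = FakeDep.new("foo", %w[foo foo2])  # names merged from two declarations
build = FakeBuildOptions.new(["--with-foo2"], %w[with-foo with-foo2])

p build.with?(dep)    # => true, the second option name matched
p build.with?("foo")  # => false, "--with-foo" itself was not passed
p build.without?(dep) # => false
```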
Text | Text | remove obsolete 'bootcamp' project | 46492ee65a26c8b3a138f4b9f5eea0e648dc8b45 | <ide><path>hack/bootcamp/README.md
<del># Docker maintainer bootcamp
<del>
<del>## Introduction: we need more maintainers
<del>
<del>Docker is growing incredibly fast. At the time of writing, it has received over 200 contributions from 90 people,
<del>and its API is used by dozens of 3rd-party tools. Over 1,000 issues have been opened. As the first production deployments
<del>start going live, the growth will only accelerate.
<del>
<del>Also at the time of writing, Docker has 3 full-time maintainers, and 7 part-time subsystem maintainers. If docker
<del>is going to live up to the expectations, we need more than that.
<del>
<del>This document describes a *bootcamp* to guide and train volunteers interested in helping the project, either with individual
<del>contributions, maintainer work, or both.
<del>
<del>This bootcamp is an experiment. If you decide to go through it, consider yourself an alpha-tester. You should expect quirks,
<del>and report them to us as you encounter them to help us smooth out the process.
<del>
<del>
<del>## How it works
<del>
<del>The maintainer bootcamp is a 12-step program - one step for each of the maintainer's responsibilities. The aspiring maintainer must
<del>validate all 12 steps by 1) studying it, 2) practicing it, and 3) getting endorsed for it.
<del>
<del>Steps are all equally important and can be validated in any order. Validating all 12 steps is a pre-requisite for becoming a core
<del>maintainer, but even 1 step will make you a better contributor!
<del>
<del>### List of steps
<del>
<del>#### 1) Be a power user
<del>
<del>Use docker daily, build cool things with it, know its quirks inside and out.
<del>
<del>
<del>#### 2) Help users
<del>
<del>Answer questions on irc, twitter, email, in person.
<del>
<del>
<del>#### 3) Manage the bug tracker
<del>
<del>Help triage tickets - ask the right questions, find duplicates, reference relevant resources, know when to close a ticket when necessary, take the time to go over older tickets.
<del>
<del>
<del>#### 4) Improve the documentation
<del>
<del>Follow the documentation from scratch regularly and make sure it is still up-to-date. Find and fix inconsistencies. Remove stale information. Find a frequently asked question that is not documented. Simplify the content and the form.
<del>
<del>
<del>#### 5) Evangelize the principles of docker
<del>
<del>Understand what the underlying goals and principle of docker are. Explain design decisions based on what docker is, and what it is not. When someone is not using docker, find how docker can be valuable to them. If they are using docker, find how they can use it better.
<del>
<del>
<del>#### 6) Fix bugs
<del>
<del>Self-explanatory. Contribute improvements to docker which solve defects. Bugfixes should be well-tested, and prioritized by impact to the user.
<del>
<del>
<del>#### 7) Improve the testing infrastructure
<del>
<del>Automated testing is complicated and should be perpetually improved. Invest time to improve the current tooling. Refactor existing tests, create new ones, make testing more accessible to developers, add new testing capabilities (integration tests, mocking, stress test...), improve integration between tests and documentation...
<del>
<del>
<del>#### 8) Contribute features
<del>
<del>Improve docker to do more things, or get better at doing the same things. Features should be well-tested, not break existing APIs, respect the project goals. They should make the user's life measurably better. Features should be discussed ahead of time to avoid wasting time and duplicating effort.
<del>
<del>
<del>#### 9) Refactor internals
<del>
<del>Improve docker to repay technical debt. Simplify code layout, improve performance, add missing comments, reduce the number of files and functions, rename functions and variables to be more readable, go over FIXMEs, etc.
<del>
<del>#### 10) Review and merge contributions
<del>
<del>Review pull requests in a timely manner, review code in detail and offer feedback. Keep a high bar without being pedantic. Share the load of testing and merging pull requests.
<del>
<del>#### 11) Release
<del>
<del>Manage a release of docker from beginning to end. Tests, final review, tags, builds, upload to mirrors, distro packaging, etc.
<del>
<del>#### 12) Train other maintainers
<del>
<del>Contribute to training other maintainers. Give advice, delegate work, help organize the bootcamp. This also means contribute to the maintainer's manual, look for ways to improve the project organization etc.
<del>
<del>### How to study a step
<del>
<del>### How to practice a step
<del>
<del>### How to get endorsed for a step
<del>
<del> | 1 |
Javascript | Javascript | add check for startlatency in fast refresh | 4d13d671aa7b2121a9c364ae0e569aeafc815b15 | <ide><path>packages/next/client/dev/error-overlay/hot-dev-client.js
<ide> function handleErrors(errors) {
<ide> }
<ide> }
<ide>
<add>let startLatency = undefined
<add>
<ide> function onFastRefresh(hasUpdates) {
<ide> DevOverlay.onBuildOk()
<ide> if (hasUpdates) {
<ide> DevOverlay.onRefresh()
<ide> }
<ide>
<del> const latency = Date.now() - startLatency
<del> console.log(`[Fast Refresh] done in ${latency}ms`)
<del> if (self.__NEXT_HMR_LATENCY_CB) {
<del> self.__NEXT_HMR_LATENCY_CB(latency)
<add> if (startLatency) {
<add> const latency = Date.now() - startLatency
<add> console.log(`[Fast Refresh] done in ${latency}ms`)
<add> if (self.__NEXT_HMR_LATENCY_CB) {
<add> self.__NEXT_HMR_LATENCY_CB(latency)
<add> }
<ide> }
<ide> }
<ide>
<ide> function handleAvailableHash(hash) {
<ide> mostRecentCompilationHash = hash
<ide> }
<ide>
<del>let startLatency = undefined
<del>
<ide> // Handle messages from the server.
<ide> function processMessage(e) {
<ide> const obj = JSON.parse(e.data) | 1 |
Go | Go | use ioctl helpers from x/sys/unix | c7c02eea8117347c5ebd43ef3b9192d43bcdd10b | <ide><path>pkg/loopback/attach_loopback.go
<ide> import (
<ide> // Loopback related errors
<ide> var (
<ide> ErrAttachLoopbackDevice = errors.New("loopback attach failed")
<del> ErrGetLoopbackBackingFile = errors.New("Unable to get loopback backing file")
<del> ErrSetCapacity = errors.New("Unable set loopback capacity")
<add> ErrGetLoopbackBackingFile = errors.New("unable to get loopback backing file")
<add> ErrSetCapacity = errors.New("unable set loopback capacity")
<ide> )
<ide>
<del>func stringToLoopName(src string) [LoNameSize]uint8 {
<del> var dst [LoNameSize]uint8
<add>func stringToLoopName(src string) [unix.LO_NAME_SIZE]uint8 {
<add> var dst [unix.LO_NAME_SIZE]uint8
<ide> copy(dst[:], src[:])
<ide> return dst
<ide> }
<ide> func getNextFreeLoopbackIndex() (int, error) {
<ide> return 0, err
<ide> }
<ide> defer f.Close()
<del>
<del> index, err := ioctlLoopCtlGetFree(f.Fd())
<del> if index < 0 {
<del> index = 0
<del> }
<del> return index, err
<add> return unix.IoctlRetInt(int(f.Fd()), unix.LOOP_CTL_GET_FREE)
<ide> }
<ide>
<ide> func openNextAvailableLoopback(index int, sparseFile *os.File) (loopFile *os.File, err error) {
<ide> func openNextAvailableLoopback(index int, sparseFile *os.File) (loopFile *os.Fil
<ide> }
<ide>
<ide> // Try to attach to the loop file
<del> if err := ioctlLoopSetFd(loopFile.Fd(), sparseFile.Fd()); err != nil {
<add> if err = unix.IoctlSetInt(int(loopFile.Fd()), unix.LOOP_SET_FD, int(sparseFile.Fd())); err != nil {
<ide> loopFile.Close()
<ide>
<ide> // If the error is EBUSY, then try the next loopback
<ide> func AttachLoopDevice(sparseName string) (loop *os.File, err error) {
<ide> loopInfo := &unix.LoopInfo64{
<ide> File_name: stringToLoopName(loopFile.Name()),
<ide> Offset: 0,
<del> Flags: LoFlagsAutoClear,
<add> Flags: unix.LO_FLAGS_AUTOCLEAR,
<ide> }
<ide>
<del> if err := ioctlLoopSetStatus64(loopFile.Fd(), loopInfo); err != nil {
<add> if err = unix.IoctlLoopSetStatus64(int(loopFile.Fd()), loopInfo); err != nil {
<ide> logrus.Errorf("Cannot set up loopback device info: %s", err)
<ide>
<ide> // If the call failed, then free the loopback device
<del> if err := ioctlLoopClrFd(loopFile.Fd()); err != nil {
<add> if err = unix.IoctlSetInt(int(loopFile.Fd()), unix.LOOP_CLR_FD, 0); err != nil {
<ide> logrus.Error("Error while cleaning up the loopback device")
<ide> }
<ide> loopFile.Close()
<ide><path>pkg/loopback/ioctl.go
<del>//go:build linux
<del>// +build linux
<del>
<del>package loopback // import "github.com/docker/docker/pkg/loopback"
<del>
<del>import (
<del> "unsafe"
<del>
<del> "golang.org/x/sys/unix"
<del>)
<del>
<del>func ioctlLoopCtlGetFree(fd uintptr) (int, error) {
<del> // The ioctl interface for /dev/loop-control (since Linux 3.1) is a bit
<del> // off compared to what you'd expect: instead of writing an integer to a
<del> // parameter pointer like unix.IoctlGetInt() expects, it returns the first
<del> // available loop device index directly.
<del> ioctlReturn, _, err := unix.Syscall(unix.SYS_IOCTL, fd, LoopCtlGetFree, 0)
<del> if err != 0 {
<del> return 0, err
<del> }
<del> return int(ioctlReturn), nil
<del>}
<del>
<del>func ioctlLoopSetFd(loopFd, sparseFd uintptr) error {
<del> return unix.IoctlSetInt(int(loopFd), unix.LOOP_SET_FD, int(sparseFd))
<del>}
<del>
<del>func ioctlLoopSetStatus64(loopFd uintptr, loopInfo *unix.LoopInfo64) error {
<del> if _, _, err := unix.Syscall(unix.SYS_IOCTL, loopFd, unix.LOOP_SET_STATUS64, uintptr(unsafe.Pointer(loopInfo))); err != 0 {
<del> return err
<del> }
<del> return nil
<del>}
<del>
<del>func ioctlLoopClrFd(loopFd uintptr) error {
<del> if _, _, err := unix.Syscall(unix.SYS_IOCTL, loopFd, unix.LOOP_CLR_FD, 0); err != 0 {
<del> return err
<del> }
<del> return nil
<del>}
<del>
<del>func ioctlLoopGetStatus64(loopFd uintptr) (*unix.LoopInfo64, error) {
<del> loopInfo := &unix.LoopInfo64{}
<del>
<del> if _, _, err := unix.Syscall(unix.SYS_IOCTL, loopFd, unix.LOOP_GET_STATUS64, uintptr(unsafe.Pointer(loopInfo))); err != 0 {
<del> return nil, err
<del> }
<del> return loopInfo, nil
<del>}
<del>
<del>func ioctlLoopSetCapacity(loopFd uintptr, value int) error {
<del> return unix.IoctlSetInt(int(loopFd), unix.LOOP_SET_CAPACITY, value)
<del>}
<ide><path>pkg/loopback/loop_wrapper.go
<del>//go:build linux
<del>// +build linux
<del>
<del>package loopback // import "github.com/docker/docker/pkg/loopback"
<del>
<del>import "golang.org/x/sys/unix"
<del>
<del>// IOCTL consts
<del>const (
<del> LoopSetFd = unix.LOOP_SET_FD
<del> LoopCtlGetFree = unix.LOOP_CTL_GET_FREE
<del> LoopGetStatus64 = unix.LOOP_GET_STATUS64
<del> LoopSetStatus64 = unix.LOOP_SET_STATUS64
<del> LoopClrFd = unix.LOOP_CLR_FD
<del> LoopSetCapacity = unix.LOOP_SET_CAPACITY
<del>)
<del>
<del>// LOOP consts.
<del>const (
<del> LoFlagsAutoClear = unix.LO_FLAGS_AUTOCLEAR
<del> LoFlagsReadOnly = unix.LO_FLAGS_READ_ONLY
<del> LoFlagsPartScan = unix.LO_FLAGS_PARTSCAN
<del> LoKeySize = unix.LO_KEY_SIZE
<del> LoNameSize = unix.LO_NAME_SIZE
<del>)
<ide><path>pkg/loopback/loopback.go
<ide> import (
<ide> )
<ide>
<ide> func getLoopbackBackingFile(file *os.File) (uint64, uint64, error) {
<del> loopInfo, err := ioctlLoopGetStatus64(file.Fd())
<add> loopInfo, err := unix.IoctlLoopGetStatus64(int(file.Fd()))
<ide> if err != nil {
<ide> logrus.Errorf("Error get loopback backing file: %s", err)
<ide> return 0, 0, ErrGetLoopbackBackingFile
<ide> func getLoopbackBackingFile(file *os.File) (uint64, uint64, error) {
<ide>
<ide> // SetCapacity reloads the size for the loopback device.
<ide> func SetCapacity(file *os.File) error {
<del> if err := ioctlLoopSetCapacity(file.Fd(), 0); err != nil {
<add> if err := unix.IoctlSetInt(int(file.Fd()), unix.LOOP_SET_CAPACITY, 0); err != nil {
<ide> logrus.Errorf("Error loopbackSetCapacity: %s", err)
<ide> return ErrSetCapacity
<ide> }
<ide> func FindLoopDeviceFor(file *os.File) *os.File {
<ide> return nil
<ide> }
<ide> targetInode := stat.Ino
<del> // the type is 32bit on mips
<del> targetDevice := uint64(stat.Dev) //nolint: unconvert
<add> targetDevice := uint64(stat.Dev) //nolint: unconvert // the type is 32bit on mips
<ide>
<ide> for i := 0; true; i++ {
<ide> path := fmt.Sprintf("/dev/loop%d", i) | 4 |
Text | Text | add toc and proper noun section to challenge guide | 962f04009bfdf53cba992ab43a05504ef2f6230b | <ide><path>challenge-style-guide.md
<ide> # A guide to designing freeCodeCamp coding challenges
<ide>
<del>> “Talk is cheap. Show me the code.” — Linus Torvalds
<add>> "Talk is cheap. Show me the code." — Linus Torvalds
<ide>
<ide> freeCodeCamp offers 1,200 hours of interactive coding challenges. These are 100% focused on the practical skill of building software. You code the entire time. You learn to code by coding.
<ide>
<ide> You can learn theory through free online university courses. freeCodeCamp will focus instead on helping you learn to code and practice by building apps.
<ide>
<ide> With that practical focus in mind, let’s talk about the requirements for our coding challenges. (Note that these requirements do not apply to our algorithm challenges, checkpoint challenges, or projects.)
<ide>
<add>**Table of Contents**
<add>
<add>- [Proper nouns](#proper-nouns)
<add>- [The 2 minute rule](#the-2-minute-rule)
<add>- [Modularity](#modularity)
<add>- [Naming challenges](#naming-challenges)
<add>- [Writing tests](#writing-tests)
<add>- [Writing instructions](#writing-instructions)
<add>- [Formatting challenge text](#formatting-challenge-text)
<add>- [Formatting seed code](#formatting-seed-code)
<add>- [Why do we have all these rules?](#why-do-we-have-all-these-rules)
<add>
<add>## Proper nouns
<add>
<add>Proper nouns should use correct capitalization when possible. Below is a list of words as they should appear in the challenges.
<add>
<add>- JavaScript (capital letters in "J" and "S" and no abbreviations)
<add>- Node.js
<add>
<add>Front-end development (adjective form with a dash) is when you are working on the front end (noun form with no dash). The same goes with the back end, full stack, and many other compound terms.
<add>
<ide> ## The 2 minute rule
<ide>
<ide> Each challenge should be solvable within 120 seconds by a native English speaker who has completed the challenges leading up to it. This includes the amount of time it takes to read the directions, understand the seeded code, write their own code, and get all the tests to pass.
<ide> Here are specific formatting guidelines for challenge text and examples:
<ide> - Multi-line code examples go in `<blockquote>` tags, and use the `<br>` tag to separate lines. For HTML examples, remember to use escape characters to represent the angle brackets
<ide> - A single horizontal rules (`<hr>` tag) should separate the text discussing the challenge concept and the challenge instructions
<ide> - Additional information in the form of a note should be formatted `<strong>Note</strong><br>Rest of note text...`
<add>- Use double quotes where applicable
<ide>
<ide> ## Formatting seed code
<ide>
<ide> Here are specific formatting guidelines for the challenge seed code:
<ide>
<ide> - Use two spaces to indent
<ide> - JavaScript statements end with a semicolon
<add>- Use double quotes where applicable
<ide>
<ide> ## Why do we have all these rules?
<ide> | 1 |
Javascript | Javascript | add test for visit helper in ember testing | 3c6dd5271cd4380a515c714edfbf7ca60f652445 | <ide><path>packages/ember-testing/tests/helpers_test.js
<ide> test("Ember.Application#setupForTesting", function() {
<ide>
<ide> equal(App.__container__.lookup('router:main').location.implementation, 'none');
<ide> equal(window.EMBER_APP_BEING_TESTED, App);
<del>});
<ide>\ No newline at end of file
<add>});
<add>
<add>test("visit transitions to the correct route", function() {
<add> expect(2);
<add>
<add> var visit, currentRoute;
<add>
<add> Ember.run(function() {
<add> App = Ember.Application.create();
<add> App.setupForTesting();
<add> });
<add>
<add> App.injectTestHelpers();
<add> visit = window.visit;
<add>
<add> App.Router.map(function() {
<add> this.route('posts');
<add> this.route('comments');
<add> });
<add>
<add> App.PostsRoute = Ember.Route.extend({
<add> renderTemplate: function() {
<add> currentRoute = 'posts';
<add> }
<add> });
<add>
<add> App.CommentsRoute = Ember.Route.extend({
<add> renderTemplate: function() {
<add> currentRoute = 'comments';
<add> }
<add> });
<add>
<add> Ember.run(App, App.advanceReadiness);
<add>
<add> currentRoute = 'index';
<add>
<add> visit('/posts').then(function() {
<add> equal(currentRoute, 'posts', "Successfully visited posts route");
<add> return visit('/comments');
<add> }).then(function() {
<add> equal(currentRoute, 'comments', "visit can be chained");
<add> });
<add>
<add> App.removeTestHelpers();
<add>}); | 1 |
Ruby | Ruby | fix the api documentation layout of after_*_commit | 933e11f8311e13c8d5e4355813ebcdbb50037ffc | <ide><path>activerecord/lib/active_record/transactions.rb
<ide> def after_commit(*args, &block)
<ide> set_callback(:commit, :after, *args, &block)
<ide> end
<ide>
<del> # Shortcut for +after_commit :hook, on: :create+.
<add> # Shortcut for <tt>after_commit :hook, on: :create</tt>.
<ide> def after_create_commit(*args, &block)
<ide> set_options_for_callbacks!(args, on: :create)
<ide> set_callback(:commit, :after, *args, &block)
<ide> end
<ide>
<del> # Shortcut for +after_commit :hook, on: :update+.
<add> # Shortcut for <tt>after_commit :hook, on: :update</tt>.
<ide> def after_update_commit(*args, &block)
<ide> set_options_for_callbacks!(args, on: :update)
<ide> set_callback(:commit, :after, *args, &block)
<ide> end
<ide>
<del> # Shortcut for +after_commit :hook, on: :destroy+.
<add> # Shortcut for <tt>after_commit :hook, on: :destroy</tt>.
<ide> def after_destroy_commit(*args, &block)
<ide> set_options_for_callbacks!(args, on: :destroy)
<ide> set_callback(:commit, :after, *args, &block) | 1 |
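The fix above is purely about RDoc markup: `+...+` is only treated as code for a single word-like token, so a phrase containing spaces and punctuation needs `<tt>...</tt>`. A tiny illustration in comment form:

```ruby
# +after_commit+                            # single token: rendered as code
# +after_commit :hook, on: :create+         # spaces/punctuation: RDoc leaves the plus signs as-is
# <tt>after_commit :hook, on: :create</tt>  # renders as code reliably
def after_create_commit(*args, &block)
  # body elided in this sketch
end
```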
Go | Go | use errdefs for handling errors in client | 0cafc84fb21f675a9794c80e59f8f85919c618ec | <ide><path>client/container_copy.go
<ide> func (cli *Client) CopyToContainer(ctx context.Context, containerID, dstPath str
<ide> }
<ide> defer ensureReaderClosed(response)
<ide>
<add> // TODO this code converts non-error status-codes (e.g., "204 No Content") into an error; verify if this is the desired behavior
<ide> if response.statusCode != http.StatusOK {
<ide> return fmt.Errorf("unexpected status code from daemon: %d", response.statusCode)
<ide> }
<ide> func (cli *Client) CopyFromContainer(ctx context.Context, containerID, srcPath s
<ide> return nil, types.ContainerPathStat{}, wrapResponseError(err, response, "container:path", containerID+":"+srcPath)
<ide> }
<ide>
<add> // TODO this code converts non-error status-codes (e.g., "204 No Content") into an error; verify if this is the desired behavior
<ide> if response.statusCode != http.StatusOK {
<ide> return nil, types.ContainerPathStat{}, fmt.Errorf("unexpected status code from daemon: %d", response.statusCode)
<ide> }
<ide><path>client/container_copy_test.go
<ide> func TestCopyToContainerNotFoundError(t *testing.T) {
<ide> }
<ide> }
<ide>
<add>// TODO TestCopyToContainerNotStatusOKError expects a non-error status-code ("204 No Content") to produce an error; verify if this is the desired behavior
<ide> func TestCopyToContainerNotStatusOKError(t *testing.T) {
<ide> client := &Client{
<ide> client: newMockClient(errorMock(http.StatusNoContent, "No content")),
<ide> func TestCopyFromContainerNotFoundError(t *testing.T) {
<ide> }
<ide> }
<ide>
<add>// TODO TestCopyFromContainerNotStatusOKError expects a non-error status-code ("204 No Content") to produce an error; verify if this is the desired behavior
<ide> func TestCopyFromContainerNotStatusOKError(t *testing.T) {
<ide> client := &Client{
<ide> client: newMockClient(errorMock(http.StatusNoContent, "No content")),
<ide><path>client/image_pull.go
<ide> package client // import "github.com/docker/docker/client"
<ide> import (
<ide> "context"
<ide> "io"
<del> "net/http"
<ide> "net/url"
<ide> "strings"
<ide>
<ide> "github.com/docker/distribution/reference"
<ide> "github.com/docker/docker/api/types"
<add> "github.com/docker/docker/errdefs"
<ide> )
<ide>
<ide> // ImagePull requests the docker host to pull an image from a remote registry.
<ide> func (cli *Client) ImagePull(ctx context.Context, refStr string, options types.I
<ide> }
<ide>
<ide> resp, err := cli.tryImageCreate(ctx, query, options.RegistryAuth)
<del> if resp.statusCode == http.StatusUnauthorized && options.PrivilegeFunc != nil {
<add> if errdefs.IsUnauthorized(err) && options.PrivilegeFunc != nil {
<ide> newAuthHeader, privilegeErr := options.PrivilegeFunc()
<ide> if privilegeErr != nil {
<ide> return nil, privilegeErr
<ide><path>client/image_push.go
<ide> import (
<ide> "context"
<ide> "errors"
<ide> "io"
<del> "net/http"
<ide> "net/url"
<ide>
<ide> "github.com/docker/distribution/reference"
<ide> "github.com/docker/docker/api/types"
<add> "github.com/docker/docker/errdefs"
<ide> )
<ide>
<ide> // ImagePush requests the docker host to push an image to a remote registry.
<ide> func (cli *Client) ImagePush(ctx context.Context, image string, options types.Im
<ide> query.Set("tag", tag)
<ide>
<ide> resp, err := cli.tryImagePush(ctx, name, query, options.RegistryAuth)
<del> if resp.statusCode == http.StatusUnauthorized && options.PrivilegeFunc != nil {
<add> if errdefs.IsUnauthorized(err) && options.PrivilegeFunc != nil {
<ide> newAuthHeader, privilegeErr := options.PrivilegeFunc()
<ide> if privilegeErr != nil {
<ide> return nil, privilegeErr
<ide><path>client/image_search.go
<ide> import (
<ide> "context"
<ide> "encoding/json"
<ide> "fmt"
<del> "net/http"
<ide> "net/url"
<ide>
<ide> "github.com/docker/docker/api/types"
<ide> "github.com/docker/docker/api/types/filters"
<ide> "github.com/docker/docker/api/types/registry"
<add> "github.com/docker/docker/errdefs"
<ide> )
<ide>
<ide> // ImageSearch makes the docker host to search by a term in a remote registry.
<ide> func (cli *Client) ImageSearch(ctx context.Context, term string, options types.I
<ide> }
<ide>
<ide> resp, err := cli.tryImageSearch(ctx, query, options.RegistryAuth)
<del> if resp.statusCode == http.StatusUnauthorized && options.PrivilegeFunc != nil {
<add> if errdefs.IsUnauthorized(err) && options.PrivilegeFunc != nil {
<ide> newAuthHeader, privilegeErr := options.PrivilegeFunc()
<ide> if privilegeErr != nil {
<ide> return results, privilegeErr
<ide><path>client/login.go
<ide> package client // import "github.com/docker/docker/client"
<ide> import (
<ide> "context"
<ide> "encoding/json"
<del> "net/http"
<ide> "net/url"
<ide>
<ide> "github.com/docker/docker/api/types"
<ide> import (
<ide> func (cli *Client) RegistryLogin(ctx context.Context, auth types.AuthConfig) (registry.AuthenticateOKBody, error) {
<ide> resp, err := cli.post(ctx, "/auth", url.Values{}, auth, nil)
<ide>
<del> if resp.statusCode == http.StatusUnauthorized {
<del> return registry.AuthenticateOKBody{}, unauthorizedError{err}
<del> }
<ide> if err != nil {
<ide> return registry.AuthenticateOKBody{}, err
<ide> }
<ide><path>client/plugin_install.go
<ide> import (
<ide> "context"
<ide> "encoding/json"
<ide> "io"
<del> "net/http"
<ide> "net/url"
<ide>
<ide> "github.com/docker/distribution/reference"
<ide> "github.com/docker/docker/api/types"
<add> "github.com/docker/docker/errdefs"
<ide> "github.com/pkg/errors"
<ide> )
<ide>
<ide> func (cli *Client) tryPluginPull(ctx context.Context, query url.Values, privileg
<ide>
<ide> func (cli *Client) checkPluginPermissions(ctx context.Context, query url.Values, options types.PluginInstallOptions) (types.PluginPrivileges, error) {
<ide> resp, err := cli.tryPluginPrivileges(ctx, query, options.RegistryAuth)
<del> if resp.statusCode == http.StatusUnauthorized && options.PrivilegeFunc != nil {
<add> if errdefs.IsUnauthorized(err) && options.PrivilegeFunc != nil {
<ide> // todo: do inspect before to check existing name before checking privileges
<ide> newAuthHeader, privilegeErr := options.PrivilegeFunc()
<ide> if privilegeErr != nil { | 7 |
Javascript | Javascript | add test for manual time tracking and dispose | a1eb8dd975c20702e9af07ebdad89f307037efc0 | <ide><path>test/unit/media.js
<ide> test('should synthesize progress events by default', function() {
<ide> clock.tick(500);
<ide> equal(progresses, 1, 'triggered one event');
<ide> });
<add>
<add>test('dispose() should stop time tracking', function() {
<add> var tech = new videojs.MediaTechController({
<add> id: noop,
<add> on: noop,
<add> trigger: noop
<add> });
<add> tech.dispose();
<add>
<add> // progress and timeupdate events will throw exceptions after the
<add> // tech is disposed
<add> try {
<add> clock.tick(10 * 1000);
<add> } catch (e) {
<add> return equal(e, undefined, 'threw an exception');
<add> }
<add> ok(true, 'no exception was thrown');
<add>}); | 1 |
Java | Java | detect unsent disconnect messages | 371d93b3463c5157f3c4b2b809084ddb00ae2b9e | <ide><path>spring-messaging/src/main/java/org/springframework/messaging/simp/broker/AbstractBrokerMessageHandler.java
<ide> import org.springframework.messaging.MessageChannel;
<ide> import org.springframework.messaging.MessageHandler;
<ide> import org.springframework.messaging.SubscribableChannel;
<add>import org.springframework.messaging.simp.SimpMessageHeaderAccessor;
<add>import org.springframework.messaging.simp.SimpMessageType;
<add>import org.springframework.messaging.support.AbstractMessageChannel;
<add>import org.springframework.messaging.support.ChannelInterceptor;
<add>import org.springframework.messaging.support.ChannelInterceptorAdapter;
<ide> import org.springframework.util.Assert;
<ide> import org.springframework.util.CollectionUtils;
<ide>
<ide>
<ide> private final Object lifecycleMonitor = new Object();
<ide>
<add> private ChannelInterceptor unsentDisconnectInterceptor = new UnsentDisconnectChannelInterceptor();
<add>
<ide>
<ide> /**
<ide> * Constructor with no destination prefixes (matches all destinations).
<ide> public void start() {
<ide> }
<ide> this.clientInboundChannel.subscribe(this);
<ide> this.brokerChannel.subscribe(this);
<add> if (this.clientInboundChannel instanceof AbstractMessageChannel) {
<add> ((AbstractMessageChannel) this.clientInboundChannel).addInterceptor(0, this.unsentDisconnectInterceptor);
<add> }
<ide> startInternal();
<ide> this.running = true;
<ide> if (logger.isInfoEnabled()) {
<ide> public void stop() {
<ide> stopInternal();
<ide> this.clientInboundChannel.unsubscribe(this);
<ide> this.brokerChannel.unsubscribe(this);
<add> if (this.clientInboundChannel instanceof AbstractMessageChannel) {
<add> ((AbstractMessageChannel) this.clientInboundChannel).removeInterceptor(this.unsentDisconnectInterceptor);
<add> }
<ide> this.running = false;
<ide> if (logger.isDebugEnabled()) {
<ide> logger.info("Stopped.");
<ide> protected void publishBrokerUnavailableEvent() {
<ide> }
<ide> }
<ide>
<add>
<add> /**
<add> * Detect unsent DISCONNECT messages and process them anyway.
<add> */
<add> private class UnsentDisconnectChannelInterceptor extends ChannelInterceptorAdapter {
<add>
<add> @Override
<add> public void afterSendCompletion(Message<?> message, MessageChannel channel, boolean sent, Exception ex) {
<add> if (!sent) {
<add> SimpMessageType messageType = SimpMessageHeaderAccessor.getMessageType(message.getHeaders());
<add> if (SimpMessageType.DISCONNECT.equals(messageType)) {
<add> logger.debug("Detected unsent DISCONNECT message. Processing anyway.");
<add> handleMessage(message);
<add> }
<add> }
<add> }
<add> }
<ide> }
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/support/AbstractMessageChannel.java
<ide> public void addInterceptor(ChannelInterceptor interceptor) {
<ide> this.interceptors.add(interceptor);
<ide> }
<ide>
<add> /**
<add> * Add a channel interceptor at the specified index.
<add> */
<add> public void addInterceptor(int index, ChannelInterceptor interceptor) {
<add> this.interceptors.add(index, interceptor);
<add> }
<add>
<ide> /**
<ide> * Return a read-only list of the configured interceptors.
<ide> */
<ide> public List<ChannelInterceptor> getInterceptors() {
<ide> return Collections.unmodifiableList(this.interceptors);
<ide> }
<ide>
<add> /**
<add> * Remove the given interceptor.
<add> */
<add> public boolean removeInterceptor(ChannelInterceptor interceptor) {
<add> return this.interceptors.remove(interceptor);
<add> }
<add>
<ide>
<ide> @Override
<ide> public final boolean send(Message<?> message) {
<ide><path>spring-messaging/src/test/java/org/springframework/messaging/simp/config/MessageBrokerConfigurationTests.java
<ide> public void clientInboundChannelCustomized() {
<ide> AbstractSubscribableChannel channel = this.customContext.getBean(
<ide> "clientInboundChannel", AbstractSubscribableChannel.class);
<ide>
<del> assertEquals(1, channel.getInterceptors().size());
<add> assertEquals(2, channel.getInterceptors().size());
<ide>
<ide> ThreadPoolTaskExecutor taskExecutor = this.customContext.getBean(
<ide> "clientInboundChannelExecutor", ThreadPoolTaskExecutor.class);
<ide><path>spring-websocket/src/test/java/org/springframework/web/socket/config/MessageBrokerBeanDefinitionParserTests.java
<ide> public void simpleBroker() {
<ide> List<Class<? extends MessageHandler>> subscriberTypes =
<ide> Arrays.<Class<? extends MessageHandler>>asList(SimpAnnotationMethodMessageHandler.class,
<ide> UserDestinationMessageHandler.class, SimpleBrokerMessageHandler.class);
<del> testChannel("clientInboundChannel", subscriberTypes, 0);
<add> testChannel("clientInboundChannel", subscriberTypes, 1);
<ide> testExecutor("clientInboundChannel", Runtime.getRuntime().availableProcessors() * 2, Integer.MAX_VALUE, 60);
<ide>
<ide> subscriberTypes = Arrays.<Class<? extends MessageHandler>>asList(SubProtocolWebSocketHandler.class);
<ide> public void stompBrokerRelay() {
<ide> List<Class<? extends MessageHandler>> subscriberTypes =
<ide> Arrays.<Class<? extends MessageHandler>>asList(SimpAnnotationMethodMessageHandler.class,
<ide> UserDestinationMessageHandler.class, StompBrokerRelayMessageHandler.class);
<del> testChannel("clientInboundChannel", subscriberTypes, 0);
<add> testChannel("clientInboundChannel", subscriberTypes, 1);
<ide> testExecutor("clientInboundChannel", Runtime.getRuntime().availableProcessors() * 2, Integer.MAX_VALUE, 60);
<ide>
<ide> subscriberTypes = Arrays.<Class<? extends MessageHandler>>asList(SubProtocolWebSocketHandler.class);
<ide> public void customChannels() {
<ide> Arrays.<Class<? extends MessageHandler>>asList(SimpAnnotationMethodMessageHandler.class,
<ide> UserDestinationMessageHandler.class, SimpleBrokerMessageHandler.class);
<ide>
<del> testChannel("clientInboundChannel", subscriberTypes, 1);
<add> testChannel("clientInboundChannel", subscriberTypes, 2);
<ide> testExecutor("clientInboundChannel", 100, 200, 600);
<ide>
<ide> subscriberTypes = Arrays.<Class<? extends MessageHandler>>asList(SubProtocolWebSocketHandler.class); | 4 |
Ruby | Ruby | change app to main_app in mounted_helpers | 6258f7c972f0dcf85916a0ac9ce3d34460201353 | <ide><path>actionpack/lib/action_dispatch/routing/route_set.rb
<ide> def install_helpers(destinations = [ActionController::Base, ActionView::Base], r
<ide> module MountedHelpers
<ide> end
<ide>
<del> def mounted_helpers(name = :app)
<add> def mounted_helpers(name = :main_app)
<ide> define_mounted_helper(name) if name
<ide> MountedHelpers
<ide> end
<ide><path>actionpack/test/dispatch/prefix_generation_test.rb
<ide> def show
<ide> end
<ide>
<ide> def url_to_application
<del> path = app.url_for( :controller => "outside_engine_generating",
<del> :action => "index",
<del> :only_path => true)
<add> path = main_app.url_for(:controller => "outside_engine_generating",
<add> :action => "index",
<add> :only_path => true)
<ide> render :text => path
<ide> end
<ide>
<ide><path>railties/test/application/initializers/frameworks_test.rb
<ide> def notify
<ide>
<ide> require "#{app_path}/config/environment"
<ide> assert Foo.method_defined?(:foo_path)
<del> assert Foo.method_defined?(:app)
<add> assert Foo.method_defined?(:main_app)
<ide> assert_equal ["notify"], Foo.action_methods
<ide> end
<ide>
<ide><path>railties/test/railties/engine_test.rb
<ide> def from_app
<ide> end
<ide>
<ide> def routes_helpers_in_view
<del> render :inline => "<%= foo_path %>, <%= app.bar_path %>"
<add> render :inline => "<%= foo_path %>, <%= main_app.bar_path %>"
<ide> end
<ide>
<ide> def polymorphic_path_without_namespace
<ide><path>railties/test/railties/mounted_engine_test.rb
<ide> def index
<ide> end
<ide>
<ide> def generate_application_route
<del> path = app.url_for(:controller => "/main",
<add> path = main_app.url_for(:controller => "/main",
<ide> :action => "index",
<ide> :only_path => true)
<ide> render :text => path
<ide> end
<ide>
<ide> def application_route_in_view
<del> render :inline => "<%= app.root_path %>"
<add> render :inline => "<%= main_app.root_path %>"
<ide> end
<ide> end
<ide> end | 5 |
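Behind the rename, `main_app` (like any engine mount name) is just a generated proxy method mixed into controllers and views. A toy model of that mechanism, with invented names and an OpenStruct in place of a real route set:

```ruby
require "ostruct"

# Toy version of RouteSet#define_mounted_helper: each mounted route set gets
# a named proxy method (main_app, blorgh, ...) added to a shared module.
module MountedHelpers; end

def define_mounted_helper(name, routes)
  MountedHelpers.module_eval do
    define_method(name) { routes }
  end
end

main_routes   = OpenStruct.new(root_path: "/")
engine_routes = OpenStruct.new(posts_path: "/blorgh/posts")

define_mounted_helper(:main_app, main_routes)
define_mounted_helper(:blorgh, engine_routes)

class FakeView
  include MountedHelpers
end

view = FakeView.new
puts view.main_app.root_path # => "/"
puts view.blorgh.posts_path  # => "/blorgh/posts"
```

The commit itself only changes the default value of that generated helper name from `app` to `main_app`.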
Ruby | Ruby | fix inconsistent behavior from string#pluralize | 459f7bf38aa196cf4d2d970173b02d88e4a4e75c | <ide><path>activesupport/lib/active_support/core_ext/string/inflections.rb
<ide> class String
<ide> def pluralize(count = nil, locale = :en)
<ide> locale = count if count.is_a?(Symbol)
<ide> if count == 1
<del> self
<add> self.dup
<ide> else
<ide> ActiveSupport::Inflector.pluralize(self, locale)
<ide> end
<ide><path>activesupport/test/core_ext/string_ext_test.rb
<ide> def test_pluralize
<ide> assert_equal("blargles", "blargle".pluralize(2))
<ide> end
<ide>
<add> test 'pluralize with count = 1 still returns new string' do
<add> name = "Kuldeep"
<add> assert_not_same name.pluralize(1), name
<add> end
<add>
<ide> def test_singularize
<ide> SingularToPlural.each do |singular, plural|
<ide> assert_equal(singular, plural.singularize) | 2 |
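The `self.dup` above guards against aliasing: with `count == 1` the old code returned the receiver itself, so mutating the "pluralized" result also mutated the original string. A standalone toy (not the real inflector) showing the difference:

```ruby
# Toy stand-ins purely to show the object-identity issue.
def naive_pluralize(word, count = nil)
  count == 1 ? word : word + "s"
end

def safe_pluralize(word, count = nil)
  count == 1 ? word.dup : word + "s"
end

name = "Kuldeep"
aliased = naive_pluralize(name, 1)
aliased << "!"                 # same object, so the original changes too
puts name                      # => "Kuldeep!"

name = "Kuldeep"
copied = safe_pluralize(name, 1)
copied << "!"
puts name                      # => "Kuldeep" (unchanged)
puts copied.equal?(name)       # => false, distinct objects
```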
Mixed | Java | remove ios platform check for running devtools | 22fbb6d46d4b91bfe08b83aaba46ad92a32bf984 | <ide><path>Libraries/JavaScriptAppEngine/Initialization/InitializeJavaScriptAppEngine.js
<ide> function setUpCollections(): void {
<ide> function setUpDevTools(): void {
<ide> if (__DEV__) {
<ide> // not when debugging in chrome
<del> if (!window.document && require('Platform').OS === 'ios') {
<add> if (!window.document) {
<ide> const setupDevtools = require('setupDevtools');
<ide> setupDevtools();
<ide> }
<ide><path>ReactAndroid/src/androidTest/java/com/facebook/react/testing/FakeWebSocketModule.java
<add>/**
<add> * Copyright (c) 2014-present, Facebook, Inc.
<add> * All rights reserved.
<add> * This source code is licensed under the BSD-style license found in the
<add> * LICENSE file in the root directory of this source tree. An additional grant
<add> * of patent rights can be found in the PATENTS file in the same directory.
<add> */
<add>
<add>package com.facebook.react.testing;
<add>
<add>import javax.annotation.Nullable;
<add>
<add>import com.facebook.react.bridge.BaseJavaModule;
<add>import com.facebook.react.bridge.ReactMethod;
<add>import com.facebook.react.bridge.ReadableArray;
<add>import com.facebook.react.bridge.ReadableMap;
<add>
<add>/**
<add> * Dummy implementation of storage module, used for testing
<add> */
<add>public final class FakeWebSocketModule extends BaseJavaModule {
<add>
<add> @Override
<add> public String getName() {
<add> return "WebSocketModule";
<add> }
<add>
<add> @Override
<add> public boolean canOverrideExistingModule() {
<add> return true;
<add> }
<add>
<add> @ReactMethod
<add> public void connect(
<add> final String url,
<add> @Nullable final ReadableArray protocols,
<add> @Nullable final ReadableMap headers,
<add> final int id) {
<add> }
<add>
<add> @ReactMethod
<add> public void close(int code, String reason, int id) {
<add> }
<add>
<add> @ReactMethod
<add> public void send(String message, int id) {
<add> }
<add>
<add> @ReactMethod
<add> public void sendBinary(String base64String, int id) {
<add> }
<add>}
<ide><path>ReactAndroid/src/androidTest/java/com/facebook/react/testing/ReactInstanceSpecForTest.java
<ide> package com.facebook.react.testing;
<ide>
<ide> import java.util.ArrayList;
<add>import java.util.Arrays;
<ide> import java.util.List;
<ide>
<ide> import android.annotation.SuppressLint;
<ide> @SuppressLint("JavatestsIncorrectFolder")
<ide> public class ReactInstanceSpecForTest {
<ide>
<del> private final List<NativeModule> mNativeModules = new ArrayList<>();
<add> private final List<NativeModule> mNativeModules =
<add> new ArrayList<NativeModule>(Arrays.asList(new FakeWebSocketModule()));
<ide> private final List<Class<? extends JavaScriptModule>> mJSModuleSpecs = new ArrayList<>();
<ide> private final List<ViewManager> mViewManagers = new ArrayList<>();
<ide> private ReactPackage mReactPackage = null;
<ide><path>ReactAndroid/src/androidTest/java/com/facebook/react/tests/CatalystNativeJSToJavaParametersTestCase.java
<ide> import com.facebook.react.bridge.WritableMap;
<ide> import com.facebook.react.bridge.WritableNativeMap;
<ide> import com.facebook.react.modules.systeminfo.AndroidInfoModule;
<add>import com.facebook.react.testing.FakeWebSocketModule;
<ide> import com.facebook.react.testing.ReactIntegrationTestCase;
<ide> import com.facebook.react.testing.ReactTestHelper;
<ide> import com.facebook.react.uimanager.UIImplementation;
<ide> import com.facebook.react.uimanager.UIManagerModule;
<ide> import com.facebook.react.uimanager.ViewManager;
<ide> import com.facebook.react.views.view.ReactViewManager;
<ide>
<del>import org.junit.Ignore;
<del>
<ide> /**
<ide> * Integration test to verify passing various types of parameters from JS to Java works
<ide> */
<ide> private interface TestJSToJavaParametersModule extends JavaScriptModule {
<ide> @Override
<ide> protected void setUp() throws Exception {
<ide> super.setUp();
<del>
<add>
<ide> List<ViewManager> viewManagers = Arrays.<ViewManager>asList(
<ide> new ReactViewManager());
<ide> final UIManagerModule mUIManager = new UIManagerModule(
<ide> public void run() {
<ide> mCatalystInstance = ReactTestHelper.catalystInstanceBuilder(this)
<ide> .addNativeModule(mRecordingTestModule)
<ide> .addNativeModule(new AndroidInfoModule())
<add> .addNativeModule(new FakeWebSocketModule())
<ide> .addNativeModule(mUIManager)
<ide> .addJSModule(TestJSToJavaParametersModule.class)
<ide> .build();
<ide><path>ReactAndroid/src/androidTest/java/com/facebook/react/tests/CatalystNativeJavaToJSArgumentsTestCase.java
<ide> import com.facebook.react.bridge.WritableNativeMap;
<ide> import com.facebook.react.bridge.UiThreadUtil;
<ide> import com.facebook.react.testing.AssertModule;
<add>import com.facebook.react.testing.FakeWebSocketModule;
<ide> import com.facebook.react.testing.ReactIntegrationTestCase;
<ide> import com.facebook.react.testing.ReactTestHelper;
<ide> import com.facebook.react.uimanager.UIImplementation;
<ide> public void run() {
<ide>
<ide> mInstance = ReactTestHelper.catalystInstanceBuilder(this)
<ide> .addNativeModule(mAssertModule)
<add> .addNativeModule(new FakeWebSocketModule())
<ide> .addJSModule(TestJavaToJSArgumentsModule.class)
<ide> .addNativeModule(mUIManager)
<ide> .build();
<ide><path>ReactAndroid/src/androidTest/java/com/facebook/react/tests/CatalystUIManagerTestCase.java
<ide> import com.facebook.react.views.text.ReactRawTextManager;
<ide> import com.facebook.react.views.text.ReactTextViewManager;
<ide> import com.facebook.react.views.view.ReactViewManager;
<add>import com.facebook.react.testing.FakeWebSocketModule;
<ide> import com.facebook.react.testing.ReactIntegrationTestCase;
<ide> import com.facebook.react.testing.ReactTestHelper;
<ide>
<ide> public void run() {
<ide> jsModule = ReactTestHelper.catalystInstanceBuilder(this)
<ide> .addNativeModule(uiManager)
<ide> .addNativeModule(new AndroidInfoModule())
<add> .addNativeModule(new FakeWebSocketModule())
<ide> .addJSModule(UIManagerTestModule.class)
<ide> .build()
<ide> .getJSModule(UIManagerTestModule.class);
<ide><path>ReactAndroid/src/androidTest/java/com/facebook/react/tests/InitialPropsTestCase.java
<ide> import android.test.ActivityInstrumentationTestCase2;
<ide>
<ide> import com.facebook.react.bridge.BaseJavaModule;
<add>import com.facebook.react.testing.FakeWebSocketModule;
<ide> import com.facebook.react.testing.ReactInstanceSpecForTest;
<ide> import com.facebook.react.bridge.ReactMethod;
<ide> import com.facebook.react.bridge.ReadableArray;
<ide> public void testInitialProps() throws Throwable {
<ide> @Override
<ide> public void run() {
<ide> ReactInstanceSpecForTest catalystInstanceSpec = new ReactInstanceSpecForTest();
<add> catalystInstanceSpec.addNativeModule(new FakeWebSocketModule());
<ide> catalystInstanceSpec.addNativeModule(mRecordingModule);
<ide> Bundle props = new Bundle();
<ide> props.putString("key1", "string");
<ide><path>ReactAndroid/src/androidTest/java/com/facebook/react/tests/JSLocaleTest.java
<ide> import java.util.Arrays;
<ide> import java.util.List;
<ide>
<add>import com.facebook.react.testing.FakeWebSocketModule;
<ide> import com.facebook.react.testing.ReactIntegrationTestCase;
<ide> import com.facebook.react.testing.ReactTestHelper;
<ide> import com.facebook.react.testing.StringRecordingModule;
<ide> public void run() {
<ide> mInstance = ReactTestHelper.catalystInstanceBuilder(this)
<ide> .addNativeModule(mStringRecordingModule)
<ide> .addNativeModule(mUIManager)
<add> .addNativeModule(new FakeWebSocketModule())
<ide> .addJSModule(TestJSLocaleModule.class)
<ide> .build();
<del>
<ide> }
<ide>
<ide> public void testToUpper() {
<ide> public void testToLower() {
<ide> assertEquals("γαζίες καὶ μυρτιὲς δὲν θὰ βρῶ πιὰ στὸ χρυσαφὶ ξέφωτο", answers[3]);
<ide> assertEquals("chinese: 幓 厏吪吙 鈊釿閍 碞碠粻 曮禷", answers[4]);
<ide> }
<del>
<del>
<ide> }
<ide><path>ReactAndroid/src/androidTest/java/com/facebook/react/tests/ProgressBarTestCase.java
<ide> import com.facebook.react.uimanager.ViewManager;
<ide> import com.facebook.react.views.progressbar.ReactProgressBarViewManager;
<ide> import com.facebook.react.views.view.ReactViewManager;
<add>import com.facebook.react.testing.FakeWebSocketModule;
<ide> import com.facebook.react.testing.ReactIntegrationTestCase;
<ide> import com.facebook.react.testing.ReactTestHelper;
<ide>
<ide> public void run() {
<ide> mInstance = ReactTestHelper.catalystInstanceBuilder(this)
<ide> .addNativeModule(mUIManager)
<ide> .addNativeModule(new AndroidInfoModule())
<add> .addNativeModule(new FakeWebSocketModule())
<ide> .addJSModule(ProgressBarTestModule.class)
<ide> .build();
<ide>
<ide><path>ReactAndroid/src/androidTest/java/com/facebook/react/tests/ViewRenderingTestCase.java
<ide> import com.facebook.react.uimanager.ViewManager;
<ide> import com.facebook.react.views.view.ReactViewGroup;
<ide> import com.facebook.react.views.view.ReactViewManager;
<add>import com.facebook.react.testing.FakeWebSocketModule;
<ide> import com.facebook.react.testing.ReactIntegrationTestCase;
<ide> import com.facebook.react.testing.ReactTestHelper;
<ide>
<ide> public void run() {
<ide> mCatalystInstance = ReactTestHelper.catalystInstanceBuilder(this)
<ide> .addNativeModule(uiManager)
<ide> .addNativeModule(new AndroidInfoModule())
<add> .addNativeModule(new FakeWebSocketModule())
<ide> .addJSModule(ViewRenderingTestModule.class)
<ide> .build();
<ide> | 10 |
Javascript | Javascript | fix license headers on swipablerow | 09fe99972d35eded7e739c9f7af1691395a8eb35 | <ide><path>Libraries/Experimental/SwipeableRow/SwipeableListView.js
<ide> /**
<del> * Copyright (c) 2013-present, Facebook, Inc.
<add> * Copyright (c) 2015-present, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide> * LICENSE file in the root directory of this source tree. An additional grant
<ide> * of patent rights can be found in the PATENTS file in the same directory.
<ide> *
<del> * The examples provided by Facebook are for non-commercial testing and
<del> * evaluation purposes only.
<del> *
<del> * Facebook reserves all rights not expressly granted.
<del> *
<del> * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
<del> * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
<del> * FITNESS FOR A PARTICULAR PURPOSE AND NON INFRINGEMENT. IN NO EVENT SHALL
<del> * FACEBOOK BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
<del> * AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
<del> * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. *
<del> *
<ide> * @providesModule SwipeableListView
<ide> * @flow
<ide> */
<ide><path>Libraries/Experimental/SwipeableRow/SwipeableListViewDataSource.js
<ide> /**
<del> * Copyright (c) 2013-present, Facebook, Inc.
<add> * Copyright (c) 2015-present, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide> * LICENSE file in the root directory of this source tree. An additional grant
<ide> * of patent rights can be found in the PATENTS file in the same directory.
<ide> *
<del> * The examples provided by Facebook are for non-commercial testing and
<del> * evaluation purposes only.
<del> *
<del> * Facebook reserves all rights not expressly granted.
<del> *
<del> * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
<del> * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
<del> * FITNESS FOR A PARTICULAR PURPOSE AND NON INFRINGEMENT. IN NO EVENT SHALL
<del> * FACEBOOK BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
<del> * AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
<del> * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. *
<del> *
<ide> * @providesModule SwipeableListViewDataSource
<ide> */
<ide> 'use strict';
<ide><path>Libraries/Experimental/SwipeableRow/SwipeableQuickActionButton.js
<ide> /**
<del> * Copyright (c) 2013-present, Facebook, Inc.
<add> * Copyright (c) 2015-present, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide> * LICENSE file in the root directory of this source tree. An additional grant
<ide> * of patent rights can be found in the PATENTS file in the same directory.
<ide> *
<del> * The examples provided by Facebook are for non-commercial testing and
<del> * evaluation purposes only.
<del> *
<del> * Facebook reserves all rights not expressly granted.
<del> *
<del> * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
<del> * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
<del> * FITNESS FOR A PARTICULAR PURPOSE AND NON INFRINGEMENT. IN NO EVENT SHALL
<del> * FACEBOOK BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
<del> * AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
<del> * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. *
<del> *
<ide> * @providesModule SwipeableQuickActionButton
<ide> * @flow
<ide> */
<ide><path>Libraries/Experimental/SwipeableRow/SwipeableQuickActions.js
<ide> /**
<del> * Copyright (c) 2013-present, Facebook, Inc.
<add> * Copyright (c) 2015-present, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide> * LICENSE file in the root directory of this source tree. An additional grant
<ide> * of patent rights can be found in the PATENTS file in the same directory.
<ide> *
<del> * The examples provided by Facebook are for non-commercial testing and
<del> * evaluation purposes only.
<del> *
<del> * Facebook reserves all rights not expressly granted.
<del> *
<del> * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
<del> * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
<del> * FITNESS FOR A PARTICULAR PURPOSE AND NON INFRINGEMENT. IN NO EVENT SHALL
<del> * FACEBOOK BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
<del> * AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
<del> * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. *
<del> *
<ide> * @providesModule SwipeableQuickActions
<ide> * @flow
<ide> */
<ide><path>Libraries/Experimental/SwipeableRow/SwipeableRow.js
<ide> /**
<del> * Copyright (c) 2013-present, Facebook, Inc.
<add> * Copyright (c) 2015-present, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide> * LICENSE file in the root directory of this source tree. An additional grant
<ide> * of patent rights can be found in the PATENTS file in the same directory.
<ide> *
<del> * The examples provided by Facebook are for non-commercial testing and
<del> * evaluation purposes only.
<del> *
<del> * Facebook reserves all rights not expressly granted.
<del> *
<del> * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
<del> * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
<del> * FITNESS FOR A PARTICULAR PURPOSE AND NON INFRINGEMENT. IN NO EVENT SHALL
<del> * FACEBOOK BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
<del> * AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
<del> * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. *
<del> *
<ide> * @providesModule SwipeableRow
<ide> * @flow
<ide> */ | 5 |
Java | Java | eliminate duplication of list in tolist | 1913d4def31b489b421bfbb700a8ee23da147ee7 | <ide><path>rxjava-core/src/main/java/rx/internal/operators/OperatorToObservableList.java
<ide> import rx.Subscriber;
<ide>
<ide> import java.util.ArrayList;
<add>import java.util.Collections;
<ide> import java.util.LinkedList;
<ide> import java.util.List;
<ide>
<ide> public Subscriber<? super T> call(final Subscriber<? super List<T>> o) {
<ide> return new Subscriber<T>(o) {
<ide>
<add> private boolean completed = false;
<ide> final List<T> list = new LinkedList<T>();
<ide>
<ide> @Override
<ide> public void onStart() {
<ide> @Override
<ide> public void onCompleted() {
<ide> try {
<del> o.onNext(new ArrayList<T>(list));
<add> completed = true;
<add> o.onNext(Collections.unmodifiableList(list));
<ide> o.onCompleted();
<ide> } catch (Throwable e) {
<ide> onError(e);
<ide> public void onError(Throwable e) {
<ide>
<ide> @Override
<ide> public void onNext(T value) {
<del> list.add(value);
<add> if (!completed) {
<add> list.add(value);
<add> }
<ide> }
<ide>
<ide> }; | 1 |
Python | Python | remove duplicate word from docstring | cf425403c8ff24623812a46272dc91f712ed5086 | <ide><path>flask/app.py
<ide> class Flask(_PackageBoundObject):
<ide> at `static_url_path`. Defaults to the ``'static'``
<ide> folder in the root path of the application.
<ide> :param host_matching: sets the app's ``url_map.host_matching`` to the given
<del> given value. Defaults to False.
<add> value. Defaults to False.
<ide> :param static_host: the host to use when adding the static route. Defaults
<ide> to None. Required when using ``host_matching=True``
<ide> with a ``static_folder`` configured. | 1 |
Ruby | Ruby | modify autoremove to use uninstall | 10ae9bcde728148ffed445f976cb06d3cf08e65d | <ide><path>Library/Homebrew/cmd/autoremove.rb
<ide>
<ide> require "formula"
<ide> require "cli/parser"
<add>require "uninstall"
<ide>
<ide> module Homebrew
<add> extend Uninstall
<add>
<ide> module_function
<ide>
<ide> def autoremove_args
<ide> def autoremove_args
<ide> end
<ide> end
<ide>
<del> def get_removable_formulae(installed_formulae)
<del> removable_formulae = []
<del>
<del> installed_formulae.each do |formula|
<del> # Reject formulae installed on request.
<del> next if Tab.for_keg(formula.any_installed_keg).installed_on_request
<del> # Reject formulae which are needed at runtime by other formulae.
<del> next if installed_formulae.flat_map(&:runtime_formula_dependencies).include?(formula)
<add> def get_removable_formulae(formulae)
<add> removable_formulae = Formula.installed_non_deps(formulae).reject {
<add> |f| Tab.for_keg(f.any_installed_keg).installed_on_request
<add> }
<ide>
<del> removable_formulae << installed_formulae.delete(formula)
<del> removable_formulae += get_removable_formulae(installed_formulae)
<add> if removable_formulae.any?
<add> removable_formulae += get_removable_formulae(formulae - removable_formulae)
<ide> end
<ide>
<ide> removable_formulae
<ide> def get_removable_formulae(installed_formulae)
<ide> def autoremove
<ide> args = autoremove_args.parse
<ide>
<del> removable_formulae = get_removable_formulae(Formula.installed.sort)
<add> removable_formulae = get_removable_formulae(Formula.installed)
<ide>
<ide> return if removable_formulae.blank?
<ide>
<del> formulae_names = removable_formulae.map(&:full_name)
<add> formulae_names = removable_formulae.map(&:full_name).sort
<ide>
<ide> oh1 "Formulae that could be removed"
<ide> puts formulae_names
<del>
<add>
<ide> return if args.dry_run?
<ide>
<del> system HOMEBREW_BREW_FILE, "rm", *formulae_names
<add> kegs_by_rack = removable_formulae.map(&:any_installed_keg).group_by(&:rack)
<add> uninstall_kegs(kegs_by_rack)
<ide> end
<ide> end | 1 |
Text | Text | add note about no-optional to swc error doc | 59712b16c7174e60dde17dd0878f764eba59cd26 | <ide><path>errors/failed-loading-swc.md
<ide> SWC requires a binary be downloaded that is compatible specific to your system.
<ide>
<ide> #### Possible Ways to Fix It
<ide>
<add>You might need to allow optional packages to be installed by your package manager (remove `--no-optional` flag) for the package to download correctly.
<add>
<ide> If SWC continues to fail to load you can opt-out by disabling `swcMinify` in your `next.config.js` or by adding a `.babelrc` to your project with the following content:
<ide>
<ide> ```json | 1 |
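
A rough sketch of the `next.config.js` opt-out that the paragraph above refers to, assuming a Next.js release where `swcMinify` is a top-level config option (verify against the version in use); the alternative `.babelrc` route from the same paragraph opts the compiler itself out of SWC:

```js
// next.config.js -- illustrative only; option name taken from the doc text above.
module.exports = {
  swcMinify: false, // fall back to the non-SWC minifier while the binary issue is investigated
};
```
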
Python | Python | fix typo and use password instead of passphrase | 7d827aa4d7550b9d261080fa355c4f049dd303f9 | <ide><path>libcloud/compute/ssh.py
<ide> def connect(self):
<ide> if self.key_material:
<ide> conninfo['pkey'] = self._get_pkey_object(
<ide> key=self.key_material,
<del> passpharse=self.password)
<add> password=self.password)
<ide>
<ide> if not self.password and not (self.key_files or self.key_material):
<ide> conninfo['allow_agent'] = True
<ide> def connect(self):
<ide>
<ide> try:
<ide> pkey = self._get_pkey_object(key=key_material,
<del> passpharse=self.password)
<add> password=self.password)
<ide> except paramiko.ssh_exception.PasswordRequiredException as e:
<ide> raise e
<ide> except Exception:
<ide> def _consume_data_from_channel(self, chan, recv_method, recv_ready_method):
<ide> result.write(result_bytes.decode('utf-8'))
<ide> return result
<ide>
<del> def _get_pkey_object(self, key, passpharse=None):
<add> def _get_pkey_object(self, key, password=None):
<ide> """
<ide> Try to detect private key type and return paramiko.PKey object.
<ide>
<ide> def _get_pkey_object(self, key, passpharse=None):
<ide> key_value = key
<ide>
<ide> try:
<del> key = cls.from_private_key(StringIO(key_value), passpharse)
<add> key = cls.from_private_key(StringIO(key_value), password)
<ide> except paramiko.ssh_exception.PasswordRequiredException as e:
<ide> raise e
<ide> except (paramiko.ssh_exception.SSHException, AssertionError) as e:
<ide> if 'private key file checkints do not match' in str(e).lower():
<del> msg = ('Invalid passpharse provided for encrypted key. '
<add> msg = ('Invalid password provided for encrypted key. '
<ide> 'Original error: %s' % (str(e)))
<ide> # Indicates invalid password for password protected keys
<ide> raise paramiko.ssh_exception.SSHException(msg) | 1 |
Javascript | Javascript | add color support for more terminals | ceaeee012066e7e56332ba9d9f9306401c971560 | <ide><path>lib/internal/tty.js
<ide> const COLORS_16 = 4;
<ide> const COLORS_256 = 8;
<ide> const COLORS_16m = 24;
<ide>
<add>// Some entries were taken from `dircolors`
<add>// (https://linux.die.net/man/1/dircolors). The corresponding terminals might
<add>// support more than 16 colors, but this was not tested for.
<add>//
<add>// Copyright (C) 1996-2016 Free Software Foundation, Inc. Copying and
<add>// distribution of this file, with or without modification, are permitted
<add>// provided the copyright notice and this notice are preserved.
<add>const TERM_ENVS = [
<add> 'Eterm',
<add> 'cons25',
<add> 'console',
<add> 'cygwin',
<add> 'dtterm',
<add> 'gnome',
<add> 'hurd',
<add> 'jfbterm',
<add> 'konsole',
<add> 'kterm',
<add> 'mlterm',
<add> 'putty',
<add> 'st',
<add> 'terminator'
<add>];
<add>
<add>const TERM_ENVS_REG_EXP = [
<add> /ansi/,
<add> /color/,
<add> /linux/,
<add> /^con[0-9]*x[0-9]/,
<add> /^rxvt/,
<add> /^screen/,
<add> /^xterm/,
<add> /^vt100/
<add>];
<add>
<ide> // The `getColorDepth` API got inspired by multiple sources such as
<ide> // https://github.com/chalk/supports-color,
<ide> // https://github.com/isaacs/color-support.
<ide> function getColorDepth(env = process.env) {
<ide> if (env.TERM) {
<ide> if (/^xterm-256/.test(env.TERM))
<ide> return COLORS_256;
<del> if (/^screen|^xterm|^vt100|color|ansi|cygwin|linux/i.test(env.TERM))
<del> return COLORS_16;
<add>
<add> const termEnv = env.TERM.toLowerCase();
<add>
<add> for (const term of TERM_ENVS) {
<add> if (termEnv === term) {
<add> return COLORS_16;
<add> }
<add> }
<add> for (const term of TERM_ENVS_REG_EXP) {
<add> if (term.test(termEnv)) {
<add> return COLORS_16;
<add> }
<add> }
<ide> }
<ide>
<ide> if (env.COLORTERM) | 1 |
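
The patch above widens the set of `TERM` values that map to 16-colour support, first by exact name (`TERM_ENVS`) and then by pattern (`TERM_ENVS_REG_EXP`). Because the same logic backs the public `tty.WriteStream#getColorDepth([env])` method, the behaviour can be probed with a synthetic environment; a sketch, assuming a TTY stdout on a Node build that includes this change:

```js
// Probe the widened TERM handling with fake env objects.
if (process.stdout.isTTY) {
  console.log(process.stdout.getColorDepth({ TERM: 'konsole' }));        // expected 4 (16 colours, exact-name match)
  console.log(process.stdout.getColorDepth({ TERM: 'rxvt-unicode' }));   // expected 4 (matches /^rxvt/)
  console.log(process.stdout.getColorDepth({ TERM: 'xterm-256color' })); // expected 8 (256 colours, unchanged path)
}
```
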
Python | Python | drop redundant comment | 27590fea8b34f95fc4419524db18cc5053699bee | <ide><path>djangorestframework/status.py
<ide> Also see django.core.handlers.wsgi.STATUS_CODE_TEXT
<ide> """
<ide>
<del># Verbose format
<ide> HTTP_100_CONTINUE = 100
<ide> HTTP_101_SWITCHING_PROTOCOLS = 101
<ide> HTTP_200_OK = 200 | 1 |
Text | Text | fix typo in url fragment | 441ecc459f0d08a930ee2d6f4502f24e2217e1fc | <ide><path>docs/reference/builder.md
<ide> instruction must be \`FROM\`** in order to specify the [*Base
<ide> Image*](glossary.md#base-image) from which you are building.
<ide>
<ide> Docker treats lines that *begin* with `#` as a comment, unless the line is
<del>a valid [parser directive](builder.md#parser directives). A `#` marker anywhere
<add>a valid [parser directive](builder.md#parser-directives). A `#` marker anywhere
<ide> else in a line is treated as an argument. This allows statements like:
<ide>
<ide> ```Dockerfile | 1 |
Python | Python | improve english tag map. re #593, | 7503e1e505dc70c93713d8848df3cbe1d5a6f44c | <ide><path>spacy/lang/en/tag_map.py
<ide> ":": {POS: PUNCT},
<ide> "$": {POS: SYM, "Other": {"SymType": "currency"}},
<ide> "#": {POS: SYM, "Other": {"SymType": "numbersign"}},
<del> "AFX": {POS: ADJ, "Hyph": "yes"},
<add> "AFX": {POS: X, "Hyph": "yes"},
<ide> "CC": {POS: CCONJ, "ConjType": "coor"},
<ide> "CD": {POS: NUM, "NumType": "card"},
<ide> "DT": {POS: DET},
<ide> "NNP": {POS: PROPN, "NounType": "prop", "Number": "sing"},
<ide> "NNPS": {POS: PROPN, "NounType": "prop", "Number": "plur"},
<ide> "NNS": {POS: NOUN, "Number": "plur"},
<del> "PDT": {POS: ADJ, "AdjType": "pdt", "PronType": "prn"},
<add> "PDT": {POS: DET, "AdjType": "pdt", "PronType": "prn"},
<ide> "POS": {POS: PART, "Poss": "yes"},
<del> "PRP": {POS: PRON, "PronType": "prs"},
<del> "PRP$": {POS: ADJ, "PronType": "prs", "Poss": "yes"},
<add> "PRP": {POS: DET, "PronType": "prs"},
<add> "PRP$": {POS: DET, "PronType": "prs", "Poss": "yes"},
<ide> "RB": {POS: ADV, "Degree": "pos"},
<ide> "RBR": {POS: ADV, "Degree": "comp"},
<ide> "RBS": {POS: ADV, "Degree": "sup"},
<ide> "Number": "sing",
<ide> "Person": 3,
<ide> },
<del> "WDT": {POS: ADJ, "PronType": "int|rel"},
<del> "WP": {POS: NOUN, "PronType": "int|rel"},
<del> "WP$": {POS: ADJ, "Poss": "yes", "PronType": "int|rel"},
<add> "WDT": {POS: DET, "PronType": "int|rel"},
<add> "WP": {POS: PRON, "PronType": "int|rel"},
<add> "WP$": {POS: DET, "Poss": "yes", "PronType": "int|rel"},
<ide> "WRB": {POS: ADV, "PronType": "int|rel"},
<ide> "ADD": {POS: X},
<ide> "NFP": {POS: PUNCT}, | 1 |
Javascript | Javascript | improve display of filenames in component stack | 54d86eb8227255b207c8a82619d374738b542ca8 | <ide><path>packages/shared/__tests__/describeComponentFrame-test.js
<add>/**
<add> * Copyright (c) 2016-present, Facebook, Inc.
<add> *
<add> * This source code is licensed under the MIT license found in the
<add> * LICENSE file in the root directory of this source tree.
<add> *
<add> * @emails react-core
<add> */
<add>
<add>'use strict';
<add>
<add>let React;
<add>let ReactDOM;
<add>
<add>describe('Component stack trace displaying', () => {
<add> beforeEach(() => {
<add> React = require('react');
<add> ReactDOM = require('react-dom');
<add> });
<add>
<add> it('should provide filenames in stack traces', () => {
<add> class Component extends React.Component {
<add> render() {
<add> return [<span>a</span>, <span>b</span>];
<add> }
<add> }
<add>
<add> spyOnDev(console, 'error');
<add> const container = document.createElement('div');
<add> const fileNames = {
<add> '': '',
<add> '/': '',
<add> '\\': '',
<add> Foo: 'Foo',
<add> 'Bar/Foo': 'Foo',
<add> 'Bar\\Foo': 'Foo',
<add> 'Baz/Bar/Foo': 'Foo',
<add> 'Baz\\Bar\\Foo': 'Foo',
<add>
<add> 'Foo.js': 'Foo.js',
<add> 'Foo.jsx': 'Foo.jsx',
<add> '/Foo.js': 'Foo.js',
<add> '/Foo.jsx': 'Foo.jsx',
<add> '\\Foo.js': 'Foo.js',
<add> '\\Foo.jsx': 'Foo.jsx',
<add> 'Bar/Foo.js': 'Foo.js',
<add> 'Bar/Foo.jsx': 'Foo.jsx',
<add> 'Bar\\Foo.js': 'Foo.js',
<add> 'Bar\\Foo.jsx': 'Foo.jsx',
<add> '/Bar/Foo.js': 'Foo.js',
<add> '/Bar/Foo.jsx': 'Foo.jsx',
<add> '\\Bar\\Foo.js': 'Foo.js',
<add> '\\Bar\\Foo.jsx': 'Foo.jsx',
<add> 'Bar/Baz/Foo.js': 'Foo.js',
<add> 'Bar/Baz/Foo.jsx': 'Foo.jsx',
<add> 'Bar\\Baz\\Foo.js': 'Foo.js',
<add> 'Bar\\Baz\\Foo.jsx': 'Foo.jsx',
<add> '/Bar/Baz/Foo.js': 'Foo.js',
<add> '/Bar/Baz/Foo.jsx': 'Foo.jsx',
<add> '\\Bar\\Baz\\Foo.js': 'Foo.js',
<add> '\\Bar\\Baz\\Foo.jsx': 'Foo.jsx',
<add> 'C:\\funny long (path)/Foo.js': 'Foo.js',
<add> 'C:\\funny long (path)/Foo.jsx': 'Foo.jsx',
<add>
<add> 'index.js': 'index.js',
<add> 'index.jsx': 'index.jsx',
<add> '/index.js': 'index.js',
<add> '/index.jsx': 'index.jsx',
<add> '\\index.js': 'index.js',
<add> '\\index.jsx': 'index.jsx',
<add> 'Bar/index.js': 'Bar/index.js',
<add> 'Bar/index.jsx': 'Bar/index.jsx',
<add> 'Bar\\index.js': 'Bar/index.js',
<add> 'Bar\\index.jsx': 'Bar/index.jsx',
<add> '/Bar/index.js': 'Bar/index.js',
<add> '/Bar/index.jsx': 'Bar/index.jsx',
<add> '\\Bar\\index.js': 'Bar/index.js',
<add> '\\Bar\\index.jsx': 'Bar/index.jsx',
<add> 'Bar/Baz/index.js': 'Baz/index.js',
<add> 'Bar/Baz/index.jsx': 'Baz/index.jsx',
<add> 'Bar\\Baz\\index.js': 'Baz/index.js',
<add> 'Bar\\Baz\\index.jsx': 'Baz/index.jsx',
<add> '/Bar/Baz/index.js': 'Baz/index.js',
<add> '/Bar/Baz/index.jsx': 'Baz/index.jsx',
<add> '\\Bar\\Baz\\index.js': 'Baz/index.js',
<add> '\\Bar\\Baz\\index.jsx': 'Baz/index.jsx',
<add> 'C:\\funny long (path)/index.js': 'funny long (path)/index.js',
<add> 'C:\\funny long (path)/index.jsx': 'funny long (path)/index.jsx',
<add> };
<add> Object.keys(fileNames).forEach((fileName, i) => {
<add> ReactDOM.render(
<add> <Component __source={{fileName, lineNumber: i}} />,
<add> container,
<add> );
<add> });
<add> if (__DEV__) {
<add> let i = 0;
<add> expect(console.error.calls.count()).toBe(Object.keys(fileNames).length);
<add> for (let fileName in fileNames) {
<add> if (!fileNames.hasOwnProperty(fileName)) {
<add> continue;
<add> }
<add> const args = console.error.calls.argsFor(i);
<add> const stack = args[args.length - 1];
<add> const expected = fileNames[fileName];
<add> expect(stack).toContain(`at ${expected}:`);
<add> i++;
<add> }
<add> }
<add> });
<add>});
<ide><path>packages/shared/describeComponentFrame.js
<ide> * @flow
<ide> */
<ide>
<add>const BEFORE_SLASH_RE = /^(.*)[\\\/]/;
<add>
<ide> export default function(
<ide> name: null | string,
<ide> source: any,
<ide> ownerName: null | string,
<ide> ) {
<del> return (
<del> '\n in ' +
<del> (name || 'Unknown') +
<del> (source
<del> ? ' (at ' +
<del> source.fileName.replace(/^.*[\\\/]/, '') +
<del> ':' +
<del> source.lineNumber +
<del> ')'
<del> : ownerName
<del> ? ' (created by ' + ownerName + ')'
<del> : '')
<del> );
<add> let sourceInfo = '';
<add> if (source) {
<add> let path = source.fileName;
<add> let fileName = path.replace(BEFORE_SLASH_RE, '');
<add> if (/^index\./.test(fileName)) {
<add> // Special case: include closest folder name for `index.*` filenames.
<add> const match = path.match(BEFORE_SLASH_RE);
<add> if (match) {
<add> const pathBeforeSlash = match[1];
<add> if (pathBeforeSlash) {
<add> const folderName = pathBeforeSlash.replace(BEFORE_SLASH_RE, '');
<add> fileName = folderName + '/' + fileName;
<add> }
<add> }
<add> }
<add> sourceInfo = ' (at ' + fileName + ':' + source.lineNumber + ')';
<add> } else if (ownerName) {
<add> sourceInfo = ' (created by ' + ownerName + ')';
<add> }
<add> return '\n in ' + (name || 'Unknown') + sourceInfo;
<ide> } | 2 |
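
The `describeComponentFrame` change trims a component's source path to its basename, keeping the nearest folder only for `index.*` files so those frames stay distinguishable. A standalone re-implementation of just that trimming rule (not the actual React export), with outputs matching the expectations in the test above:

```js
// Sketch of the path-shortening rule: basename, plus the parent folder for index.* files.
const BEFORE_SLASH_RE = /^(.*)[\\\/]/;

function shortenFileName(path) {
  let fileName = path.replace(BEFORE_SLASH_RE, '');
  if (/^index\./.test(fileName)) {
    const match = path.match(BEFORE_SLASH_RE);
    if (match && match[1]) {
      const folderName = match[1].replace(BEFORE_SLASH_RE, '');
      fileName = folderName + '/' + fileName;
    }
  }
  return fileName;
}

console.log(shortenFileName('/Bar/Baz/Foo.js'));                // "Foo.js"
console.log(shortenFileName('/Bar/Baz/index.js'));              // "Baz/index.js"
console.log(shortenFileName('C:\\funny long (path)/index.js')); // "funny long (path)/index.js"
```
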
PHP | PHP | add tests for rebinding event | d1175f90de7f541468c90661dc363b424fd7decd | <ide><path>tests/Container/ContainerTest.php
<ide> public function testUnsetRemoveBoundInstances()
<ide> $this->assertFalse($container->bound('object'));
<ide> }
<ide>
<add>
<add> public function testReboundListeners()
<add> {
<add> unset($_SERVER['__test.rebind']);
<add>
<add> $container = new Container;
<add> $container->bind('foo', function() {});
<add> $container->rebinding('foo', function() { $_SERVER['__test.rebind'] = true; });
<add> $container->bind('foo', function() {});
<add>
<add> $this->assertTrue($_SERVER['__test.rebind']);
<add> }
<add>
<add>
<add> public function testReboundListenersOnInstances()
<add> {
<add> unset($_SERVER['__test.rebind']);
<add>
<add> $container = new Container;
<add> $container->instance('foo', function() {});
<add> $container->rebinding('foo', function() { $_SERVER['__test.rebind'] = true; });
<add> $container->instance('foo', function() {});
<add>
<add> $this->assertTrue($_SERVER['__test.rebind']);
<add> }
<add>
<ide> }
<ide>
<ide> class ContainerConcreteStub {} | 1 |
Ruby | Ruby | add explicit anaconda warning | dafa11af9a0b346d12fb9e26adb21d8cff0d5a16 | <ide><path>Library/Homebrew/cmd/doctor.rb
<ide> def check_for_macgpg2
<ide> end
<ide> end
<ide>
<add> # Anaconda installs multiple system & brew dupes, including OpenSSL, Python,
<add> # sqlite, libpng, Qt, etc. Regularly breaks compile on Vim, MacVim and others.
<add> # Is flagged as part of the *-config script checks below, but people seem
<add> # to ignore those as warnings rather than extremely likely breakage.
<add> def check_for_anaconda
<add> return unless which("anaconda")
<add> return unless which("python")
<add>
<add> anaconda = which("anaconda").realpath.dirname
<add> python_binary = Utils.popen_read which("python"), "-c", "import sys; sys.stdout.write(sys.executable)"
<add> python = Pathname.new(python_binary).realpath.dirname
<add>
<add> # Only warn if Python lives with Anaconda, since is most problematic case.
<add> if python == anaconda then <<-EOS.undent
<add> Anaconda is known to frequently break Homebrew builds, including Vim and
<add> MacVim, due to bundling many duplicates of system and Homebrew-available
<add> tools.
<add>
<add> If you encounter a build failure please temporarily remove Anaconda
<add> from your $PATH and attempt the build again prior to reporting the
<add> failure to us. Thanks!
<add> EOS
<add> end
<add> end
<add>
<ide> def __check_stray_files(dir, pattern, white_list, message)
<ide> return unless File.directory?(dir)
<ide> | 1 |
Ruby | Ruby | add more tests to some key points in railties | e7418ab63cc4aa024367851f2f692032840cfe76 | <ide><path>railties/lib/rails/application.rb
<ide> def inherited(base)
<ide> Rails.application = base.instance
<ide> end
<ide>
<add> def respond_to?(*args)
<add> super || instance.respond_to?(*args)
<add> end
<add>
<ide> protected
<ide>
<ide> def method_missing(*args, &block)
<ide><path>railties/test/application/configuration_test.rb
<ide> def app
<ide> end
<ide>
<ide> def setup
<del> FileUtils.rm_rf(new_app) if File.directory?(new_app)
<ide> build_app
<ide> boot_rails
<ide> FileUtils.rm_rf("#{app_path}/config/environments")
<ide> end
<ide>
<add> def teardown
<add> FileUtils.rm_rf(new_app) if File.directory?(new_app)
<add> end
<add>
<ide> test "Rails::Application.instance is nil until app is initialized" do
<ide> require 'rails'
<ide> assert_nil Rails::Application.instance
<ide> require "#{app_path}/config/environment"
<ide> assert_equal AppTemplate::Application.instance, Rails::Application.instance
<ide> end
<ide>
<add> test "Rails::Application responds to all instance methods" do
<add> require "#{app_path}/config/environment"
<add> assert_respond_to Rails::Application, :routes_reloader
<add> assert_equal Rails::Application.routes_reloader, Rails.application.routes_reloader
<add> end
<add>
<ide> test "the application root is set correctly" do
<ide> require "#{app_path}/config/environment"
<ide> assert_equal Pathname.new(app_path), Rails.application.root
<ide><path>railties/test/application/initializers/frameworks_test.rb
<ide> def setup
<ide> assert_equal expects, middleware & expects
<ide> end
<ide>
<add> test "active_record extensions are applied to ActiveRecord" do
<add> add_to_config "config.active_record.table_name_prefix = 'tbl_'"
<add> require "#{app_path}/config/environment"
<add> assert_equal 'tbl_', ActiveRecord::Base.table_name_prefix
<add> end
<add>
<ide> test "database middleware doesn't initialize when activerecord is not in frameworks" do
<ide> use_frameworks []
<ide> require "#{app_path}/config/environment"
<ide><path>railties/test/application/initializers/load_path_test.rb
<ide> def setup
<ide> FileUtils.rm_rf "#{app_path}/config/environments"
<ide> end
<ide>
<del> # General
<ide> test "initializing an application adds the application paths to the load path" do
<ide> add_to_config <<-RUBY
<ide> config.root = "#{app_path}"
<ide> def setup
<ide> assert $:.include?("#{app_path}/app/models")
<ide> end
<ide>
<add> test "initializing an application eager load any path under app" do
<add> app_file "app/anything/foo.rb", <<-RUBY
<add> module Foo; end
<add> RUBY
<add>
<add> add_to_config <<-RUBY
<add> config.root = "#{app_path}"
<add> RUBY
<add>
<add> require "#{app_path}/config/environment"
<add> assert Foo
<add> end
<add>
<ide> test "eager loading loads parent classes before children" do
<ide> app_file "lib/zoo.rb", <<-ZOO
<ide> class Zoo ; include ReptileHouse ; end
<ide> ZOO
<add>
<ide> app_file "lib/zoo/reptile_house.rb", <<-ZOO
<ide> module Zoo::ReptileHouse ; end
<ide> ZOO
<ide> module Zoo::ReptileHouse ; end
<ide> RUBY
<ide>
<ide> require "#{app_path}/config/environment"
<del>
<ide> assert Zoo
<ide> end
<ide>
<ide><path>railties/test/railties/configuration_test.rb
<del>require "isolation/abstract_unit"
<del>
<del>module RailtiesTest
<del> class ConfigurationTest < Test::Unit::TestCase
<del> def setup
<del> build_app
<del> boot_rails
<del> require "rails/all"
<del> end
<del>
<del> test "config is available to plugins" do
<del> class Foo < Rails::Railtie ; end
<del> assert_nil Foo.config.action_controller.foo
<del> end
<del>
<del> test "a config name is available for the plugin" do
<del> class Foo < Rails::Railtie ; config.foo.greetings = "hello" ; end
<del> assert_equal "hello", Foo.config.foo.greetings
<del> end
<del>
<del> test "railtie configurations are available in the application" do
<del> class Foo < Rails::Railtie ; config.foo.greetings = "hello" ; end
<del> require "#{app_path}/config/application"
<del> assert_equal "hello", AppTemplate::Application.config.foo.greetings
<del> end
<del>
<del> test "railtie config merges are deep" do
<del> class Foo < Rails::Railtie ; config.foo.greetings = 'hello' ; end
<del> class Bar < Rails::Railtie
<del> config.foo.bar = "bar"
<del> end
<del> assert_equal "hello", Bar.config.foo.greetings
<del> assert_equal "bar", Bar.config.foo.bar
<del> end
<del>
<del> test "railtie can add subscribers" do
<del> begin
<del> class Foo < Rails::Railtie; subscriber(Rails::Subscriber.new); end
<del> assert_kind_of Rails::Subscriber, Rails::Subscriber.subscribers[:foo]
<del> ensure
<del> Rails::Subscriber.subscribers.clear
<del> end
<del> end
<del> end
<del>end
<ide><path>railties/test/railties/engine_test.rb
<ide> class Engine < ::Rails::Engine
<ide> def reload_config
<ide> :reload_engines
<ide> end
<add>
<add> test "Rails::Engine itself does not respond to config" do
<add> assert !Rails::Engine.respond_to?(:config)
<add> end
<ide> end
<ide> end
<ide><path>railties/test/railties/framework_extension_test.rb
<del>require "isolation/abstract_unit"
<del>
<del>module RailtiesTest
<del> class FrameworkExtensionTest < Test::Unit::TestCase
<del> include ActiveSupport::Testing::Isolation
<del>
<del> def setup
<del> build_app
<del> boot_rails
<del> FileUtils.rm_rf("#{app_path}/config/environments")
<del> require "rails/all"
<del> end
<del>
<del> test "rake_tasks block is executed when MyApp.load_tasks is called" do
<del> $ran_block = false
<del>
<del> class MyTie < Rails::Railtie
<del> rake_tasks do
<del> $ran_block = true
<del> end
<del> end
<del>
<del> require "#{app_path}/config/environment"
<del>
<del> assert !$ran_block
<del> require 'rake'
<del> require 'rake/testtask'
<del> require 'rake/rdoctask'
<del>
<del> AppTemplate::Application.load_tasks
<del> assert $ran_block
<del> end
<del>
<del> test "generators block is executed when MyApp.load_generators is called" do
<del> $ran_block = false
<del>
<del> class MyTie < Rails::Railtie
<del> generators do
<del> $ran_block = true
<del> end
<del> end
<del>
<del> require "#{app_path}/config/environment"
<del>
<del> assert !$ran_block
<del> AppTemplate::Application.load_generators
<del> assert $ran_block
<del> end
<del>
<del> test "railtie initializer" do
<del> $ran_block = false
<del>
<del> class MyTie < Rails::Railtie
<del> initializer :something_nice do
<del> $ran_block = true
<del> end
<del> end
<del>
<del> assert !$ran_block
<del> require "#{app_path}/config/environment"
<del> assert $ran_block
<del> end
<del> end
<del>
<del> class ActiveRecordExtensionTest < Test::Unit::TestCase
<del> include ActiveSupport::Testing::Isolation
<del>
<del> def setup
<del> build_app
<del> boot_rails
<del> FileUtils.rm_rf("#{app_path}/config/environments")
<del> end
<del>
<del> test "active_record extensions are applied to ActiveRecord" do
<del> add_to_config "config.active_record.table_name_prefix = 'tbl_'"
<del>
<del> require "#{app_path}/config/environment"
<del>
<del> assert_equal 'tbl_', ActiveRecord::Base.table_name_prefix
<del> end
<del> end
<del>end
<ide>\ No newline at end of file
<ide><path>railties/test/railties/plugin_test.rb
<ide> def reload_config
<ide> :reload_plugins
<ide> end
<ide>
<add> test "Rails::Plugin itself does not respond to config" do
<add> assert !Rails::Plugin.respond_to?(:config)
<add> end
<add>
<add> test "cannot inherit from Rails::Plugin" do
<add> boot_rails
<add> assert_raise RuntimeError do
<add> class Foo < Rails::Plugin; end
<add> end
<add> end
<add>
<ide> test "plugin can load the file with the same name in lib" do
<ide> boot_rails
<ide> require "bukkits"
<ide><path>railties/test/railties/railtie_test.rb
<add>require "isolation/abstract_unit"
<add>
<add>module RailtiesTest
<add> class RailtieTest < Test::Unit::TestCase
<add> include ActiveSupport::Testing::Isolation
<add>
<add> def setup
<add> build_app
<add> boot_rails
<add> FileUtils.rm_rf("#{app_path}/config/environments")
<add> require "rails/all"
<add> end
<add>
<add> def app
<add> @app ||= Rails.application
<add> end
<add>
<add> test "Rails::Railtie itself does not respond to config" do
<add> assert !Rails::Railtie.respond_to?(:config)
<add> end
<add>
<add> test "cannot inherit from a railtie" do
<add> class Foo < Rails::Railtie ; end
<add> assert_raise RuntimeError do
<add> class Bar < Foo; end
<add> end
<add> end
<add>
<add> test "config is available to railtie" do
<add> class Foo < Rails::Railtie ; end
<add> assert_nil Foo.config.action_controller.foo
<add> end
<add>
<add> test "config name is available for the railtie" do
<add> class Foo < Rails::Railtie ; config.foo.greetings = "hello" ; end
<add> assert_equal "hello", Foo.config.foo.greetings
<add> end
<add>
<add> test "railtie configurations are available in the application" do
<add> class Foo < Rails::Railtie ; config.foo.greetings = "hello" ; end
<add> require "#{app_path}/config/application"
<add> assert_equal "hello", AppTemplate::Application.config.foo.greetings
<add> end
<add>
<add> test "railtie config merges are deep" do
<add> class Foo < Rails::Railtie ; config.foo.greetings = 'hello' ; end
<add> class Bar < Rails::Railtie
<add> config.foo.bar = "bar"
<add> end
<add> assert_equal "hello", Bar.config.foo.greetings
<add> assert_equal "bar", Bar.config.foo.bar
<add> end
<add>
<add> test "railtie can add subscribers" do
<add> begin
<add> class Foo < Rails::Railtie ; subscriber(Rails::Subscriber.new) ; end
<add> assert_kind_of Rails::Subscriber, Rails::Subscriber.subscribers[:foo]
<add> ensure
<add> Rails::Subscriber.subscribers.clear
<add> end
<add> end
<add>
<add> test "railtie can add to_prepare callbacks" do
<add> $to_prepare = false
<add> class Foo < Rails::Railtie ; config.to_prepare { $to_prepare = true } ; end
<add> assert !$to_prepare
<add> require "#{app_path}/config/environment"
<add> require "rack/test"
<add> extend Rack::Test::Methods
<add> get "/"
<add> assert $to_prepare
<add> end
<add>
<add> test "railtie can add after_initialize callbacks" do
<add> $after_initialize = false
<add> class Foo < Rails::Railtie ; config.after_initialize { $after_initialize = true } ; end
<add> assert !$after_initialize
<add> require "#{app_path}/config/environment"
<add> assert $after_initialize
<add> end
<add>
<add> test "rake_tasks block is executed when MyApp.load_tasks is called" do
<add> $ran_block = false
<add>
<add> class MyTie < Rails::Railtie
<add> rake_tasks do
<add> $ran_block = true
<add> end
<add> end
<add>
<add> require "#{app_path}/config/environment"
<add>
<add> assert !$ran_block
<add> require 'rake'
<add> require 'rake/testtask'
<add> require 'rake/rdoctask'
<add>
<add> AppTemplate::Application.load_tasks
<add> assert $ran_block
<add> end
<add>
<add> test "generators block is executed when MyApp.load_generators is called" do
<add> $ran_block = false
<add>
<add> class MyTie < Rails::Railtie
<add> generators do
<add> $ran_block = true
<add> end
<add> end
<add>
<add> require "#{app_path}/config/environment"
<add>
<add> assert !$ran_block
<add> AppTemplate::Application.load_generators
<add> assert $ran_block
<add> end
<add>
<add> test "railtie can add initializers" do
<add> $ran_block = false
<add>
<add> class MyTie < Rails::Railtie
<add> initializer :something_nice do
<add> $ran_block = true
<add> end
<add> end
<add>
<add> assert !$ran_block
<add> require "#{app_path}/config/environment"
<add> assert $ran_block
<add> end
<add> end
<add>end
<ide><path>railties/test/railties/shared_tests.rb
<ide> def bukkits
<ide> assert_equal "Hello bukkits\n", response[2].body
<ide> end
<ide>
<add> def test_plugin_eager_load_any_path_under_app
<add> @plugin.write "app/anything/foo.rb", <<-RUBY
<add> module Foo; end
<add> RUBY
<add>
<add> boot_rails
<add> assert Foo
<add> end
<add>
<ide> def test_routes_are_added_to_router
<ide> @plugin.write "config/routes.rb", <<-RUBY
<ide> class Sprokkit | 10 |
Javascript | Javascript | update gltfexporter.js | 09a68603cda2e17bf1a60988fa124132ee5f86fa | <ide><path>examples/js/exporters/GLTFExporter.js
<ide>
<ide> if ( material.emissive ) {
<ide>
<del> // emissiveFactor
<add> // note: `emissive` is not scaled by `material.emissiveIntensity` for now to accommodate glTF spec. see #21849.
<ide> const emissive = material.emissive.toArray();
<ide>
<ide> if ( ! equalArray( emissive, [ 0, 0, 0 ] ) ) {
<ide><path>examples/jsm/exporters/GLTFExporter.js
<ide> class GLTFWriter {
<ide>
<ide> if ( material.emissive ) {
<ide>
<del> // emissiveFactor
<add> // note: `emissive` is not scaled by `material.emissiveIntensity` for now to accommodate glTF spec. see #21849.
<ide> const emissive = material.emissive.toArray();
<ide>
<ide> if ( ! equalArray( emissive, [ 0, 0, 0 ] ) ) { | 2 |
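
The comment added to both exporter builds pins down a subtlety: `material.emissive` is written straight into the glTF `emissiveFactor`, and `material.emissiveIntensity` is deliberately not baked in (see the referenced #21849). A sketch of the case it covers; the module paths and exact output shape are assumptions that depend on the three.js revision in use:

```js
// Illustrative only: emissiveIntensity is not reflected in the exported emissiveFactor.
import * as THREE from 'three';
import { GLTFExporter } from 'three/examples/jsm/exporters/GLTFExporter.js';

const material = new THREE.MeshStandardMaterial({
  emissive: new THREE.Color(0.2, 0.4, 0.8),
  emissiveIntensity: 2.0, // not represented in the exported material, per the comment above
});
const mesh = new THREE.Mesh(new THREE.BoxGeometry(), material);

new GLTFExporter().parse(mesh, (gltf) => {
  // expected: [0.2, 0.4, 0.8], the raw emissive colour rather than a value scaled by 2.0
  console.log(gltf.materials[0].emissiveFactor);
});
```
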
Python | Python | patch albert with heads in tensorflow | 2708b44ee9c151a2cdb84620d295c997af6fa7f0 | <ide><path>src/transformers/modeling_tf_albert.py
<ide> def _prune_heads(self, heads_to_prune):
<ide> raise NotImplementedError
<ide>
<ide> def call(
<del> self,
<del> inputs,
<del> attention_mask=None,
<del> token_type_ids=None,
<del> position_ids=None,
<del> head_mask=None,
<del> inputs_embeds=None,
<del> training=False,
<add> self,
<add> inputs,
<add> attention_mask=None,
<add> token_type_ids=None,
<add> position_ids=None,
<add> head_mask=None,
<add> inputs_embeds=None,
<add> training=False,
<ide> ):
<ide> if isinstance(inputs, (tuple, list)):
<ide> input_ids = inputs[0] | 1 |
Javascript | Javascript | fix global event triggering that i broke in r6323 | cbda6c541b9f2dd8fbaa084ecce7f421e8dc3dc4 | <ide><path>src/event.js
<ide> jQuery.event = {
<ide> event.stopPropagation();
<ide> // Only trigger if we've ever bound an event for it
<ide> if ( this.global[ type ] ) {
<del> for ( var cached in jQuery.cache ) {
<del> if ( cached.events && cached.events[ type ] ) {
<del> this.trigger( event, data, cached.handle.elem );
<add> jQuery.each( jQuery.cache, function() {
<add> if ( this.events && this.events[type] ) {
<add> jQuery.event.trigger( event, data, this.handle.elem );
<ide> }
<del> }
<add> });
<ide> }
<ide> }
<ide>
<ide><path>test/unit/ajax.js
<ide> if ( !isLocal ) {
<ide>
<ide> test("jQuery.ajax() - success callbacks", function() {
<ide> expect( 8 );
<del>
<add>
<ide> jQuery.ajaxSetup({ timeout: 0 });
<del>
<del> stop();
<del>
<del> setTimeout(function(){
<del> jQuery('#foo').ajaxStart(function(){
<del> ok( true, "ajaxStart" );
<del> }).ajaxStop(function(){
<del> ok( true, "ajaxStop" );
<del> start();
<del> }).ajaxSend(function(){
<del> ok( true, "ajaxSend" );
<del> }).ajaxComplete(function(){
<del> ok( true, "ajaxComplete" );
<del> }).ajaxError(function(){
<del> ok( false, "ajaxError" );
<del> }).ajaxSuccess(function(){
<del> ok( true, "ajaxSuccess" );
<del> });
<del>
<del> jQuery.ajax({
<del> url: url("data/name.html"),
<del> beforeSend: function(){ ok(true, "beforeSend"); },
<del> success: function(){ ok(true, "success"); },
<del> error: function(){ ok(false, "error"); },
<del> complete: function(){ ok(true, "complete"); }
<del> });
<del> }, 13);
<add>
<add> stop();
<add>
<add> setTimeout(function(){
<add> jQuery('#foo').ajaxStart(function(){
<add> ok( true, "ajaxStart" );
<add> }).ajaxStop(function(){
<add> ok( true, "ajaxStop" );
<add> start();
<add> }).ajaxSend(function(){
<add> ok( true, "ajaxSend" );
<add> }).ajaxComplete(function(){
<add> ok( true, "ajaxComplete" );
<add> }).ajaxError(function(){
<add> ok( false, "ajaxError" );
<add> }).ajaxSuccess(function(){
<add> ok( true, "ajaxSuccess" );
<add> });
<add>
<add> jQuery.ajax({
<add> url: url("data/name.html"),
<add> beforeSend: function(){ ok(true, "beforeSend"); },
<add> success: function(){ ok(true, "success"); },
<add> error: function(){ ok(false, "error"); },
<add> complete: function(){ ok(true, "complete"); }
<add> });
<add> }, 13);
<ide> });
<ide>
<ide> test("jQuery.ajax() - error callbacks", function() {
<del> expect( 8 );
<del> stop();
<del>
<del> jQuery('#foo').ajaxStart(function(){
<del> ok( true, "ajaxStart" );
<del> }).ajaxStop(function(){
<del> ok( true, "ajaxStop" );
<del> start();
<del> }).ajaxSend(function(){
<del> ok( true, "ajaxSend" );
<del> }).ajaxComplete(function(){
<del> ok( true, "ajaxComplete" );
<del> }).ajaxError(function(){
<del> ok( true, "ajaxError" );
<del> }).ajaxSuccess(function(){
<del> ok( false, "ajaxSuccess" );
<del> });
<del>
<del> jQuery.ajaxSetup({ timeout: 500 });
<del>
<del> jQuery.ajax({
<del> url: url("data/name.php?wait=5"),
<del> beforeSend: function(){ ok(true, "beforeSend"); },
<del> success: function(){ ok(false, "success"); },
<del> error: function(){ ok(true, "error"); },
<del> complete: function(){ ok(true, "complete"); }
<del> });
<add> expect( 8 );
<add> stop();
<add>
<add> jQuery('#foo').ajaxStart(function(){
<add> ok( true, "ajaxStart" );
<add> }).ajaxStop(function(){
<add> ok( true, "ajaxStop" );
<add> start();
<add> }).ajaxSend(function(){
<add> ok( true, "ajaxSend" );
<add> }).ajaxComplete(function(){
<add> ok( true, "ajaxComplete" );
<add> }).ajaxError(function(){
<add> ok( true, "ajaxError" );
<add> }).ajaxSuccess(function(){
<add> ok( false, "ajaxSuccess" );
<add> });
<add>
<add> jQuery.ajaxSetup({ timeout: 500 });
<add>
<add> jQuery.ajax({
<add> url: url("data/name.php?wait=5"),
<add> beforeSend: function(){ ok(true, "beforeSend"); },
<add> success: function(){ ok(false, "success"); },
<add> error: function(){ ok(true, "error"); },
<add> complete: function(){ ok(true, "complete"); }
<add> });
<ide> });
<ide>
<ide> test("jQuery.ajax() - disabled globals", function() {
<ide> expect( 3 );
<ide> stop();
<del>
<add>
<ide> jQuery('#foo').ajaxStart(function(){
<ide> ok( false, "ajaxStart" );
<ide> }).ajaxStop(function(){
<ide> test("jQuery.ajax() - disabled globals", function() {
<ide> }).ajaxSuccess(function(){
<ide> ok( false, "ajaxSuccess" );
<ide> });
<del>
<add>
<ide> jQuery.ajax({
<ide> global: false,
<ide> url: url("data/name.html"),
<ide> test("jQuery.ajax() - disabled globals", function() {
<ide> complete: function(){
<ide> ok(true, "complete");
<ide> setTimeout(function(){ start(); }, 13);
<del> }
<add> }
<ide> });
<ide> });
<ide>
<ide> test("jQuery.ajax - xml: non-namespace elements inside namespaced elements", fun
<ide> url: url("data/with_fries.xml"),
<ide> dataType: "xml",
<ide> success: function(resp) {
<del> equals( jQuery("properties", resp).length, 1, 'properties in responseXML' );
<del> equals( jQuery("jsconf", resp).length, 1, 'jsconf in responseXML' );
<del> equals( jQuery("thing", resp).length, 2, 'things in responseXML' );
<del> start();
<add> equals( jQuery("properties", resp).length, 1, 'properties in responseXML' );
<add> equals( jQuery("jsconf", resp).length, 1, 'jsconf in responseXML' );
<add> equals( jQuery("thing", resp).length, 2, 'things in responseXML' );
<add> start();
<ide> }
<ide> });
<ide> });
<ide>
<ide> test("jQuery.ajax - beforeSend", function() {
<ide> expect(1);
<ide> stop();
<del>
<add>
<ide> var check = false;
<del>
<add>
<ide> jQuery.ajaxSetup({ timeout: 0 });
<del>
<add>
<ide> jQuery.ajax({
<del> url: url("data/name.html"),
<add> url: url("data/name.html"),
<ide> beforeSend: function(xml) {
<ide> check = true;
<ide> },
<ide> test("jQuery.ajax - beforeSend", function() {
<ide> test("jQuery.ajax - beforeSend, cancel request (#2688)", function() {
<ide> expect(2);
<ide> var request = jQuery.ajax({
<del> url: url("data/name.html"),
<add> url: url("data/name.html"),
<ide> beforeSend: function() {
<ide> ok( true, "beforeSend got called, canceling" );
<ide> return false;
<ide> window.testFoo = undefined;
<ide> test("jQuery.ajax - dataType html", function() {
<ide> expect(5);
<ide> stop();
<del>
<add>
<ide> var verifyEvaluation = function() {
<ide> equals( testFoo, "foo", 'Check if script was evaluated for datatype html' );
<ide> equals( foobar, "bar", 'Check if script src was evaluated for datatype html' );
<del>
<add>
<ide> start();
<ide> };
<ide>
<ide> jQuery.ajax({
<ide> dataType: "html",
<ide> url: url("data/test.html"),
<ide> success: function(data) {
<del> jQuery("#ap").html(data);
<del> ok( data.match(/^html text/), 'Check content for datatype html' );
<del> setTimeout(verifyEvaluation, 600);
<add> jQuery("#ap").html(data);
<add> ok( data.match(/^html text/), 'Check content for datatype html' );
<add> setTimeout(verifyEvaluation, 600);
<ide> }
<ide> });
<ide> });
<ide>
<ide> test("serialize()", function() {
<ide> expect(6);
<del>
<add>
<ide> equals( jQuery('#form').serialize(),
<ide> "action=Test&radio2=on&check=on&hidden=&foo%5Bbar%5D=&name=name&search=search&select1=&select2=3&select3=1&select3=2",
<ide> 'Check form serialization as query string');
<del>
<add>
<ide> equals( jQuery('#form :input').serialize(),
<ide> "action=Test&radio2=on&check=on&hidden=&foo%5Bbar%5D=&name=name&search=search&select1=&select2=3&select3=1&select3=2",
<ide> 'Check input serialization as query string');
<del>
<del> equals( jQuery('#testForm').serialize(),
<del> 'T3=%3F%0AZ&H1=x&H2=&PWD=&T1=&T2=YES&My+Name=me&S1=abc&S3=YES&S4=',
<add>
<add> equals( jQuery('#testForm').serialize(),
<add> 'T3=%3F%0AZ&H1=x&H2=&PWD=&T1=&T2=YES&My+Name=me&S1=abc&S3=YES&S4=',
<ide> 'Check form serialization as query string');
<del>
<del> equals( jQuery('#testForm :input').serialize(),
<del> 'T3=%3F%0AZ&H1=x&H2=&PWD=&T1=&T2=YES&My+Name=me&S1=abc&S3=YES&S4=',
<add>
<add> equals( jQuery('#testForm :input').serialize(),
<add> 'T3=%3F%0AZ&H1=x&H2=&PWD=&T1=&T2=YES&My+Name=me&S1=abc&S3=YES&S4=',
<ide> 'Check input serialization as query string');
<del>
<add>
<ide> equals( jQuery('#form, #testForm').serialize(),
<ide> "action=Test&radio2=on&check=on&hidden=&foo%5Bbar%5D=&name=name&search=search&select1=&select2=3&select3=1&select3=2&T3=%3F%0AZ&H1=x&H2=&PWD=&T1=&T2=YES&My+Name=me&S1=abc&S3=YES&S4=",
<ide> 'Multiple form serialization as query string');
<del>
<add>
<ide> equals( jQuery('#form, #testForm :input').serialize(),
<ide> "action=Test&radio2=on&check=on&hidden=&foo%5Bbar%5D=&name=name&search=search&select1=&select2=3&select3=1&select3=2&T3=%3F%0AZ&H1=x&H2=&PWD=&T1=&T2=YES&My+Name=me&S1=abc&S3=YES&S4=",
<ide> 'Mixed form/input serialization as query string');
<ide> test("jQuery.param()", function() {
<ide> expect(4);
<ide> var params = {foo:"bar", baz:42, quux:"All your base are belong to us"};
<ide> equals( jQuery.param(params), "foo=bar&baz=42&quux=All+your+base+are+belong+to+us", "simple" );
<del>
<add>
<ide> params = {someName: [1, 2, 3], regularThing: "blah" };
<ide> equals( jQuery.param(params), "someName=1&someName=2&someName=3®ularThing=blah", "with array" );
<del>
<add>
<ide> params = {"foo[]":["baz", 42, "All your base are belong to us"]};
<ide> equals( jQuery.param(params), "foo%5B%5D=baz&foo%5B%5D=42&foo%5B%5D=All+your+base+are+belong+to+us", "more array" );
<del>
<add>
<ide> params = {"foo[bar]":"baz", "foo[beep]":42, "foo[quux]":"All your base are belong to us"};
<ide> equals( jQuery.param(params), "foo%5Bbar%5D=baz&foo%5Bbeep%5D=42&foo%5Bquux%5D=All+your+base+are+belong+to+us", "even more arrays" );
<ide> });
<ide> test("synchronous request with callbacks", function() {
<ide> test("pass-through request object", function() {
<ide> expect(8);
<ide> stop();
<del>
<add>
<ide> var target = "data/name.html";
<ide> var successCount = 0;
<ide> var errorCount = 0;
<ide> test("pass-through request object", function() {
<ide> equals(successCount, 5, "Check all ajax calls successful");
<ide> equals(errorCount, 0, "Check no ajax errors (status" + errorEx + ")");
<ide> jQuery("#foo").unbind('ajaxError');
<del>
<add>
<ide> start();
<ide> });
<del>
<add>
<ide> ok( jQuery.get(url(target), success), "get" );
<ide> ok( jQuery.post(url(target), success), "post" );
<ide> ok( jQuery.getScript(url("data/test.js"), success), "script" );
<ide> test("pass-through request object", function() {
<ide> test("ajax cache", function () {
<ide> expect(18);
<ide> stop();
<del>
<add>
<ide> var count = 0;
<ide>
<ide> jQuery("#firstp").bind("ajaxSuccess", function (e, xml, s) {
<ide> var re = /_=(.*?)(&|$)/g;
<del> var oldOne = null;
<add> var oldOne = null;
<ide> for (var i = 0; i < 6; i++) {
<del> var ret = re.exec(s.url);
<add> var ret = re.exec(s.url);
<ide> if (!ret) {
<ide> break;
<ide> }
<del> oldOne = ret[1];
<add> oldOne = ret[1];
<ide> }
<ide> equals(i, 1, "Test to make sure only one 'no-cache' parameter is there");
<ide> ok(oldOne != "tobereplaced555", "Test to be sure parameter (if it was there) was replaced");
<ide> test("global ajaxSettings", function() {
<ide> expect(2);
<ide>
<ide> var tmp = jQuery.extend({}, jQuery.ajaxSettings);
<del> var orig = { url: "data/with_fries.xml" };
<add> var orig = { url: "data/with_fries.xml" };
<ide> var t;
<ide>
<ide> jQuery.ajaxSetup({ data: {foo: 'bar', bar: 'BAR'} });
<ide>
<del> t = jQuery.extend({}, orig);
<add> t = jQuery.extend({}, orig);
<ide> t.data = {};
<del> jQuery.ajax(t);
<add> jQuery.ajax(t);
<ide> ok( t.url.indexOf('foo') > -1 && t.url.indexOf('bar') > -1, "Check extending {}" );
<ide>
<del> t = jQuery.extend({}, orig);
<add> t = jQuery.extend({}, orig);
<ide> t.data = { zoo: 'a', ping: 'b' };
<del> jQuery.ajax(t);
<add> jQuery.ajax(t);
<ide> ok( t.url.indexOf('ping') > -1 && t.url.indexOf('zoo') > -1 && t.url.indexOf('foo') > -1 && t.url.indexOf('bar') > -1, "Check extending { zoo: 'a', ping: 'b' }" );
<del>
<add>
<ide> jQuery.ajaxSettings = tmp;
<ide> });
<ide>
<ide> test("load(String, Function) - simple: inject text into DOM", function() {
<ide> test("load(String, Function) - check scripts", function() {
<ide> expect(7);
<ide> stop();
<del>
<add>
<ide> var verifyEvaluation = function() {
<ide> equals( foobar, "bar", 'Check if script src was evaluated after load' );
<ide> equals( jQuery('#ap').html(), 'bar', 'Check if script evaluation has modified DOM');
<del>
<add>
<ide> start();
<ide> };
<ide> jQuery('#first').load(url('data/test.html'), function() {
<ide> test("load(String, Function) - check file with only a script tag", function() {
<ide> jQuery('#first').load(url('data/test2.html'), function() {
<ide> equals( jQuery('#foo').html(), 'foo', 'Check if script evaluation has modified DOM');
<ide> equals( testFoo, "foo", 'Check if script was evaluated after load' );
<del>
<add>
<ide> start();
<ide> });
<ide> });
<ide>
<ide> test("load(String, Object, Function)", function() {
<ide> expect(2);
<ide> stop();
<del>
<add>
<ide> jQuery('<div />').load(url('data/params_html.php'), { foo:3, bar:'ok' }, function() {
<del> var $post = jQuery(this).find('#post');
<add> var $post = jQuery(this).find('#post');
<ide> equals( $post.find('#foo').text(), '3', 'Check if a hash of data is passed correctly');
<ide> equals( $post.find('#bar').text(), 'ok', 'Check if a hash of data is passed correctly');
<ide> start();
<ide> test("load(String, Object, Function)", function() {
<ide> test("load(String, String, Function)", function() {
<ide> expect(2);
<ide> stop();
<del>
<add>
<ide> jQuery('<div />').load(url('data/params_html.php'), 'foo=3&bar=ok', function() {
<del> var $get = jQuery(this).find('#get');
<add> var $get = jQuery(this).find('#get');
<ide> equals( $get.find('#foo').text(), '3', 'Check if a string of data is passed correctly');
<del> equals( $get.find('#bar').text(), 'ok', 'Check if a of data is passed correctly');
<add> equals( $get.find('#bar').text(), 'ok', 'Check if a of data is passed correctly');
<ide> start();
<ide> });
<ide> });
<ide> test("jQuery.post(String, Hash, Function) - simple with xml", function() {
<ide>
<ide> jQuery.post(url("data/name.php"), {xml: "5-2"}, function(xml){
<ide> jQuery('math', xml).each(function() {
<del> equals( jQuery('calculation', this).text(), '5-2', 'Check for XML' );
<del> equals( jQuery('result', this).text(), '3', 'Check for XML' );
<add> equals( jQuery('calculation', this).text(), '5-2', 'Check for XML' );
<add> equals( jQuery('result', this).text(), '3', 'Check for XML' );
<ide> });
<ide> if ( ++done === 2 ) start();
<ide> });
<ide>
<ide> jQuery.post(url("data/name.php?xml=5-2"), {}, function(xml){
<ide> jQuery('math', xml).each(function() {
<del> equals( jQuery('calculation', this).text(), '5-2', 'Check for XML' );
<del> equals( jQuery('result', this).text(), '3', 'Check for XML' );
<add> equals( jQuery('calculation', this).text(), '5-2', 'Check for XML' );
<add> equals( jQuery('result', this).text(), '3', 'Check for XML' );
<ide> });
<ide> if ( ++done === 2 ) start();
<ide> });
<ide> });
<ide>
<ide> test("jQuery.ajaxSetup({timeout: Number}) - with global timeout", function() {
<ide> stop();
<del>
<add>
<ide> var passed = 0;
<ide>
<ide> jQuery.ajaxSetup({timeout: 1000});
<del>
<add>
<ide> var pass = function() {
<ide> passed++;
<ide> if ( passed == 2 ) {
<ide> ok( true, 'Check local and global callbacks after timeout' );
<del> jQuery('#main').unbind("ajaxError");
<add> jQuery('#main').unbind("ajaxError");
<ide> start();
<ide> }
<ide> };
<del>
<add>
<ide> var fail = function(a,b,c) {
<ide> ok( false, 'Check for timeout failed ' + a + ' ' + b );
<ide> start();
<ide> };
<del>
<add>
<ide> jQuery('#main').ajaxError(pass);
<del>
<add>
<ide> jQuery.ajax({
<ide> type: "GET",
<ide> url: url("data/name.php?wait=5"),
<ide> error: pass,
<ide> success: fail
<ide> });
<del>
<add>
<ide> // reset timeout
<ide> jQuery.ajaxSetup({timeout: 0});
<ide> });
<ide> test("jQuery.ajaxSetup({timeout: Number}) with localtimeout", function() {
<ide> start();
<ide> },
<ide> success: function() {
<del> ok( true, 'Check for local timeout' );
<del> start();
<add> ok( true, 'Check for local timeout' );
<add> start();
<ide> }
<ide> });
<ide>
<ide> test("jQuery.ajax - simple get", function() {
<ide> type: "GET",
<ide> url: url("data/name.php?name=foo"),
<ide> success: function(msg){
<del> equals( msg, 'bar', 'Check for GET' );
<del> start();
<add> equals( msg, 'bar', 'Check for GET' );
<add> start();
<ide> }
<ide> });
<ide> });
<ide> test("jQuery.ajax - simple post", function() {
<ide> url: url("data/name.php"),
<ide> data: "name=peter",
<ide> success: function(msg){
<del> equals( msg, 'pan', 'Check for POST' );
<del> start();
<add> equals( msg, 'pan', 'Check for POST' );
<add> start();
<ide> }
<ide> });
<ide> });
<ide> test("ajaxSetup()", function() {
<ide> jQuery.ajaxSetup({
<ide> url: url("data/name.php?name=foo"),
<ide> success: function(msg){
<del> equals( msg, 'bar', 'Check for GET' );
<add> equals( msg, 'bar', 'Check for GET' );
<ide> start();
<ide> }
<ide> }); | 2 |
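The whitespace-only hunks above sit inside jQuery's "ajax cache" test, which asserts that exactly one `_=` anti-cache parameter ends up in the request URL and that a pre-existing value is replaced rather than duplicated. A minimal sketch of that behaviour — not taken from jQuery's source, just an illustration of the `_=` convention the test checks:

    // Hypothetical helper: replace an existing "_=" value or append a fresh one.
    function bustCache(url) {
      var ts = new Date().getTime();
      var re = /([?&])_=[^&]*/;
      return re.test(url)
        ? url.replace(re, '$1_=' + ts)                            // replace the existing value
        : url + (url.indexOf('?') < 0 ? '?' : '&') + '_=' + ts;   // or append a single one
    }
    // bustCache('data/text.php?_=tobereplaced555') leaves one fresh "_=" parameter.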
Text | Text | fix typo in async_context.md | e387269586152ec285c6f98c49cfc7dc2eb276fb | <ide><path>doc/api/async_context.md
<ide> http.get('http://localhost:8080');
<ide>
<ide> Each instance of `AsyncLocalStorage` maintains an independent storage context.
<ide> Multiple instances can safely exist simultaneously without risk of interfering
<del>with each other data.
<add>with each other's data.
<ide>
<ide> ### `new AsyncLocalStorage()`
<ide> | 1 |
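The paragraph corrected above documents that separate `AsyncLocalStorage` instances do not share state. A minimal sketch of that guarantee, assuming a current Node.js runtime (the store names here are made up for illustration):

    // Two independent stores observed from the same async flow.
    const { AsyncLocalStorage } = require('async_hooks');

    const userStore = new AsyncLocalStorage();
    const requestStore = new AsyncLocalStorage();

    userStore.run({ user: 'alice' }, () => {
      requestStore.run({ id: 42 }, () => {
        console.log(userStore.getStore());    // { user: 'alice' }
        console.log(requestStore.getStore()); // { id: 42 }
      });
    });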
Java | Java | correct javadoc in servleturicomponentsbuilder | bb5c8ed4e8061eaf8ba990f1d54c3a6f761cf868 | <ide><path>spring-webmvc/src/main/java/org/springframework/web/servlet/support/ServletUriComponentsBuilder.java
<ide> * UriComponentsBuilder with additional static factory methods to create links
<ide> * based on the current HttpServletRequest.
<ide> *
<del> * <p><strong>Note:</strong> This class uses values from "Forwarded"
<del> * (<a href="http://tools.ietf.org/html/rfc7239">RFC 7239</a>),
<del> * "X-Forwarded-Host", "X-Forwarded-Port", and "X-Forwarded-Proto" headers,
<del> * if present, in order to reflect the client-originated protocol and address.
<del> * Consider using the {@code ForwardedHeaderFilter} in order to choose from a
<del> * central place whether to extract and use, or to discard such headers.
<del> * See the Spring Framework reference for more on this filter.
<add> * <p><strong>Note:</strong> As of 5.1, methods in this class do not extract
<add> * {@code "Forwarded"} and {@code "X-Forwarded-*"} headers that specify the
<add> * client-originated address. Please, use
<add> * {@link org.springframework.web.filter.ForwardedHeaderFilter
<add> * ForwardedHeaderFilter}, or similar from the underlying server, to extract
<add> * and use such headers, or to discard them.
<ide> *
<ide> * @author Rossen Stoyanchev
<ide> * @since 3.1
<ide> protected ServletUriComponentsBuilder(ServletUriComponentsBuilder other) {
<ide> /**
<ide> * Prepare a builder from the host, port, scheme, and context path of the
<ide> * given HttpServletRequest.
<del> * <p><strong>Note:</strong> As of 5.1, this method ignores
<del> * {@code "Forwarded"} and {@code "X-Forwarded-*"} headers that specify the
<del> * client-originated address. Consider using the {@code ForwardedHeaderFilter}
<del> * to extract and use, or to discard such headers.
<ide> */
<ide> public static ServletUriComponentsBuilder fromContextPath(HttpServletRequest request) {
<ide> ServletUriComponentsBuilder builder = initFromRequest(request);
<ide> public static ServletUriComponentsBuilder fromContextPath(HttpServletRequest req
<ide> * will end with "/main". If the servlet is mapped otherwise, e.g.
<ide> * {@code "/"} or {@code "*.do"}, the result will be the same as
<ide> * if calling {@link #fromContextPath(HttpServletRequest)}.
<del> * <p><strong>Note:</strong> As of 5.1, this method ignores
<del> * {@code "Forwarded"} and {@code "X-Forwarded-*"} headers that specify the
<del> * client-originated address. Consider using the {@code ForwardedHeaderFilter}
<del> * to extract and use, or to discard such headers.
<ide> */
<ide> public static ServletUriComponentsBuilder fromServletMapping(HttpServletRequest request) {
<ide> ServletUriComponentsBuilder builder = fromContextPath(request);
<ide> public static ServletUriComponentsBuilder fromServletMapping(HttpServletRequest
<ide> /**
<ide> * Prepare a builder from the host, port, scheme, and path (but not the query)
<ide> * of the HttpServletRequest.
<del> * <p><strong>Note:</strong> As of 5.1, this method ignores
<del> * {@code "Forwarded"} and {@code "X-Forwarded-*"} headers that specify the
<del> * client-originated address. Consider using the {@code ForwardedHeaderFilter}
<del> * to extract and use, or to discard such headers.
<ide> */
<ide> public static ServletUriComponentsBuilder fromRequestUri(HttpServletRequest request) {
<ide> ServletUriComponentsBuilder builder = initFromRequest(request);
<ide> public static ServletUriComponentsBuilder fromRequestUri(HttpServletRequest requ
<ide> /**
<ide> * Prepare a builder by copying the scheme, host, port, path, and
<ide> * query string of an HttpServletRequest.
<del> * <p><strong>Note:</strong> As of 5.1, this method ignores
<del> * {@code "Forwarded"} and {@code "X-Forwarded-*"} headers that specify the
<del> * client-originated address. Consider using the {@code ForwardedHeaderFilter}
<del> * to extract and use, or to discard such headers.
<ide> */
<ide> public static ServletUriComponentsBuilder fromRequest(HttpServletRequest request) {
<ide> ServletUriComponentsBuilder builder = initFromRequest(request);
<ide> private static ServletUriComponentsBuilder initFromRequest(HttpServletRequest re
<ide> /**
<ide> * Same as {@link #fromContextPath(HttpServletRequest)} except the
<ide> * request is obtained through {@link RequestContextHolder}.
<del> * <p><strong>Note:</strong> This method extracts values from "Forwarded"
<del> * and "X-Forwarded-*" headers if found. See class-level docs.
<del> * <p>As of 4.3.15, this method replaces the contextPath with the value
<del> * of "X-Forwarded-Prefix" rather than prepending, thus aligning with
<del> * {@code ForwardedHeaderFilter}.
<ide> */
<ide> public static ServletUriComponentsBuilder fromCurrentContextPath() {
<ide> return fromContextPath(getCurrentRequest());
<ide> public static ServletUriComponentsBuilder fromCurrentContextPath() {
<ide> /**
<ide> * Same as {@link #fromServletMapping(HttpServletRequest)} except the
<ide> * request is obtained through {@link RequestContextHolder}.
<del> * <p><strong>Note:</strong> This method extracts values from "Forwarded"
<del> * and "X-Forwarded-*" headers if found. See class-level docs.
<del> * <p>As of 4.3.15, this method replaces the contextPath with the value
<del> * of "X-Forwarded-Prefix" rather than prepending, thus aligning with
<del> * {@code ForwardedHeaderFilter}.
<ide> */
<ide> public static ServletUriComponentsBuilder fromCurrentServletMapping() {
<ide> return fromServletMapping(getCurrentRequest());
<ide> public static ServletUriComponentsBuilder fromCurrentServletMapping() {
<ide> /**
<ide> * Same as {@link #fromRequestUri(HttpServletRequest)} except the
<ide> * request is obtained through {@link RequestContextHolder}.
<del> * <p><strong>Note:</strong> This method extracts values from "Forwarded"
<del> * and "X-Forwarded-*" headers if found. See class-level docs.
<del> * <p>As of 4.3.15, this method replaces the contextPath with the value
<del> * of "X-Forwarded-Prefix" rather than prepending, thus aligning with
<del> * {@code ForwardedHeaderFilter}.
<ide> */
<ide> public static ServletUriComponentsBuilder fromCurrentRequestUri() {
<ide> return fromRequestUri(getCurrentRequest());
<ide> public static ServletUriComponentsBuilder fromCurrentRequestUri() {
<ide> /**
<ide> * Same as {@link #fromRequest(HttpServletRequest)} except the
<ide> * request is obtained through {@link RequestContextHolder}.
<del> * <p><strong>Note:</strong> This method extracts values from "Forwarded"
<del> * and "X-Forwarded-*" headers if found. See class-level docs.
<del> * <p>As of 4.3.15, this method replaces the contextPath with the value
<del> * of "X-Forwarded-Prefix" rather than prepending, thus aligning with
<del> * {@code ForwardedHeaderFilter}.
<ide> */
<ide> public static ServletUriComponentsBuilder fromCurrentRequest() {
<ide> return fromRequest(getCurrentRequest()); | 1 |
PHP | PHP | add test for charset removal | 86ca3d1bcba403d227f2691c2692428a965b4ac3 | <ide><path>tests/TestCase/Http/Middleware/BodyParserMiddlewareTest.php
<ide> public function testInvokeParse($method)
<ide> $parser($request, $response, $next);
<ide> }
<ide>
<add> /**
<add> * test parsing on valid http method with charset
<add> *
<add> * @return void
<add> */
<add> public function testInvokeParseStripCharset()
<add> {
<add> $parser = new BodyParserMiddleware();
<add>
<add> $request = new ServerRequest([
<add> 'environment' => [
<add> 'REQUEST_METHOD' => 'POST',
<add> 'CONTENT_TYPE' => 'application/json; charset=utf-8',
<add> ],
<add> 'input' => '{"title": "yay"}'
<add> ]);
<add> $response = new Response();
<add> $next = function ($req, $res) {
<add> $this->assertEquals(['title' => 'yay'], $req->getParsedBody());
<add> };
<add> $parser($request, $response, $next);
<add> }
<add>
<ide> /**
<ide> * test parsing on ignored http method
<ide> * | 1 |
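The new `testInvokeParseStripCharset` case above exercises `BodyParserMiddleware` matching its JSON parser even when the `Content-Type` header carries a `charset` parameter. The idea being tested — sketched here in JavaScript rather than PHP, and not taken from CakePHP's implementation — is simply to drop everything after the first `;` before looking up a parser:

    // Hypothetical helper: reduce "application/json; charset=utf-8" to "application/json".
    function normalizeContentType(header) {
      return header.split(';')[0].trim().toLowerCase();
    }
    // normalizeContentType('application/json; charset=utf-8') === 'application/json'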
Ruby | Ruby | switch route constructors and pass in the regexp | c989e2c56f415e0a4429b1348e76be4fc8e9f35b | <ide><path>actionpack/lib/action_dispatch/routing/mapper.rb
<ide> def initialize(set, ast, defaults, controller, default_action, modyoule, to, for
<ide> end
<ide>
<ide> def make_route(name, precedence)
<del> route = Journey::Route.build(name,
<add> route = Journey::Route.new(name,
<ide> application,
<ide> path,
<ide> conditions,
<ide> required_defaults,
<del> defaults)
<add> defaults,
<add> request_method)
<ide>
<ide> route.precedence = precedence
<ide> route
<ide> def conditions
<ide> def build_conditions(current_conditions, request_class)
<ide> conditions = current_conditions.dup
<ide>
<add> conditions.keep_if do |k, _|
<add> request_class.public_method_defined?(k)
<add> end
<add> end
<add> private :build_conditions
<add>
<add> def request_method
<ide> # Rack-Mount requires that :request_method be a regular expression.
<ide> # :request_method represents the HTTP verb that matches this route.
<ide> #
<ide> # Here we munge values before they get sent on to rack-mount.
<del> unless @via == [:all]
<add> if @via == [:all]
<add> //
<add> else
<ide> verbs = @via.map { |m| m.to_s.dasherize.upcase }
<del> conditions[:request_method] = %r[^#{verbs.join('|')}$]
<del> end
<del>
<del> conditions.keep_if do |k, _|
<del> request_class.public_method_defined?(k)
<add> %r[^#{verbs.join('|')}$]
<ide> end
<ide> end
<del> private :build_conditions
<add> private :request_method
<ide>
<ide> JOINED_SEPARATORS = SEPARATORS.join # :nodoc:
<ide> | 1 |
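The Rails change above moves construction of the `:request_method` matcher into a dedicated `request_method` helper that returns `//` for `via: [:all]` and an anchored alternation of verbs otherwise. A rough JavaScript sketch of what that helper produces — not Rails code, and using a made-up function name:

    // Build the verb-matching pattern the way the new helper does.
    function requestMethodPattern(via) {
      if (via.length === 1 && via[0] === 'all') {
        return new RegExp('');                       // matches anything, like // in Ruby
      }
      var verbs = via.map(function (m) { return m.toUpperCase(); });
      return new RegExp('^' + verbs.join('|') + '$');
    }
    // requestMethodPattern(['get', 'post']) => /^GET|POST$/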
Java | Java | fix typo in javadoc | e3b3481c194abf03074ea4fe7d1ca8093e924e96 | <ide><path>spring-core/src/main/java/org/springframework/core/DecoratingProxy.java
<ide> /*
<del> * Copyright 2002-2016 the original author or authors.
<add> * Copyright 2002-2019 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide> * class there anyway.
<ide> *
<ide> * <p>Defined in the core module in order to allow
<del> * #{@link org.springframework.core.annotation.AnnotationAwareOrderComparator}
<add> * {@link org.springframework.core.annotation.AnnotationAwareOrderComparator}
<ide> * (and potential other candidates without spring-aop dependencies) to use it
<ide> * for introspection purposes, in particular annotation lookups.
<ide> * | 1 |
Text | Text | fix a typo in design principles doc | c60f852cdbb9f115264537d39e2aedea6c1c9636 | <ide><path>docs/contributing/design-principles.md
<ide> Optimizing for search is also important because of our reliance on [codemods](ht
<ide>
<ide> [JSX](/react/docs/displaying-data.html#jsx-syntax) plays a similar role. While it is not required with React, we use it extensively at Facebook both for aesthetic and pragmatic reasons.
<ide>
<del>In our codebase, JSX provides an unambigious hint to the tools that they are dealing with a React element tree. This makes it possible to add build-time optimizations such as [hoisting constant elements](http://babeljs.io/docs/plugins/transform-react-constant-elements/), safely lint and codemod internal component usage, and [include JSX source location](https://github.com/facebook/react/pull/6771) into the warnings.
<add>In our codebase, JSX provides an unambiguous hint to the tools that they are dealing with a React element tree. This makes it possible to add build-time optimizations such as [hoisting constant elements](http://babeljs.io/docs/plugins/transform-react-constant-elements/), safely lint and codemod internal component usage, and [include JSX source location](https://github.com/facebook/react/pull/6771) into the warnings.
<ide>
<ide> ### Dogfooding
<ide> | 1 |
Javascript | Javascript | remove orderedmap and reactproptransferer | 27233230064316a8820cd1eb825adf3ea68b8160 | <ide><path>src/isomorphic/deprecated/OrderedMap.js
<del>/**
<del> * Copyright 2013-present, Facebook, Inc.
<del> * All rights reserved.
<del> *
<del> * This source code is licensed under the BSD-style license found in the
<del> * LICENSE file in the root directory of this source tree. An additional grant
<del> * of patent rights can be found in the PATENTS file in the same directory.
<del> *
<del> * @providesModule OrderedMap
<del> */
<del>
<del>'use strict';
<del>
<del>var invariant = require('invariant');
<del>
<del>var PREFIX = 'key:';
<del>
<del>/**
<del> * Utility to extract a backing object from an initialization `Array`, allowing
<del> * the caller to assist in resolving the unique ID for each entry via the
<del> * `keyExtractor` callback. The `keyExtractor` must extract non-empty strings or
<del> * numbers.
<del> * @param {Array<Object!>} arr Array of items.
<del> * @param {function} keyExtractor Extracts a unique key from each item.
<del> * @return {Object} Map from unique key to originating value that the key was
<del> * extracted from.
<del> * @throws Exception if the initialization array has duplicate extracted keys.
<del> */
<del>function extractObjectFromArray(arr, keyExtractor) {
<del> var normalizedObj = {};
<del> for (var i = 0; i < arr.length; i++) {
<del> var item = arr[i];
<del> var key = keyExtractor(item);
<del> assertValidPublicKey(key);
<del> var normalizedKey = PREFIX + key;
<del> invariant(
<del> !(normalizedKey in normalizedObj),
<del> 'OrderedMap: IDs returned by the key extraction function must be unique.'
<del> );
<del> normalizedObj[normalizedKey] = item;
<del> }
<del> return normalizedObj;
<del>}
<del>
<del>/**
<del> * Utility class for mappings with ordering. This class is to be used in an
<del> * immutable manner. A `OrderedMap` is very much like the native JavaScript
<del> * object, where keys map to values via the `get()` function. Also, like the
<del> * native JavaScript object, there is an ordering associated with the mapping.
<del> * This class is helpful because it eliminates many of the pitfalls that come
<del> * with the native JavaScript ordered mappings. Specifically, there are
<del> * inconsistencies with numeric keys in some JavaScript implementations
<del> * (enumeration ordering). This class protects against those pitfalls and
<del> * provides functional utilities for dealing with these `OrderedMap`s.
<del> *
<del> * - TODO:
<del> * - orderedMergeExclusive: Merges mutually exclusive `OrderedMap`s.
<del> * - mapReverse().
<del> *
<del> * @class {OrderedMap}
<del> * @constructor {OrderedMap}
<del> * @param {Object} normalizedObj Object that is known to be a defensive copy of
<del> * caller supplied data. We require a defensive copy to guard against callers
<del> * mutating. It is also assumed that the keys of `normalizedObj` have been
<del> * normalized and do not contain any numeric-appearing strings.
<del> * @param {number} computedLength The precomputed length of `_normalizedObj`
<del> * keys.
<del> * @private
<del> */
<del>function OrderedMapImpl(normalizedObj, computedLength) {
<del> this._normalizedObj = normalizedObj;
<del> this._computedPositions = null;
<del> this.length = computedLength;
<del>}
<del>
<del>/**
<del> * Validates a "public" key - that is, one that the public facing API supplies.
<del> * The key is then normalized for internal storage. In order to be considered
<del> * valid, all keys must be non-empty, defined, non-null strings or numbers.
<del> *
<del> * @param {string?} key Validates that the key is suitable for use in a
<del> * `OrderedMap`.
<del> * @throws Error if key is not appropriate for use in `OrderedMap`.
<del> */
<del>function assertValidPublicKey(key) {
<del> invariant(
<del> key !== '' && (typeof key === 'string' || typeof key === 'number'),
<del> 'OrderedMap: Key must be non-empty, non-null string or number.'
<del> );
<del>}
<del>
<del>/**
<del> * Validates that arguments to range operations are within the correct limits.
<del> *
<del> * @param {number} start Start of range.
<del> * @param {number} length Length of range.
<del> * @param {number} actualLen Actual length of range that should not be
<del> * exceeded.
<del> * @throws Error if range arguments are out of bounds.
<del> */
<del>function assertValidRangeIndices(start, length, actualLen) {
<del> invariant(
<del> typeof start === 'number' &&
<del> typeof length === 'number' &&
<del> length >= 0 &&
<del> start >= 0 &&
<del> start + length <= actualLen,
<del> 'OrderedMap: `mapRange` and `forEachRange` expect non-negative start and ' +
<del> 'length arguments within the bounds of the instance.'
<del> );
<del>}
<del>
<del>/**
<del> * Merges two "normalized" objects (objects who's key have been normalized) into
<del> * a `OrderedMap`.
<del> *
<del> * @param {Object} a Object of key value pairs.
<del> * @param {Object} b Object of key value pairs.
<del> * @return {OrderedMap} new `OrderedMap` that results in merging `a` and `b`.
<del> */
<del>function _fromNormalizedObjects(a, b) {
<del> // Second optional, both must be plain JavaScript objects.
<del> invariant(
<del> a && a.constructor === Object && (!b || b.constructor === Object),
<del> 'OrderedMap: Corrupted instance of OrderedMap detected.'
<del> );
<del>
<del> var newSet = {};
<del> var length = 0;
<del> var key;
<del> for (key in a) {
<del> if (a.hasOwnProperty(key)) {
<del> newSet[key] = a[key];
<del> length++;
<del> }
<del> }
<del>
<del> for (key in b) {
<del> if (b.hasOwnProperty(key)) {
<del> // Increment length if not already added via first object (a)
<del> if (!(key in newSet)) {
<del> length++;
<del> }
<del> newSet[key] = b[key];
<del> }
<del> }
<del> return new OrderedMapImpl(newSet, length);
<del>}
<del>
<del>/**
<del> * Methods for `OrderedMap` instances.
<del> *
<del> * @lends OrderedMap.prototype
<del> * TODO: Make this data structure lazy, unify with LazyArray.
<del> * TODO: Unify this with ImmutableObject - it is to be used immutably.
<del> * TODO: If so, consider providing `fromObject` API.
<del> * TODO: Create faster implementation of merging/mapping from original Array,
<del> * without having to first create an object - simply for the sake of merging.
<del> */
<del>var OrderedMapMethods = {
<del>
<del> /**
<del> * Returns whether or not a given key is present in the map.
<del> *
<del> * @param {string} key Valid string key to lookup membership for.
<del> * @return {boolean} Whether or not `key` is a member of the map.
<del> * @throws Error if provided known invalid key.
<del> */
<del> has: function(key) {
<del> assertValidPublicKey(key);
<del> var normalizedKey = PREFIX + key;
<del> return normalizedKey in this._normalizedObj;
<del> },
<del>
<del> /**
<del> * Returns the object for a given key, or `undefined` if not present. To
<del> * distinguish an undefined entry vs not being in the set, use `has()`.
<del> *
<del> * @param {string} key String key to lookup the value for.
<del> * @return {Object?} Object at key `key`, or undefined if not in map.
<del> * @throws Error if provided known invalid key.
<del> */
<del> get: function(key) {
<del> assertValidPublicKey(key);
<del> var normalizedKey = PREFIX + key;
<del> return this.has(key) ? this._normalizedObj[normalizedKey] : undefined;
<del> },
<del>
<del> /**
<del> * Merges, appending new keys to the end of the ordering. Keys in `orderedMap`
<del> * that are redundant with `this`, maintain the same ordering index that they
<del> * had in `this`. This is how standard JavaScript object merging would work.
<del> * If you wish to prepend a `OrderedMap` to the beginning of another
<del> * `OrderedMap` then simply reverse the order of operation. This is the analog
<del> * to `merge(x, y)`.
<del> *
<del> * @param {OrderedMap} orderedMap OrderedMap to merge onto the end.
<del> * @return {OrderedMap} New OrderedMap that represents the result of the
<del> * merge.
<del> */
<del> merge: function(orderedMap) {
<del> invariant(
<del> orderedMap instanceof OrderedMapImpl,
<del> 'OrderedMap.merge(...): Expected an OrderedMap instance.'
<del> );
<del> return _fromNormalizedObjects(
<del> this._normalizedObj,
<del> orderedMap._normalizedObj
<del> );
<del> },
<del>
<del> /**
<del> * Functional map API. Returns a new `OrderedMap`.
<del> *
<del> * @param {Function} cb Callback to invoke for each item.
<del> * @param {Object?=} context Context to invoke callback from.
<del> * @return {OrderedMap} OrderedMap that results from mapping.
<del> */
<del> map: function(cb, context) {
<del> return this.mapRange(cb, 0, this.length, context);
<del> },
<del>
<del> /**
<del> * The callback `cb` is invoked with the arguments (item, key,
<del> * indexInOriginal).
<del> *
<del> * @param {Function} cb Determines result for each item.
<del> * @param {number} start Start index of map range.
<del> * @param {end} length End index of map range.
<del> * @param {*!?} context Context of callback invocation.
<del> * @return {OrderedMap} OrderedMap resulting from mapping the range.
<del> */
<del> mapRange: function(cb, start, length, context) {
<del> var thisSet = this._normalizedObj;
<del> var newSet = {};
<del> var i = 0;
<del> assertValidRangeIndices(start, length, this.length);
<del> var end = start + length - 1;
<del> for (var key in thisSet) {
<del> if (thisSet.hasOwnProperty(key)) {
<del> if (i >= start) {
<del> if (i > end) {
<del> break;
<del> }
<del> var item = thisSet[key];
<del> newSet[key] = cb.call(context, item, key.substr(PREFIX.length), i);
<del> }
<del> i++;
<del> }
<del> }
<del> return new OrderedMapImpl(newSet, length);
<del> },
<del>
<del> /**
<del> * Function filter API. Returns new `OrderedMap`.
<del> *
<del> * @param {Function} cb Callback to invoke for each item.
<del> * @param {Object?=} context Context to invoke callback from.
<del> * @return {OrderedMap} OrderedMap that results from filtering.
<del> */
<del> filter: function(cb, context) {
<del> return this.filterRange(cb, 0, this.length, context);
<del> },
<del>
<del> /**
<del> * The callback `cb` is invoked with the arguments (item, key,
<del> * indexInOriginal).
<del> *
<del> * @param {Function} cb Returns true if item should be in result.
<del> * @param {number} start Start index of filter range.
<del> * @param {number} length End index of map range.
<del> * @param {*!?} context Context of callback invocation.
<del> * @return {OrderedMap} OrderedMap resulting from filtering the range.
<del> */
<del> filterRange: function(cb, start, length, context) {
<del> var newSet = {};
<del> var newSetLength = 0;
<del> this.forEachRange(function(item, key, originalIndex) {
<del> if (cb.call(context, item, key, originalIndex)) {
<del> var normalizedKey = PREFIX + key;
<del> newSet[normalizedKey] = item;
<del> newSetLength++;
<del> }
<del> }, start, length);
<del> return new OrderedMapImpl(newSet, newSetLength);
<del> },
<del>
<del> forEach: function(cb, context) {
<del> this.forEachRange(cb, 0, this.length, context);
<del> },
<del>
<del> forEachRange: function(cb, start, length, context) {
<del> assertValidRangeIndices(start, length, this.length);
<del> var thisSet = this._normalizedObj;
<del> var i = 0;
<del> var end = start + length - 1;
<del> for (var key in thisSet) {
<del> if (thisSet.hasOwnProperty(key)) {
<del> if (i >= start) {
<del> if (i > end) {
<del> break;
<del> }
<del> var item = thisSet[key];
<del> cb.call(context, item, key.substr(PREFIX.length), i);
<del> }
<del> i++;
<del> }
<del> }
<del> },
<del>
<del> /**
<del> * Even though `mapRange`/`forEachKeyRange` allow zero length mappings, we'll
<del> * impose an additional restriction here that the length of mapping be greater
<del> * than zero - the only reason is that there are many ways to express length
<del> * zero in terms of two keys and that is confusing.
<del> */
<del> mapKeyRange: function(cb, startKey, endKey, context) {
<del> var startIndex = this.indexOfKey(startKey);
<del> var endIndex = this.indexOfKey(endKey);
<del> invariant(
<del> startIndex !== undefined && endIndex !== undefined,
<del> 'mapKeyRange must be given keys that are present.'
<del> );
<del> invariant(
<del> endIndex >= startIndex,
<del> 'OrderedMap.mapKeyRange(...): `endKey` must not come before `startIndex`.'
<del> );
<del> return this.mapRange(cb, startIndex, (endIndex - startIndex) + 1, context);
<del> },
<del>
<del> forEachKeyRange: function(cb, startKey, endKey, context) {
<del> var startIndex = this.indexOfKey(startKey);
<del> var endIndex = this.indexOfKey(endKey);
<del> invariant(
<del> startIndex !== undefined && endIndex !== undefined,
<del> 'forEachKeyRange must be given keys that are present.'
<del> );
<del> invariant(
<del> endIndex >= startIndex,
<del> 'OrderedMap.forEachKeyRange(...): `endKey` must not come before ' +
<del> '`startIndex`.'
<del> );
<del> this.forEachRange(cb, startIndex, (endIndex - startIndex) + 1, context);
<del> },
<del>
<del> /**
<del> * @param {number} pos Index to search for key at.
<del> * @return {string|undefined} Either the key at index `pos` or undefined if
<del> * not in map.
<del> */
<del> keyAtIndex: function(pos) {
<del> var computedPositions = this._getOrComputePositions();
<del> var keyAtPos = computedPositions.keyByIndex[pos];
<del> return keyAtPos ? keyAtPos.substr(PREFIX.length) : undefined;
<del> },
<del>
<del> /**
<del> * @param {string} key String key from which to find the next key.
<del> * @return {string|undefined} Either the next key, or undefined if there is no
<del> * next key.
<del> * @throws Error if `key` is not in this `OrderedMap`.
<del> */
<del> keyAfter: function(key) {
<del> return this.nthKeyAfter(key, 1);
<del> },
<del>
<del> /**
<del> * @param {string} key String key from which to find the preceding key.
<del> * @return {string|undefined} Either the preceding key, or undefined if there
<del> * is no preceding.key.
<del> * @throws Error if `key` is not in this `OrderedMap`.
<del> */
<del> keyBefore: function(key) {
<del> return this.nthKeyBefore(key, 1);
<del> },
<del>
<del> /**
<del> * @param {string} key String key from which to find a following key.
<del> * @param {number} n Distance to scan forward after `key`.
<del> * @return {string|undefined} Either the nth key after `key`, or undefined if
<del> * there is no next key.
<del> * @throws Error if `key` is not in this `OrderedMap`.
<del> */
<del> nthKeyAfter: function(key, n) {
<del> var curIndex = this.indexOfKey(key);
<del> invariant(
<del> curIndex !== undefined,
<del> 'OrderedMap.nthKeyAfter: The key `%s` does not exist in this instance.',
<del> key
<del> );
<del> return this.keyAtIndex(curIndex + n);
<del> },
<del>
<del> /**
<del> * @param {string} key String key from which to find a preceding key.
<del> * @param {number} n Distance to scan backwards before `key`.
<del> * @return {string|undefined} Either the nth key before `key`, or undefined if
<del> * there is no previous key.
<del> * @throws Error if `key` is not in this `OrderedMap`.
<del> */
<del> nthKeyBefore: function(key, n) {
<del> return this.nthKeyAfter(key, -n);
<del> },
<del>
<del> /**
<del> * @param {string} key Key to find the index of.
<del> * @return {number|undefined} Index of the provided key, or `undefined` if the
<del> * key is not found.
<del> */
<del> indexOfKey: function(key) {
<del> assertValidPublicKey(key);
<del> var normalizedKey = PREFIX + key;
<del> var computedPositions = this._getOrComputePositions();
<del> var computedPosition = computedPositions.indexByKey[normalizedKey];
<del> // Just writing it this way to make it clear this is intentional.
<del> return computedPosition === undefined ? undefined : computedPosition;
<del> },
<del>
<del> /**
<del> * @return {Array} An ordered array of this object's values.
<del> */
<del> toArray: function() {
<del> var result = [];
<del> var thisSet = this._normalizedObj;
<del> for (var key in thisSet) {
<del> if (thisSet.hasOwnProperty(key)) {
<del> result.push(thisSet[key]);
<del> }
<del> }
<del> return result;
<del> },
<del>
<del> /**
<del> * Finds the key at a given position, or indicates via `undefined` that that
<del> * position does not exist in the `OrderedMap`. It is appropriate to return
<del> * undefined, indicating that the key doesn't exist in the `OrderedMap`
<del> * because `undefined` is not ever a valid `OrderedMap` key.
<del> *
<del> * @private
<del> * @return {string?} Name of the item at position `pos`, or `undefined` if
<del> * there is no item at that position.
<del> */
<del> _getOrComputePositions: function() {
<del> // TODO: Entertain computing this at construction time in some less
<del> // performance critical paths.
<del> var computedPositions = this._computedPositions;
<del> if (!computedPositions) {
<del> this._computePositions();
<del> }
<del> return this._computedPositions;
<del> },
<del>
<del> /**
<del> * Precomputes the index/key mapping for future lookup. Since `OrderedMap`s
<del> * are immutable, there is only ever a need to perform this once.
<del> * @private
<del> */
<del> _computePositions: function() {
<del> this._computedPositions = {
<del> keyByIndex: {},
<del> indexByKey: {},
<del> };
<del> var keyByIndex = this._computedPositions.keyByIndex;
<del> var indexByKey = this._computedPositions.indexByKey;
<del> var index = 0;
<del> var thisSet = this._normalizedObj;
<del> for (var key in thisSet) {
<del> if (thisSet.hasOwnProperty(key)) {
<del> keyByIndex[index] = key;
<del> indexByKey[key] = index;
<del> index++;
<del> }
<del> }
<del> },
<del>};
<del>
<del>Object.assign(OrderedMapImpl.prototype, OrderedMapMethods);
<del>
<del>var OrderedMap = {
<del> from: function(orderedMap) {
<del> invariant(
<del> orderedMap instanceof OrderedMapImpl,
<del> 'OrderedMap.from(...): Expected an OrderedMap instance.'
<del> );
<del> return _fromNormalizedObjects(orderedMap._normalizedObj, null);
<del> },
<del>
<del> fromArray: function(arr, keyExtractor) {
<del> invariant(
<del> Array.isArray(arr),
<del> 'OrderedMap.fromArray(...): First argument must be an array.'
<del> );
<del> invariant(
<del> typeof keyExtractor === 'function',
<del> 'OrderedMap.fromArray(...): Second argument must be a function used ' +
<del> 'to determine the unique key for each entry.'
<del> );
<del> return new OrderedMapImpl(
<del> extractObjectFromArray(arr, keyExtractor),
<del> arr.length
<del> );
<del> },
<del>};
<del>
<del>module.exports = OrderedMap;
<ide><path>src/isomorphic/deprecated/ReactPropTransferer.js
<del>/**
<del> * Copyright 2013-present, Facebook, Inc.
<del> * All rights reserved.
<del> *
<del> * This source code is licensed under the BSD-style license found in the
<del> * LICENSE file in the root directory of this source tree. An additional grant
<del> * of patent rights can be found in the PATENTS file in the same directory.
<del> *
<del> * @providesModule ReactPropTransferer
<del> */
<del>
<del>'use strict';
<del>
<del>var emptyFunction = require('emptyFunction');
<del>var joinClasses = require('joinClasses');
<del>
<del>/**
<del> * Creates a transfer strategy that will merge prop values using the supplied
<del> * `mergeStrategy`. If a prop was previously unset, this just sets it.
<del> *
<del> * @param {function} mergeStrategy
<del> * @return {function}
<del> */
<del>function createTransferStrategy(mergeStrategy) {
<del> return function(props, key, value) {
<del> if (!props.hasOwnProperty(key)) {
<del> props[key] = value;
<del> } else {
<del> props[key] = mergeStrategy(props[key], value);
<del> }
<del> };
<del>}
<del>
<del>var transferStrategyMerge = createTransferStrategy(function(a, b) {
<del> // `merge` overrides the first object's (`props[key]` above) keys using the
<del> // second object's (`value`) keys. An object's style's existing `propA` would
<del> // get overridden. Flip the order here.
<del> return Object.assign({}, b, a);
<del>});
<del>
<del>/**
<del> * Transfer strategies dictate how props are transferred by `transferPropsTo`.
<del> * NOTE: if you add any more exceptions to this list you should be sure to
<del> * update `cloneWithProps()` accordingly.
<del> */
<del>var TransferStrategies = {
<del> /**
<del> * Never transfer `children`.
<del> */
<del> children: emptyFunction,
<del> /**
<del> * Transfer the `className` prop by merging them.
<del> */
<del> className: createTransferStrategy(joinClasses),
<del> /**
<del> * Transfer the `style` prop (which is an object) by merging them.
<del> */
<del> style: transferStrategyMerge,
<del>};
<del>
<del>/**
<del> * Mutates the first argument by transferring the properties from the second
<del> * argument.
<del> *
<del> * @param {object} props
<del> * @param {object} newProps
<del> * @return {object}
<del> */
<del>function transferInto(props, newProps) {
<del> for (var thisKey in newProps) {
<del> if (!newProps.hasOwnProperty(thisKey)) {
<del> continue;
<del> }
<del>
<del> var transferStrategy = TransferStrategies[thisKey];
<del>
<del> if (transferStrategy && TransferStrategies.hasOwnProperty(thisKey)) {
<del> transferStrategy(props, thisKey, newProps[thisKey]);
<del> } else if (!props.hasOwnProperty(thisKey)) {
<del> props[thisKey] = newProps[thisKey];
<del> }
<del> }
<del> return props;
<del>}
<del>
<del>/**
<del> * ReactPropTransferer are capable of transferring props to another component
<del> * using a `transferPropsTo` method.
<del> *
<del> * @class ReactPropTransferer
<del> */
<del>var ReactPropTransferer = {
<del>
<del> /**
<del> * Merge two props objects using TransferStrategies.
<del> *
<del> * @param {object} oldProps original props (they take precedence)
<del> * @param {object} newProps new props to merge in
<del> * @return {object} a new object containing both sets of props merged.
<del> */
<del> mergeProps: function(oldProps, newProps) {
<del> return transferInto(Object.assign({}, oldProps), newProps);
<del> },
<del>
<del>};
<del>
<del>module.exports = ReactPropTransferer;
<ide><path>src/shared/utils/__tests__/OrderedMap-test.js
<del>/**
<del> * Copyright 2013-present, Facebook, Inc.
<del> * All rights reserved.
<del> *
<del> * This source code is licensed under the BSD-style license found in the
<del> * LICENSE file in the root directory of this source tree. An additional grant
<del> * of patent rights can be found in the PATENTS file in the same directory.
<del> *
<del> * @emails react-core
<del> */
<del>
<del>'use strict';
<del>
<del>var OrderedMap;
<del>
<del>/**
<del> * Shared, reusable objects.
<del> */
<del>var hasEmptyStringKey = {
<del> 'thisKeyIsFine': {data: []},
<del> '': {thisShouldCauseAFailure: []},
<del> 'thisKeyIsAlsoFine': {data: []},
<del>};
<del>
<del>/**
<del> * Used as map/forEach callback.
<del> */
<del>var duplicate = function(itm, key, count) {
<del> return {
<del> uniqueID: itm.uniqueID,
<del> val: itm.val + key + count + this.justToTestScope,
<del> };
<del>};
<del>
<del>// Should not be allowed - because then null/'null' become impossible to
<del>// distinguish. Every key MUST be a string period!
<del>var hasNullAndUndefStringKey = [
<del> {uniqueID: 'undefined', val: 'thisIsUndefined'},
<del> {uniqueID: 'null', val: 'thisIsNull'},
<del>];
<del>var hasNullKey = [
<del> {uniqueID: 'thisKeyIsFine', data: []},
<del> {uniqueID: 'thisKeyIsAlsoFine', data: []},
<del> {uniqueID: null, data: []},
<del>];
<del>
<del>var hasObjectKey = [
<del> {uniqueID: 'thisKeyIsFine', data: []},
<del> {uniqueID: 'thisKeyIsAlsoFine', data: []},
<del> {uniqueID: {}, data: []},
<del>];
<del>
<del>var hasArrayKey = [
<del> {uniqueID: 'thisKeyIsFine', data: []},
<del> {uniqueID: 'thisKeyIsAlsoFine', data: []},
<del> {uniqueID: [], data: []},
<del>];
<del>
<del>// This should be allowed
<del>var hasNullStringKey = [
<del> {uniqueID: 'thisKeyIsFine', data: []},
<del> {uniqueID: 'thisKeyIsAlsoFine', data: []},
<del> {uniqueID: 'null', data: []},
<del>];
<del>
<del>var hasUndefinedKey = [
<del> {uniqueID: 'thisKeyIsFine', data: []},
<del> {uniqueID: 'thisKeyIsAlsoFine', data: []},
<del> {uniqueID: undefined, data: []},
<del>];
<del>
<del>var hasUndefinedStringKey = [
<del> {uniqueID: 'thisKeyIsFine', data: []},
<del> {uniqueID: 'thisKeyIsAlsoFine', data: []},
<del> {uniqueID: 'undefined', data: []},
<del>];
<del>
<del>var hasPositiveNumericKey = [
<del> {uniqueID: 'notANumber', data: []},
<del> {uniqueID: '5', data: []},
<del> {uniqueID: 'notAnotherNumber', data: []},
<del>];
<del>
<del>var hasZeroStringKey = [
<del> {uniqueID: 'greg', data: 'grego'},
<del> {uniqueID: '0', data: '0o'},
<del> {uniqueID: 'tom', data: 'tomo'},
<del>];
<del>
<del>var hasZeroNumberKey = [
<del> {uniqueID: 'greg', data: 'grego'},
<del> {uniqueID: 0, data: '0o'},
<del> {uniqueID: 'tom', data: 'tomo'},
<del>];
<del>
<del>var hasAllNumericStringKeys = [
<del> {uniqueID: '0', name: 'Gregory'},
<del> {uniqueID: '2', name: 'James'},
<del> {uniqueID: '1', name: 'Tom'},
<del>];
<del>
<del>var hasAllNumericKeys = [
<del> {uniqueID: 0, name: 'Gregory'},
<del> {uniqueID: 2, name: 'James'},
<del> {uniqueID: 1, name: 'Tom'},
<del>];
<del>
<del>var hasAllValidKeys = [
<del> {uniqueID: 'keyOne', value: 'valueOne'},
<del> {uniqueID: 'keyTwo', value: 'valueTwo'},
<del>];
<del>
<del>var hasDuplicateKeys = [
<del> {uniqueID: 'keyOne', value: 'valueOne'},
<del> {uniqueID: 'keyTwo', value: 'valueTwo'},
<del> {uniqueID: 'keyOne', value: 'valueThree'},
<del>];
<del>
<del>var idEntities = [
<del> {uniqueID: 'greg', name: 'Gregory'},
<del> {uniqueID: 'james', name: 'James'},
<del> {uniqueID: 'tom', name: 'Tom'},
<del>];
<del>
<del>var hasEmptyKey = [
<del> {uniqueID: 'greg', name: 'Gregory'},
<del> {uniqueID: '', name: 'James'},
<del> {uniqueID: 'tom', name: 'Tom'},
<del>];
<del>
<del>var extractUniqueID = function(entity) {
<del> return entity.uniqueID;
<del>};
<del>
<del>describe('OrderedMap', function() {
<del> beforeEach(function() {
<del> jest.resetModuleRegistry();
<del> OrderedMap = require('OrderedMap');
<del> });
<del>
<del> it('should create according to simple object with keys', function() {
<del> OrderedMap.fromArray(hasAllValidKeys, extractUniqueID);
<del> // Iterate over and ensure key order.
<del> });
<del>
<del> it('should create from array when providing an identity CB', function() {
<del> expect(function() {
<del> OrderedMap.fromArray(idEntities, extractUniqueID);
<del> }).not.toThrow();
<del> });
<del>
<del> it('should throw if constructing from Array without identity CB', function() {
<del> OrderedMap.fromArray(idEntities, extractUniqueID);
<del> // Iterate and ensure key order
<del> });
<del>
<del> it('should not throw when fromArray extracts a numeric key', function() {
<del> expect(function() {
<del> OrderedMap.fromArray(hasPositiveNumericKey, extractUniqueID);
<del> }).not.toThrow();
<del>
<del> });
<del>
<del> it('should throw when any key is the empty string', function() {
<del> expect(function() {
<del> OrderedMap.fromArray(hasEmptyKey, extractUniqueID);
<del> }).toThrow();
<del> });
<del>
<del> it('should not throw when a key is the string "undefined" or "null"',
<del> function() {
<del> var om = OrderedMap.fromArray(hasNullAndUndefStringKey, extractUniqueID);
<del> expect(om.length).toBe(2);
<del> expect(om.indexOfKey('undefined')).toBe(0);
<del> expect(om.indexOfKey('null')).toBe(1);
<del> expect(om.keyAfter('undefined')).toBe('null');
<del> expect(om.keyAfter('null')).toBe(undefined);
<del> expect(om.keyBefore('undefined')).toBe(undefined);
<del> expect(om.has('undefined')).toBe(true);
<del> expect(om.has('null')).toBe(true);
<del> expect(om.get('undefined').val).toBe('thisIsUndefined');
<del> expect(om.get('null').val).toBe('thisIsNull');
<del> });
<del>
<del>
<del> /**
<del> * Numeric keys are cast to strings.
<del> */
<del> it('should not throw when a key is the number zero', function() {
<del> var om = OrderedMap.fromArray(hasZeroNumberKey, extractUniqueID);
<del> expect(om.length).toBe(3);
<del> expect(om.indexOfKey('0')).toBe(1);
<del> expect(om.indexOfKey(0)).toBe(1);
<del> });
<del>
<del> it('should throw when any key is falsey', function() {
<del> expect(function() {
<del> OrderedMap.fromArray(hasEmptyStringKey, extractUniqueID);
<del> }).toThrow();
<del>
<del> expect(function() {
<del> OrderedMap.fromArray(hasNullKey, extractUniqueID);
<del> }).toThrow();
<del>
<del> expect(function() {
<del> OrderedMap.fromArray(hasUndefinedKey, extractUniqueID);
<del> }).toThrow();
<del> });
<del>
<del> it('should not throw on string keys "undefined/null"', function() {
<del> expect(function() {
<del> OrderedMap.fromArray(hasNullStringKey, extractUniqueID);
<del> }).not.toThrow();
<del>
<del> expect(function() {
<del> OrderedMap.fromArray(hasUndefinedStringKey, extractUniqueID);
<del> }).not.toThrow();
<del> });
<del>
<del> it('should throw on extracting keys that are not strings/nums', function() {
<del> expect(function() {
<del> OrderedMap.fromArray(hasObjectKey, extractUniqueID);
<del> }).toThrow();
<del>
<del> expect(function() {
<del> OrderedMap.fromArray(hasArrayKey, extractUniqueID);
<del> }).toThrow();
<del> });
<del>
<del> it('should throw if instantiating with duplicate key', function() {
<del> expect(function() {
<del> OrderedMap.fromArray(hasDuplicateKeys, extractUniqueID);
<del> }).toThrow();
<del> });
<del>
<del> it('should not throw when a key is the string "0"', function() {
<del> var verifyOM = function(om) {
<del> expect(om.length).toBe(3);
<del> expect(om.indexOfKey('greg')).toBe(0);
<del> expect(om.indexOfKey('0')).toBe(1);
<del> expect(om.indexOfKey(0)).toBe(1); // Casts on writes and reads.
<del> expect(om.indexOfKey('tom')).toBe(2);
<del> expect(om.keyAfter('greg')).toBe('0');
<del> expect(om.keyAfter('0')).toBe('tom');
<del> expect(om.keyAfter(0)).toBe('tom');
<del> expect(om.keyAfter('tom')).toBe(undefined);
<del> expect(om.keyBefore('greg')).toBe(undefined);
<del> expect(om.keyBefore(0)).toBe('greg');
<del> expect(om.keyBefore('0')).toBe('greg');
<del> expect(om.keyBefore('tom')).toBe('0');
<del> expect(om.has('undefined')).toBe(false);
<del> expect(om.has(0)).toBe(true);
<del> expect(om.has('0')).toBe(true);
<del> };
<del> verifyOM(OrderedMap.fromArray(hasZeroStringKey, extractUniqueID));
<del> verifyOM(OrderedMap.fromArray(hasZeroNumberKey, extractUniqueID));
<del> });
<del>
<del> it('should throw when getting invalid public key', function() {
<del> var om = OrderedMap.fromArray(hasAllValidKeys, extractUniqueID);
<del> expect(function() {
<del> om.has(undefined);
<del> }).toThrow();
<del> expect(function() {
<del> om.get(undefined);
<del> }).toThrow();
<del> expect(function() {
<del> om.has(null);
<del> }).toThrow();
<del> expect(function() {
<del> om.get(null);
<del> }).toThrow();
<del> expect(function() {
<del> om.has('');
<del> }).toThrow();
<del> expect(function() {
<del> om.get('');
<del> }).toThrow();
<del> });
<del>
<del> it('should throw when any key is falsey', function() {
<del> expect(function() {
<del> OrderedMap.fromArray(hasEmptyStringKey, extractUniqueID);
<del> }).toThrow();
<del>
<del> expect(function() {
<del> OrderedMap.fromArray(hasNullKey, extractUniqueID);
<del> }).toThrow();
<del>
<del> expect(function() {
<del> OrderedMap.fromArray(hasUndefinedKey, extractUniqueID);
<del> }).toThrow();
<del> });
<del>
<del>
<del> it('should throw when fromArray is passed crazy args', function() {
<del> // Test passing another OrderedMap (when it expects a plain object.)
<del> // This is probably not what you meant to do! We should error.
<del> var validOM = OrderedMap.fromArray(hasAllValidKeys, extractUniqueID);
<del> expect(function() {
<del> OrderedMap.fromArray({uniqueID: 'asdf'}, extractUniqueID);
<del> }).toThrow();
<del> expect(function() {
<del> OrderedMap.fromArray(validOM, extractUniqueID);
<del> }).toThrow();
<del> });
<del>
<del> it('should throw when fromArray is passed crazy things', function() {
<del> expect(function() {
<del> OrderedMap.fromArray(null, extractUniqueID);
<del> }).toThrow();
<del> expect(function() {
<del> OrderedMap.fromArray('stringgg', extractUniqueID);
<del> }).toThrow();
<del> expect(function() {
<del> OrderedMap.fromArray(undefined, extractUniqueID);
<del> }).toThrow();
<del> expect(function() {
<del> OrderedMap.fromArray(new Date(), extractUniqueID);
<del> }).toThrow();
<del> expect(function() {
<del> OrderedMap.fromArray({}, extractUniqueID);
<del> }).toThrow();
<del>
<del> // Test failure without extractor
<del> expect(function() {
<del> OrderedMap.fromArray(idEntities);
<del> }).toThrow();
<del> expect(function() {
<del> OrderedMap.fromArray(idEntities, extractUniqueID);
<del> }).not.toThrow();
<del> });
<del>
<del> // Testing methods that accept other `OrderedMap`s.
<del> it('should throw when from/merge is passed an non-OrderedMap.', function() {
<del> // Test passing an array to construction.
<del> expect(function() {
<del> OrderedMap.from(idEntities, extractUniqueID);
<del> }).toThrow();
<del>
<del> // Test passing an array to merge.
<del> expect(function() {
<del> OrderedMap.fromArray(idEntities, extractUniqueID)
<del> .merge(idEntities, extractUniqueID);
<del> }).toThrow();
<del>
<del>
<del> // Test passing a plain object to merge.
<del> expect(function() {
<del> OrderedMap.fromArray(
<del> idEntities,
<del> extractUniqueID
<del> ).merge({blah: 'willFail'});
<del> }).toThrow();
<del> });
<del>
<del> it('should throw when accessing key before/after of non-key', function() {
<del> var om = OrderedMap.fromArray(
<del> [
<del> {uniqueID: 'first'},
<del> {uniqueID: 'two'},
<del> ], extractUniqueID
<del> );
<del> expect(function() {
<del> om.keyBefore('dog');
<del> }).toThrow();
<del> expect(function() {
<del> om.keyAfter('cat');
<del> }).toThrow();
<del> expect(function() {
<del> om.keyAfter(null);
<del> }).toThrow();
<del> expect(function() {
<del> om.keyAfter(undefined);
<del> }).toThrow();
<del> });
<del>
<del> it('should throw passing invalid/not-present-keys to before/after',
<del> function() {
<del> var om = OrderedMap.fromArray([
<del> {uniqueID: 'one', val: 'first'},
<del> {uniqueID: 'two', val: 'second'},
<del> {uniqueID: 'three', val: 'third'},
<del> {uniqueID: 'four', val: 'fourth'},
<del> ], extractUniqueID);
<del>
<del> expect(function() {
<del> om.keyBefore('');
<del> }).toThrow();
<del> expect(function() {
<del> om.keyBefore(null);
<del> }).toThrow();
<del> expect(function() {
<del> om.keyBefore(undefined);
<del> }).toThrow();
<del> expect(function() {
<del> om.keyBefore('notInTheOrderedMap!');
<del> }).toThrow();
<del>
<del> expect(function() {
<del> om.keyAfter('');
<del> }).toThrow();
<del> expect(function() {
<del> om.keyAfter(null);
<del> }).toThrow();
<del> expect(function() {
<del> om.keyAfter(undefined);
<del> }).toThrow();
<del> expect(function() {
<del> om.keyAfter('notInTheOrderedMap!');
<del> }).toThrow();
<del>
<del> expect(function() {
<del> om.nthKeyAfter('', 1);
<del> }).toThrow();
<del> expect(function() {
<del> om.nthKeyAfter(null, 1);
<del> }).toThrow();
<del> expect(function() {
<del> om.nthKeyAfter(undefined, 1);
<del> }).toThrow();
<del> expect(function() {
<del> om.nthKeyAfter('notInTheOrderedMap!', 1);
<del> }).toThrow();
<del>
<del> expect(function() {
<del> om.nthKeyBefore('', 1);
<del> }).toThrow();
<del> expect(function() {
<del> om.nthKeyBefore(null, 1);
<del> }).toThrow();
<del> expect(function() {
<del> om.nthKeyBefore(undefined, 1);
<del> }).toThrow();
<del> expect(function() {
<del> om.nthKeyBefore('notInTheOrderedMap!', 1);
<del> }).toThrow();
<del> });
<del>
<del> it('should correctly determine the nth key after before', function() {
<del> var om = OrderedMap.fromArray([
<del> {uniqueID: 'one', val: 'first'},
<del> {uniqueID: 'two', val: 'second'},
<del> {uniqueID: 'three', val: 'third'},
<del> {uniqueID: 'four', val: 'fourth'},
<del> ], extractUniqueID);
<del> expect(om.keyBefore('one')).toBe(undefined); // first key
<del> expect(om.keyBefore('two')).toBe('one');
<del> expect(om.keyBefore('three')).toBe('two');
<del> expect(om.keyBefore('four')).toBe('three');
<del>
<del> expect(om.keyAfter('one')).toBe('two'); // first key
<del> expect(om.keyAfter('two')).toBe('three');
<del> expect(om.keyAfter('three')).toBe('four');
<del> expect(om.keyAfter('four')).toBe(undefined);
<del>
<del> expect(om.nthKeyBefore('one', 0)).toBe('one'); // first key
<del> expect(om.nthKeyBefore('one', 1)).toBe(undefined);
<del> expect(om.nthKeyBefore('one', 2)).toBe(undefined);
<del> expect(om.nthKeyBefore('two', 0)).toBe('two');
<del> expect(om.nthKeyBefore('two', 1)).toBe('one');
<del> expect(om.nthKeyBefore('four', 0)).toBe('four');
<del> expect(om.nthKeyBefore('four', 1)).toBe('three');
<del>
<del> expect(om.nthKeyAfter('one', 0)).toBe('one');
<del> expect(om.nthKeyAfter('one', 1)).toBe('two');
<del> expect(om.nthKeyAfter('one', 2)).toBe('three');
<del> expect(om.nthKeyAfter('two', 0)).toBe('two');
<del> expect(om.nthKeyAfter('two', 1)).toBe('three');
<del> expect(om.nthKeyAfter('four', 0)).toBe('four');
<del> expect(om.nthKeyAfter('four', 1)).toBe(undefined);
<del> });
<del>
<del> it('should compute key indices correctly', function() {
<del> var om = OrderedMap.fromArray([
<del> {uniqueID: 'one', val: 'first'},
<del> {uniqueID: 'two', val: 'second'},
<del> ], extractUniqueID);
<del> expect(om.keyAtIndex(0)).toBe('one');
<del> expect(om.keyAtIndex(1)).toBe('two');
<del> expect(om.keyAtIndex(2)).toBe(undefined);
<del> expect(om.indexOfKey('one')).toBe(0);
<del> expect(om.indexOfKey('two')).toBe(1);
<del> expect(om.indexOfKey('nope')).toBe(undefined);
<del> expect(function() {
<del> om.indexOfKey(null);
<del> }).toThrow();
<del> expect(function() {
<del> om.indexOfKey(undefined);
<del> }).toThrow();
<del> expect(function() {
<del> om.indexOfKey(''); // Empty key is not allowed
<del> }).toThrow();
<del> });
<del>
<del> it('should compute indices on array that extracted numeric ids', function() {
<del> var som = OrderedMap.fromArray(hasZeroStringKey, extractUniqueID);
<del> expect(som.keyAtIndex(0)).toBe('greg');
<del> expect(som.keyAtIndex(1)).toBe('0');
<del> expect(som.keyAtIndex(2)).toBe('tom');
<del> expect(som.indexOfKey('greg')).toBe(0);
<del> expect(som.indexOfKey('0')).toBe(1);
<del> expect(som.indexOfKey('tom')).toBe(2);
<del>
<del>
<del> var verifyNumericKeys = function(nom) {
<del> expect(nom.keyAtIndex(0)).toBe('0');
<del> expect(nom.keyAtIndex(1)).toBe('2');
<del> expect(nom.keyAtIndex(2)).toBe('1');
<del> expect(nom.indexOfKey('0')).toBe(0);
<del> expect(nom.indexOfKey('2')).toBe(1); // Prove these are not ordered by
<del> expect(nom.indexOfKey('1')).toBe(2); // their keys
<del> };
<del> var omStringNumberKeys =
<del> OrderedMap.fromArray(hasAllNumericStringKeys, extractUniqueID);
<del> verifyNumericKeys(omStringNumberKeys);
<del> var omNumericKeys =
<del> OrderedMap.fromArray(hasAllNumericKeys, extractUniqueID);
<del> verifyNumericKeys(omNumericKeys);
<del> });
<del>
<del> it('should compute indices on mutually exclusive merge', function() {
<del> var om = OrderedMap.fromArray([
<del> {uniqueID: 'one', val: 'first'},
<del> {uniqueID: 'two', val: 'second'},
<del> ], extractUniqueID);
<del> var om2 = OrderedMap.fromArray([
<del> {uniqueID: 'three', val: 'third'},
<del> ], extractUniqueID);
<del> var res = om.merge(om2);
<del>
<del> expect(res.length).toBe(3);
<del>
<del> expect(res.keyAtIndex(0)).toBe('one');
<del> expect(res.keyAtIndex(1)).toBe('two');
<del> expect(res.keyAtIndex(2)).toBe('three');
<del> expect(res.keyAtIndex(3)).toBe(undefined);
<del>
<del> expect(res.indexOfKey('one')).toBe(0);
<del> expect(res.indexOfKey('two')).toBe(1);
<del> expect(res.indexOfKey('three')).toBe(2);
<del> expect(res.indexOfKey('dog')).toBe(undefined);
<del>
<del> expect(res.has('one')).toBe(true);
<del> expect(res.has('two')).toBe(true);
<del> expect(res.has('three')).toBe(true);
<del> expect(res.has('dog')).toBe(false);
<del>
<del> expect(res.get('one').val).toBe('first');
<del> expect(res.get('two').val).toBe('second');
<del> expect(res.get('three').val).toBe('third');
<del> expect(res.get('dog')).toBe(undefined);
<del> });
<del>
<del> it('should compute indices on intersected merge', function() {
<del> var oneTwo = OrderedMap.fromArray([
<del> {uniqueID: 'one', val: 'first'},
<del> {uniqueID: 'two', val: 'secondOM1'},
<del> ], extractUniqueID);
<del>
<del> var testOneTwoMergedWithTwoThree = function(res) {
<del> expect(res.length).toBe(3);
<del> expect(res.keyAtIndex(0)).toBe('one');
<del> expect(res.keyAtIndex(1)).toBe('two');
<del> expect(res.keyAtIndex(2)).toBe('three');
<del> expect(res.keyAtIndex(3)).toBe(undefined);
<del> expect(res.indexOfKey('one')).toBe(0);
<del> expect(res.indexOfKey('two')).toBe(1);
<del> expect(res.indexOfKey('three')).toBe(2);
<del> expect(res.indexOfKey('dog')).toBe(undefined);
<del> expect(res.has('one')).toBe(true);
<del> expect(res.has('two')).toBe(true);
<del> expect(res.has('three')).toBe(true);
<del> expect(res.has('dog')).toBe(false);
<del> expect(res.get('one').val).toBe('first');
<del> expect(res.get('two').val).toBe('secondOM2');
<del> expect(res.get('three').val).toBe('third');
<del> expect(res.get('dog')).toBe(undefined);
<del> };
<del>
<del> var result =
<del> oneTwo.merge(OrderedMap.fromArray([
<del> {uniqueID: 'two', val: 'secondOM2'},
<del> {uniqueID: 'three', val: 'third'},
<del> ], extractUniqueID));
<del> testOneTwoMergedWithTwoThree(result);
<del>
<del> // Everything should be exactly as before, since the ordering of `two` was
<del> // already determined by `om`.
<del> result = oneTwo.merge(
<del> OrderedMap.fromArray([
<del> {uniqueID: 'three', val: 'third'},
<del> {uniqueID: 'two', val:'secondOM2'},
<del> ], extractUniqueID)
<del> );
<del> testOneTwoMergedWithTwoThree(result);
<del>
<del>
<del> var testTwoThreeMergedWithOneTwo = function(res) {
<del> expect(res.length).toBe(3);
<del> expect(res.keyAtIndex(0)).toBe('two');
<del> expect(res.keyAtIndex(1)).toBe('three');
<del> expect(res.keyAtIndex(2)).toBe('one');
<del> expect(res.keyAtIndex(3)).toBe(undefined);
<del> expect(res.indexOfKey('two')).toBe(0);
<del> expect(res.indexOfKey('three')).toBe(1);
<del> expect(res.indexOfKey('one')).toBe(2);
<del> expect(res.indexOfKey('cat')).toBe(undefined);
<del> expect(res.has('two')).toBe(true);
<del> expect(res.has('three')).toBe(true);
<del> expect(res.has('one')).toBe(true);
<del> expect(res.has('dog')).toBe(false);
<del> expect(res.get('one').val).toBe('first');
<del> expect(res.get('two').val).toBe('secondOM1');
<del> expect(res.get('three').val).toBe('third');
<del> expect(res.get('dog')).toBe(undefined);
<del> };
<del> result = OrderedMap.fromArray([
<del> {uniqueID: 'two', val: 'secondOM2'},
<del> {uniqueID: 'three', val: 'third'},
<del> ], extractUniqueID).merge(oneTwo);
<del> testTwoThreeMergedWithOneTwo(result);
<del>
<del> });
<del>
<del> it('should merge mutually exclusive keys to the end.', function() {
<del> var om = OrderedMap.fromArray([
<del> {uniqueID: 'one', val: 'first'},
<del> {uniqueID: 'two', val: 'second'},
<del> ], extractUniqueID);
<del> var om2 = OrderedMap.fromArray([
<del> {uniqueID: 'three', val: 'first'},
<del> {uniqueID: 'four', val: 'second'},
<del> ], extractUniqueID);
<del> var res = om.merge(om2);
<del> expect(res.length).toBe(4);
<del>
<del> });
<del>
<del> it('should map correctly', function() {
<del> var om = OrderedMap.fromArray([
<del> {uniqueID: 'x', val: 'xx'},
<del> {uniqueID: 'y', val: 'yy'},
<del> {uniqueID: 'z', val: 'zz'},
<del> ], extractUniqueID);
<del> var scope = {justToTestScope: 'justTestingScope'};
<del> var verifyResult = function(omResult) {
<del> expect(omResult.length).toBe(3);
<del> expect(omResult.keyAtIndex(0)).toBe('x');
<del> expect(omResult.keyAtIndex(1)).toBe('y');
<del> expect(omResult.keyAtIndex(2)).toBe('z');
<del> expect(omResult.get('x').val).toBe('xxx0justTestingScope');
<del> expect(omResult.get('y').val).toBe('yyy1justTestingScope');
<del> expect(omResult.get('z').val).toBe('zzz2justTestingScope');
<del> };
<del> var resultOM = om.map(function(itm, key, count) {
<del> return {
<del> uniqueID: itm.uniqueID,
<del> val: itm.val + key + count + this.justToTestScope,
<del> };
<del> }, scope);
<del> verifyResult(resultOM);
<del>
<del> var resArray = [];
<del> om.forEach(function(itm, key, count) {
<del> resArray.push({
<del> uniqueID: itm.uniqueID,
<del> val: itm.val + key + count + this.justToTestScope,
<del> });
<del> }, scope);
<del> resultOM = OrderedMap.fromArray(resArray, extractUniqueID);
<del> verifyResult(resultOM);
<del> });
<del>
<del> it('should filter correctly', function() {
<del> var om = OrderedMap.fromArray([
<del> {uniqueID: 'x', val: 'xx'},
<del> {uniqueID: 'y', val: 'yy'},
<del> {uniqueID: 'z', val: 'zz'},
<del> ], extractUniqueID);
<del> var scope = {justToTestScope: 'justTestingScope'};
<del>
<del> var filteringCallback = function(item, key, indexInOriginal) {
<del> expect(this).toBe(scope);
<del> expect(key === 'x' || key === 'y' || key === 'z').toBe(true);
<del> if (key === 'x') {
<del> expect(item.val).toBe('xx');
<del> expect(indexInOriginal).toBe(0);
<del> return false;
<del> } else if (key === 'y') {
<del> expect(item.val).toBe('yy');
<del> expect(indexInOriginal).toBe(1);
<del> return true;
<del> } else {
<del> expect(item.val).toBe('zz');
<del> expect(indexInOriginal).toBe(2);
<del> return true;
<del> }
<del> };
<del>
<del> var verifyResult = function(omResult) {
<del> expect(omResult.length).toBe(2);
<del> expect(omResult.keyAtIndex(0)).toBe('y');
<del> expect(omResult.keyAtIndex(1)).toBe('z');
<del> expect(omResult.has('x')).toBe(false);
<del> expect(omResult.has('z')).toBe(true);
<del> expect(omResult.get('z').val).toBe('zz');
<del> expect(omResult.has('y')).toBe(true);
<del> expect(omResult.get('y').val).toBe('yy');
<del> };
<del>
<del> var resultOM = om.filter(filteringCallback, scope);
<del> verifyResult(resultOM);
<del> });
<del>
<del> it('should throw when providing invalid ranges to ranging', function() {
<del> var om = OrderedMap.fromArray([
<del> {uniqueID: 'x', val: 'xx'},
<del> {uniqueID: 'y', val: 'yy'},
<del> {uniqueID: 'z', val: 'zz'},
<del> ], extractUniqueID);
<del> var scope = {justToTestScope: 'justTestingScope'};
<del>
<del> expect(function() {
<del> om.mapRange(duplicate, 0, 3, scope);
<del> }).not.toThrow();
<del> expect(function() {
<del> om.filterRange(duplicate, 0, 3, scope);
<del> }).not.toThrow();
<del> expect(function() {
<del> om.forEachRange(duplicate, 0, 3, scope);
<del> }).not.toThrow();
<del> expect(function() {
<del> om.mapKeyRange(duplicate, 'x', 3, scope);
<del> }).toThrow(
<del> 'mapKeyRange must be given keys that are present.'
<del> );
<del> expect(function() {
<del> om.forEachKeyRange(duplicate, 'x', 3, scope);
<del> }).toThrow(
<del> 'forEachKeyRange must be given keys that are present.'
<del> );
<del>
<del> expect(function() {
<del> om.mapRange(duplicate, 0, 4, scope);
<del> }).toThrow();
<del> expect(function() {
<del> om.filterRange(duplicate, 0, 4, scope);
<del> }).toThrow();
<del> expect(function() {
<del> om.forEachRange(duplicate, 0, 4, scope);
<del> }).toThrow();
<del> expect(function() {
<del> om.mapKeyRange(duplicate, 'x', null, scope);
<del> }).toThrow();
<del> expect(function() {
<del> om.forEachKeyRange(duplicate, 'x', null, scope);
<del> }).toThrow();
<del>
<del> expect(function() {
<del> om.mapRange(duplicate, -1, 1, scope);
<del> }).toThrow();
<del> expect(function() {
<del> om.filterRange(duplicate, -1, 1, scope);
<del> }).toThrow();
<del> expect(function() {
<del> om.forEachRange(duplicate, -1, 1, scope);
<del> }).toThrow();
<del> expect(function() {
<del> om.mapKeyRange(duplicate, null, 'y', scope);
<del> }).toThrow();
<del> expect(function() {
<del> om.forEachKeyRange(duplicate, null, 'y', scope);
<del> }).toThrow();
<del>
<del> expect(function() {
<del> om.mapRange(duplicate, 0, 0, scope);
<del> }).not.toThrow();
<del> expect(function() {
<del> om.filterRange(duplicate, 0, 0, scope);
<del> }).not.toThrow();
<del> expect(function() {
<del> om.forEachRange(duplicate, 0, 0, scope);
<del> }).not.toThrow();
<del> expect(function() {
<del> om.mapKeyRange(duplicate, 'x', 'x', scope);
<del> }).not.toThrow();
<del> expect(function() {
<del> om.forEachKeyRange(duplicate, 'x', 'x', scope);
<del> }).not.toThrow();
<del>
<del> expect(function() {
<del> om.mapRange(duplicate, 0, -1, scope);
<del> }).toThrow();
<del> expect(function() {
<del> om.filterRange(duplicate, 0, -1, scope);
<del> }).toThrow();
<del> expect(function() {
<del> om.forEachRange(duplicate, 0, -1, scope);
<del> }).toThrow();
<del> expect(function() {
<del> om.mapKeyRange(duplicate, 'x', null, scope);
<del> }).toThrow();
<del> expect(function() {
<del> om.forEachKeyRange(duplicate, 'x', null, scope);
<del> }).toThrow();
<del>
<del> expect(function() {
<del> om.mapRange(duplicate, 2, 1, scope);
<del> }).not.toThrow();
<del> expect(function() {
<del> om.filterRange(duplicate, 2, 1, scope);
<del> }).not.toThrow();
<del> expect(function() {
<del> om.forEachRange(duplicate, 2, 1, scope);
<del> }).not.toThrow();
<del> expect(function() {
<del> om.mapKeyRange(duplicate, 'z', 'z', scope);
<del> }).not.toThrow();
<del> expect(function() {
<del> om.forEachKeyRange(duplicate, 'z', 'z', scope);
<del> }).not.toThrow();
<del>
<del> expect(function() {
<del> om.mapRange(duplicate, 2, 2, scope);
<del> }).toThrow();
<del> expect(function() {
<del> om.filterRange(duplicate, 2, 2, scope);
<del> }).toThrow();
<del> expect(function() {
<del> om.forEachRange(duplicate, 2, 2, scope);
<del> }).toThrow();
<del> expect(function() {
<del> om.mapKeyRange(duplicate, 'z', null, scope);
<del> }).toThrow();
<del> expect(function() {
<del> om.forEachKeyRange(duplicate, 'z', null, scope);
<del> }).toThrow();
<del>
<del> // Provide keys in reverse order - should throw.
<del> expect(function() {
<del> om.mapKeyRange(duplicate, 'y', 'x', scope);
<del> }).toThrow();
<del> expect(function() {
<del> om.forEachKeyRange(duplicate, 'y', 'x', scope);
<del> }).toThrow();
<del> });
<del>
<del> // TEST length zero map, or keyrange start===end
<del>
<del> it('should map range correctly', function() {
<del> var om = OrderedMap.fromArray([
<del> {uniqueID: 'x', val: 'xx'},
<del> {uniqueID: 'y', val: 'yy'},
<del> {uniqueID: 'z', val: 'zz'},
<del> ], extractUniqueID);
<del> var scope = {justToTestScope: 'justTestingScope'};
<del> var verifyThreeItems = function(omResult) {
<del> expect(omResult.length).toBe(3);
<del> expect(omResult.keyAtIndex(0)).toBe('x');
<del> expect(omResult.keyAtIndex(1)).toBe('y');
<del> expect(omResult.keyAtIndex(2)).toBe('z');
<del> expect(omResult.get('x').val).toBe('xxx0justTestingScope');
<del> expect(omResult.get('y').val).toBe('yyy1justTestingScope');
<del> expect(omResult.get('z').val).toBe('zzz2justTestingScope');
<del> };
<del> var verifyFirstTwoItems = function(omResult) {
<del> expect(omResult.length).toBe(2);
<del> expect(omResult.keyAtIndex(0)).toBe('x');
<del> expect(omResult.keyAtIndex(1)).toBe('y');
<del> expect(omResult.get('x').val).toBe('xxx0justTestingScope');
<del> expect(omResult.get('y').val).toBe('yyy1justTestingScope');
<del> };
<del>
<del> var verifyLastTwoItems = function(omResult) {
<del> expect(omResult.length).toBe(2);
<del> expect(omResult.keyAtIndex(0)).toBe('y');
<del> expect(omResult.keyAtIndex(1)).toBe('z');
<del> expect(omResult.get('y').val).toBe('yyy1justTestingScope');
<del> expect(omResult.get('z').val).toBe('zzz2justTestingScope');
<del> };
<del>
<del> var verifyMiddleItem = function(omResult) {
<del> expect(omResult.length).toBe(1);
<del> expect(omResult.keyAtIndex(0)).toBe('y');
<del> expect(omResult.get('y').val).toBe('yyy1justTestingScope');
<del> };
<del>
<del> var verifyEmpty = function(omResult) {
<del> expect(omResult.length).toBe(0);
<del> };
<del>
<del> var omResultThree = om.mapRange(duplicate, 0, 3, scope);
<del> verifyThreeItems(omResultThree);
<del> var resArray = [];
<del> var pushToResArray = function(itm, key, count) {
<del> resArray.push({
<del> uniqueID: itm.uniqueID,
<del> val: itm.val + key + count + this.justToTestScope,
<del> });
<del> };
<del>
<del> om.forEachRange(pushToResArray, 0, 3, scope);
<del> omResultThree = OrderedMap.fromArray(resArray, extractUniqueID);
<del> verifyThreeItems(omResultThree);
<del>
<del> var omResultFirstTwo = om.mapRange(duplicate, 0, 2, scope);
<del> verifyFirstTwoItems(omResultFirstTwo);
<del> resArray = [];
<del> om.forEachRange(pushToResArray, 0, 2, scope);
<del> omResultFirstTwo = OrderedMap.fromArray(resArray, extractUniqueID);
<del> verifyFirstTwoItems(omResultFirstTwo);
<del>
<del> var omResultLastTwo = om.mapRange(duplicate, 1, 2, scope);
<del> verifyLastTwoItems(omResultLastTwo);
<del> resArray = [];
<del> om.forEachRange(pushToResArray, 1, 2, scope);
<del> omResultLastTwo = OrderedMap.fromArray(resArray, extractUniqueID);
<del> verifyLastTwoItems(omResultLastTwo);
<del>
<del> var omResultMiddle = om.mapRange(duplicate, 1, 1, scope);
<del> verifyMiddleItem(omResultMiddle);
<del> resArray = [];
<del> om.forEachRange(pushToResArray, 1, 1, scope);
<del> omResultMiddle = OrderedMap.fromArray(resArray, extractUniqueID);
<del> verifyMiddleItem(omResultMiddle);
<del>
<del> var omResultNone = om.mapRange(duplicate, 1, 0, scope);
<del> verifyEmpty(omResultNone);
<del> });
<del>
<del> it('should extract the original array correctly', function() {
<del> var sourceArray = [
<del> {uniqueID: 'x', val: 'xx'},
<del> {uniqueID: 'y', val: 'yy'},
<del> {uniqueID: 'z', val: 'zz'},
<del> ];
<del> var om = OrderedMap.fromArray(sourceArray, extractUniqueID);
<del> expect(om.toArray()).toEqual(sourceArray);
<del> });
<del>}); | 3 |
Python | Python | fix attributeerror when `sig` is not a signature. | dc1d895e00228fb02a08c3788605c4f53d4e4ade | <ide><path>celery/canvas.py
<ide> def link(self, sig):
<ide> return self.tasks[0].link(sig)
<ide>
<ide> def link_error(self, sig):
<del> sig = sig.clone().set(immutable=True)
<add> try:
<add> sig = sig.clone().set(immutable=True)
<add> except AttributeError:
<add> # See issue #5265. I don't use isinstance because current tests
<add> # pass a Mock object as argument.
<add> sig['immutable'] = True
<add> sig = Signature.from_dict(sig)
<ide> return self.tasks[0].link_error(sig)
<ide>
<ide> def _prepared(self, tasks, partial_args, group_id, root_id, app, | 1 |
Text | Text | fix indentation from 1b4b26f [ci skip] | c0d1823d546541295ed914a03c7c889951b02828 | <ide><path>guides/source/form_helpers.md
<ide> output:
<ide>
<ide> ```html
<ide> <form accept-charset="UTF-8" action="/search" method="post">
<del> <input name="_method" type="hidden" value="patch" />
<del> <input name="utf8" type="hidden" value="✓" />
<del> <input name="authenticity_token" type="hidden" value="f755bb0ed134b76c432144748a6d4b7a7ddf2b71" />
<add> <input name="_method" type="hidden" value="patch" />
<add> <input name="utf8" type="hidden" value="✓" />
<add> <input name="authenticity_token" type="hidden" value="f755bb0ed134b76c432144748a6d4b7a7ddf2b71" />
<ide> ...
<ide> </form>
<ide> ``` | 1 |
Javascript | Javascript | fix problems with compiling twice | c23e8ce5c7c381e330ad70dbb999024e0d89bee0 | <ide><path>test/ConfigTestCases.template.js
<ide> const describeCases = config => {
<ide> .catch(done);
<ide> };
<ide> if (config.cache) {
<del> const compiler = require("..")(options);
<del> compiler.run(err => {
<del> if (err) return handleFatalError(err, done);
<del> compiler.run((error, stats) => {
<del> compiler.close(err => {
<del> if (err) return handleFatalError(err, done);
<del> onCompiled(error, stats);
<add> try {
<add> const compiler = require("..")(options);
<add> compiler.run(err => {
<add> if (err) return handleFatalError(err, done);
<add> compiler.run((error, stats) => {
<add> compiler.close(err => {
<add> if (err) return handleFatalError(err, done);
<add> onCompiled(error, stats);
<add> });
<ide> });
<ide> });
<del> });
<add> } catch (e) {
<add> handleFatalError(e, done);
<add> }
<ide> } else {
<ide> require("..")(options, onCompiled);
<ide> }
<ide><path>test/configCases/asset-modules/http-url/server/index.js
<ide> const fs = require("fs");
<ide> * @returns {Promise<import("http").Server>} server instance
<ide> */
<ide> function createServer(port) {
<del> const file = fs.readFileSync("./test/configCases/asset-modules/http-url/server/index.css").toString().trim();
<add> const file = fs
<add> .readFileSync("./test/configCases/asset-modules/http-url/server/index.css")
<add> .toString()
<add> .trim();
<ide>
<ide> const server = http.createServer((req, res) => {
<ide> if (req.url !== "/index.css") {
<ide> function createServer(port) {
<ide> });
<ide>
<ide> return new Promise((resolve, reject) => {
<del> server.listen(port, (err) => {
<add> server.listen(port, err => {
<ide> if (err) {
<ide> reject(err);
<ide> } else {
<ide> class ServerPlugin {
<ide> * @param {import("../../../../../").Compiler} compiler
<ide> */
<ide> apply(compiler) {
<del> const serverPromise = createServer(this.port);
<add> let server;
<ide>
<del> serverPromise
<del> .then(server => server.unref());
<add> compiler.hooks.beforeRun.tapPromise(
<add> "ServerPlugin",
<add> async (compiler, callback) => {
<add> if (!server) {
<add> server = await createServer(this.port);
<add> server.unref();
<add> }
<add> }
<add> );
<ide>
<ide> compiler.hooks.done.tapAsync("ServerPlugin", (stats, callback) => {
<del> serverPromise
<del> .then(server => server.close(callback))
<del> .catch(callback)
<del> });
<del>
<del> compiler.hooks.beforeRun.tapAsync("ServerPlugin", (compiler, callback) => {
<del> serverPromise
<del> .then(() => callback())
<del> .catch(callback)
<add> if (server) {
<add> server.close(callback);
<add> server = undefined;
<add> } else {
<add> callback();
<add> }
<ide> });
<ide> }
<ide> }
<ide><path>test/configCases/clean/enabled/webpack.config.js
<ide> module.exports = {
<ide> },
<ide> plugins: [
<ide> compiler => {
<add> let once = true;
<ide> compiler.hooks.thisCompilation.tap("Test", compilation => {
<ide> compilation.hooks.processAssets.tap("Test", assets => {
<del> const outputPath = compilation.getPath(compiler.outputPath, {});
<del> const customDir = path.join(outputPath, "this/dir/should/be/removed");
<del> fs.mkdirSync(customDir, { recursive: true });
<del> fs.writeFileSync(path.join(customDir, "file.ext"), "");
<add> if (once) {
<add> const outputPath = compilation.getPath(compiler.outputPath, {});
<add> const customDir = path.join(
<add> outputPath,
<add> "this/dir/should/be/removed"
<add> );
<add> fs.mkdirSync(customDir, { recursive: true });
<add> fs.writeFileSync(path.join(customDir, "file.ext"), "");
<add> once = false;
<add> }
<ide> assets["this/dir/should/not/be/removed/file.ext"] = new RawSource("");
<ide> });
<ide> });
<ide><path>test/configCases/clean/ignore-fn/webpack.config.js
<ide> module.exports = {
<ide> },
<ide> plugins: [
<ide> compiler => {
<add> let once = true;
<ide> compiler.hooks.thisCompilation.tap("Test", compilation => {
<ide> compilation.hooks.processAssets.tap("Test", assets => {
<del> const outputPath = compilation.getPath(compiler.outputPath, {});
<del> const customDir = path.join(outputPath, "this/dir/should/be/removed");
<del> const ignoredDir = path.join(
<del> outputPath,
<del> "this/is/ignored/dir/that/should/not/be/removed"
<del> );
<del> fs.mkdirSync(customDir, { recursive: true });
<del> fs.writeFileSync(path.join(customDir, "file.ext"), "");
<del> fs.mkdirSync(ignoredDir, { recursive: true });
<del> fs.writeFileSync(path.join(ignoredDir, "file.ext"), "");
<add> if (once) {
<add> const outputPath = compilation.getPath(compiler.outputPath, {});
<add> const customDir = path.join(
<add> outputPath,
<add> "this/dir/should/be/removed"
<add> );
<add> const ignoredDir = path.join(
<add> outputPath,
<add> "this/is/ignored/dir/that/should/not/be/removed"
<add> );
<add> fs.mkdirSync(customDir, { recursive: true });
<add> fs.writeFileSync(path.join(customDir, "file.ext"), "");
<add> fs.mkdirSync(ignoredDir, { recursive: true });
<add> fs.writeFileSync(path.join(ignoredDir, "file.ext"), "");
<add> once = false;
<add> }
<ide> assets["this/dir/should/not/be/removed/file.ext"] = new RawSource("");
<ide> });
<ide> });
<ide><path>test/configCases/clean/ignore-hook/webpack.config.js
<ide> module.exports = {
<ide> },
<ide> plugins: [
<ide> compiler => {
<add> let once = true;
<ide> compiler.hooks.thisCompilation.tap("Test", compilation => {
<ide> webpack.CleanPlugin.getCompilationHooks(compilation).keep.tap(
<ide> "Test",
<ide> module.exports = {
<ide> }
<ide> );
<ide> compilation.hooks.processAssets.tap("Test", assets => {
<del> const outputPath = compilation.getPath(compiler.outputPath, {});
<del> const customDir = path.join(outputPath, "this/dir/should/be/removed");
<del> const ignoredDir = path.join(
<del> outputPath,
<del> "this/is/ignored/dir/that/should/not/be/removed"
<del> );
<del> const ignoredTooDir = path.join(
<del> outputPath,
<del> "this/is/ignored/too/dir/that/should/not/be/removed"
<del> );
<del> fs.mkdirSync(customDir, { recursive: true });
<del> fs.writeFileSync(path.join(customDir, "file.ext"), "");
<del> fs.mkdirSync(ignoredDir, { recursive: true });
<del> fs.writeFileSync(path.join(ignoredDir, "file.ext"), "");
<del> fs.mkdirSync(ignoredTooDir, { recursive: true });
<del> fs.writeFileSync(path.join(ignoredTooDir, "file.ext"), "");
<add> if (once) {
<add> const outputPath = compilation.getPath(compiler.outputPath, {});
<add> const customDir = path.join(
<add> outputPath,
<add> "this/dir/should/be/removed"
<add> );
<add> const ignoredDir = path.join(
<add> outputPath,
<add> "this/is/ignored/dir/that/should/not/be/removed"
<add> );
<add> const ignoredTooDir = path.join(
<add> outputPath,
<add> "this/is/ignored/too/dir/that/should/not/be/removed"
<add> );
<add> fs.mkdirSync(customDir, { recursive: true });
<add> fs.writeFileSync(path.join(customDir, "file.ext"), "");
<add> fs.mkdirSync(ignoredDir, { recursive: true });
<add> fs.writeFileSync(path.join(ignoredDir, "file.ext"), "");
<add> fs.mkdirSync(ignoredTooDir, { recursive: true });
<add> fs.writeFileSync(path.join(ignoredTooDir, "file.ext"), "");
<add> once = false;
<add> }
<ide> assets["this/dir/should/not/be/removed/file.ext"] = new RawSource("");
<ide> });
<ide> });
<ide><path>test/configCases/clean/ignore-rx/webpack.config.js
<ide> module.exports = {
<ide> },
<ide> plugins: [
<ide> compiler => {
<add> let once = true;
<ide> compiler.hooks.thisCompilation.tap("Test", compilation => {
<ide> compilation.hooks.processAssets.tap("Test", assets => {
<del> const outputPath = compilation.getPath(compiler.outputPath, {});
<del> const customDir = path.join(outputPath, "this/dir/should/be/removed");
<del> const ignoredDir = path.join(
<del> outputPath,
<del> "this/is/ignored/dir/that/should/not/be/removed"
<del> );
<del> fs.mkdirSync(customDir, { recursive: true });
<del> fs.writeFileSync(path.join(customDir, "file.ext"), "");
<del> fs.mkdirSync(ignoredDir, { recursive: true });
<del> fs.writeFileSync(path.join(ignoredDir, "file.ext"), "");
<add> if (once) {
<add> const outputPath = compilation.getPath(compiler.outputPath, {});
<add> const customDir = path.join(
<add> outputPath,
<add> "this/dir/should/be/removed"
<add> );
<add> const ignoredDir = path.join(
<add> outputPath,
<add> "this/is/ignored/dir/that/should/not/be/removed"
<add> );
<add> fs.mkdirSync(customDir, { recursive: true });
<add> fs.writeFileSync(path.join(customDir, "file.ext"), "");
<add> fs.mkdirSync(ignoredDir, { recursive: true });
<add> fs.writeFileSync(path.join(ignoredDir, "file.ext"), "");
<add> once = false;
<add> }
<ide> assets["this/dir/should/not/be/removed/file.ext"] = new RawSource("");
<ide> });
<ide> }); | 6 |
Java | Java | fix border color | 805d06087b8a4bbaa2144f67cfeedde0d306040f | <ide><path>ReactAndroid/src/main/java/com/facebook/react/views/view/ReactViewBackgroundDrawable.java
<ide> public void setBorderColor(int position, float rgb, float alpha) {
<ide> private void setBorderRGB(int position, float rgb) {
<ide> // set RGB component
<ide> if (mBorderRGB == null) {
<del> mBorderRGB = new Spacing();
<del> mBorderRGB.set(Spacing.LEFT, DEFAULT_BORDER_RGB);
<del> mBorderRGB.set(Spacing.TOP, DEFAULT_BORDER_RGB);
<del> mBorderRGB.set(Spacing.RIGHT, DEFAULT_BORDER_RGB);
<del> mBorderRGB.set(Spacing.BOTTOM, DEFAULT_BORDER_RGB);
<add> mBorderRGB = new Spacing(DEFAULT_BORDER_RGB);
<ide> }
<ide> if (!FloatUtil.floatsEqual(mBorderRGB.getRaw(position), rgb)) {
<ide> mBorderRGB.set(position, rgb);
<ide> private void setBorderRGB(int position, float rgb) {
<ide> private void setBorderAlpha(int position, float alpha) {
<ide> // set Alpha component
<ide> if (mBorderAlpha == null) {
<del> mBorderAlpha = new Spacing();
<del> mBorderAlpha.set(Spacing.LEFT, DEFAULT_BORDER_ALPHA);
<del> mBorderAlpha.set(Spacing.TOP, DEFAULT_BORDER_ALPHA);
<del> mBorderAlpha.set(Spacing.RIGHT, DEFAULT_BORDER_ALPHA);
<del> mBorderAlpha.set(Spacing.BOTTOM, DEFAULT_BORDER_ALPHA);
<add> mBorderAlpha = new Spacing(DEFAULT_BORDER_ALPHA);
<ide> }
<ide> if (!FloatUtil.floatsEqual(mBorderAlpha.getRaw(position), alpha)) {
<ide> mBorderAlpha.set(position, alpha); | 1 |
PHP | PHP | add env override for running in console | a36906ab8a141f1f497a0667196935e41970ae51 | <ide><path>src/Illuminate/Foundation/Application.php
<ide> public function detectEnvironment(Closure $callback)
<ide> */
<ide> public function runningInConsole()
<ide> {
<del> return php_sapi_name() === 'cli' || php_sapi_name() === 'phpdbg';
<add> return env(
<add> 'LARAVEL_RUNNING_IN_CONSOLE',
<add> php_sapi_name() === 'cli' || php_sapi_name() === 'phpdbg'
<add> );
<ide> }
<ide>
<ide> /** | 1 |
Python | Python | add sent_start to pattern schema | e7341db5dc16102625d9f0f90545596145968920 | <ide><path>spacy/schemas.py
<ide> class TokenPattern(BaseModel):
<ide> is_currency: Optional[StrictBool] = None
<ide> is_stop: Optional[StrictBool] = None
<ide> is_sent_start: Optional[StrictBool] = None
<add> sent_start: Optional[StrictBool] = None
<ide> like_num: Optional[StrictBool] = None
<ide> like_url: Optional[StrictBool] = None
<ide> like_email: Optional[StrictBool] = None | 1 |
Javascript | Javascript | throw typeerror on non-string args to path.resolve | 089ec586135726e82dc0d25c2e328478d577db24 | <ide><path>lib/path.js
<ide> if (isWindows) {
<ide> }
<ide>
<ide> // Skip empty and invalid entries
<del> if (typeof path !== 'string' || !path) {
<add> if (typeof path !== 'string') {
<add> throw new TypeError('Arguments to path.resolve must be strings');
<add> } else if(!path) {
<ide> continue;
<ide> }
<ide>
<ide> if (isWindows) {
<ide> exports.join = function() {
<ide> function f(p) {
<ide> if (typeof p !== 'string') {
<del> throw new TypeError('Arguments to join must be strings');
<add> throw new TypeError('Arguments to path.join must be strings');
<ide> }
<del> return p && typeof p === 'string';
<add> return p;
<ide> }
<ide>
<ide> var paths = Array.prototype.filter.call(arguments, f);
<ide> if (isWindows) {
<ide> var path = (i >= 0) ? arguments[i] : process.cwd();
<ide>
<ide> // Skip empty and invalid entries
<del> if (typeof path !== 'string' || !path) {
<add> if (typeof path !== 'string') {
<add> throw new TypeError('Arguments to path.resolve must be strings');
<add> } else if (!path) {
<ide> continue;
<ide> }
<ide>
<ide> if (isWindows) {
<ide> var paths = Array.prototype.slice.call(arguments, 0);
<ide> return exports.normalize(paths.filter(function(p, index) {
<ide> if (typeof p !== 'string') {
<del> throw new TypeError('Arguments to join must be strings');
<add> throw new TypeError('Arguments to path.join must be strings');
<ide> }
<del> return p && typeof p === 'string';
<add> return p;
<ide> }).join('/'));
<ide> };
<ide>
<ide> exports.existsSync = util.deprecate(function(path) {
<ide>
<ide> if (isWindows) {
<ide> exports._makeLong = function(path) {
<add> // Note: this will *probably* throw somewhere.
<ide> if (typeof path !== 'string')
<ide> return path;
<ide>
<ide><path>test/simple/test-path.js
<ide> joinThrowTests.forEach(function(test) {
<ide> assert.throws(function() {
<ide> path.join(test);
<ide> }, TypeError);
<add> assert.throws(function() {
<add> path.resolve(test);
<add> }, TypeError);
<ide> });
<ide>
<ide> | 2 |
Javascript | Javascript | add benchmark for buf.compare() | ffdc046e5c2e80cdde31db72554dcf5196ed6d4b | <ide><path>benchmark/buffers/buffer-compare-instance-method.js
<add>'use strict';
<add>const common = require('../common.js');
<add>const v8 = require('v8');
<add>
<add>const bench = common.createBenchmark(main, {
<add> size: [16, 512, 1024, 4096, 16386],
<add> millions: [1]
<add>});
<add>
<add>function main(conf) {
<add> const iter = (conf.millions >>> 0) * 1e6;
<add> const size = (conf.size >>> 0);
<add> const b0 = new Buffer(size).fill('a');
<add> const b1 = new Buffer(size).fill('a');
<add>
<add> b1[size - 1] = 'b'.charCodeAt(0);
<add>
<add> // Force optimization before starting the benchmark
<add> b0.compare(b1);
<add> v8.setFlagsFromString('--allow_natives_syntax');
<add> eval('%OptimizeFunctionOnNextCall(b0.compare)');
<add> b0.compare(b1);
<add>
<add> bench.start();
<add> for (var i = 0; i < iter; i++) {
<add> b0.compare(b1);
<add> }
<add> bench.end(iter / 1e6);
<add>} | 1 |
Python | Python | fix padding_idx of roberta model | a6a6d9e6382961dc92a1a08d1bab05a52dc815f9 | <ide><path>transformers/modeling_roberta.py
<ide> class RobertaEmbeddings(BertEmbeddings):
<ide> def __init__(self, config):
<ide> super(RobertaEmbeddings, self).__init__(config)
<ide> self.padding_idx = 1
<add> self.word_embeddings = nn.Embedding(config.vocab_size, config.hidden_size, padding_idx=self.padding_idx)
<add> self.position_embeddings = nn.Embedding(config.max_position_embeddings, config.hidden_size,
<add> padding_idx=self.padding_idx)
<ide>
<ide> def forward(self, input_ids, token_type_ids=None, position_ids=None):
<ide> seq_length = input_ids.size(1) | 1 |
Javascript | Javascript | allow callback or usercallback in the time scale | ebffa52dc2ad3c2fc5a1b3f48e4fb8596334a9ee | <ide><path>src/scales/scale.time.js
<ide> module.exports = function(Chart) {
<ide> // Function to format an individual tick mark
<ide> tickFormatFunction: function tickFormatFunction(tick, index, ticks) {
<ide> var formattedTick = tick.format(this.displayFormat);
<add> var tickOpts = this.options.ticks;
<add> var callback = helpers.getValueOrDefault(tickOpts.callback, tickOpts.userCallback);
<ide>
<del> if (this.options.ticks.userCallback) {
<del> return this.options.ticks.userCallback(formattedTick, index, ticks);
<add> if (callback) {
<add> return callback(formattedTick, index, ticks);
<ide> } else {
<ide> return formattedTick;
<ide> } | 1 |
Ruby | Ruby | show trailing / for dirs with brew list | 396ec2efae0cb275d5a52c05167d9e1da2875c4c | <ide><path>Library/Homebrew/brew.h.rb
<ide> def print_remaining_files files, root, other = ''
<ide> when 1
<ide> puts *files
<ide> else
<del> puts "#{root} (#{files.length} #{other}files)"
<add> puts "#{root}/ (#{files.length} #{other}files)"
<ide> end
<ide> end
<ide> end | 1 |
Python | Python | change more with statements to execute_context() | 15bb091e22f44755cf8a9006c51ce8e5171a796d | <ide><path>celery/tests/test_backends/test_amqp.py
<del>from __future__ import with_statement
<del>
<ide> import sys
<ide> import errno
<ide> import unittest
<ide><path>celery/tests/test_backends/test_redis.py
<del>from __future__ import with_statement
<del>
<ide> import sys
<ide> import errno
<ide> import socket
<ide>
<ide> from celery import states
<ide> from celery.utils import gen_unique_id
<add>from celery.tests.utils import execute_context
<ide> from celery.backends import pyredis
<ide> from celery.backends.pyredis import RedisBackend
<ide>
<ide> class TestTyrantBackendNoTyrant(unittest.TestCase):
<ide> def test_tyrant_None_if_tyrant_not_installed(self):
<ide> from celery.tests.utils import mask_modules
<ide> prev = sys.modules.pop("celery.backends.pyredis")
<del> with mask_modules("redis"):
<del> from celery.backends.pyredis import redis
<del> self.assertTrue(redis is None)
<del> sys.modules["celery.backends.pyredis"] = prev
<add> try:
<add> def with_redis_masked():
<add> from celery.backends.pyredis import redis
<add> self.assertTrue(redis is None)
<add> context = mask_modules("redis")
<add> execute_context(context, with_redis_masked)
<add> finally:
<add> sys.modules["celery.backends.pyredis"] = prev
<ide>
<ide> def test_constructor_raises_if_tyrant_not_installed(self):
<ide> from celery.backends import pyredis
<ide><path>celery/tests/test_log.py
<ide> def test_setup_logger(self):
<ide> logger = setup_logger(loglevel=logging.ERROR, logfile=None)
<ide> logger.handlers = [] # Reset previously set logger.
<ide> logger = setup_logger(loglevel=logging.ERROR, logfile=None)
<del> self.assertTrue(logger.handlers[0].stream is sys.stderr,
<add> self.assertTrue(logger.handlers[0].stream is sys.__stderr__,
<ide> "setup_logger logs to stderr without logfile argument.")
<ide> #self.assertTrue(logger._process_aware,
<ide> # "setup_logger() returns process aware logger.")
<del> self.assertDidLogTrue(logger, "Logging something",
<del> "Logger logs error when loglevel is ERROR",
<del> loglevel=logging.ERROR)
<add> #self.assertDidLogTrue(logger, "Logging something",
<add> # "Logger logs error when loglevel is ERROR",
<add> # loglevel=logging.ERROR)
<ide> self.assertDidLogFalse(logger, "Logging something",
<ide> "Logger doesn't info when loglevel is ERROR",
<ide> loglevel=logging.INFO)
<ide> def test_setup_logger_no_handlers_file(self):
<ide> self.assertTrue(isinstance(l.handlers[0], logging.FileHandler))
<ide>
<ide> def test_emergency_error_stderr(self):
<del> outs = override_stdouts()
<del>
<ide> def with_override_stdouts(outs):
<ide> stdout, stderr = outs
<ide> emergency_error(None, "The lazy dog crawls under the fast fox")
<ide><path>celery/tests/test_serialization.py
<del>from __future__ import with_statement
<ide> import sys
<ide> import unittest
<ide>
<add>from celery.tests.utils import execute_context
<add>
<ide>
<ide> class TestAAPickle(unittest.TestCase):
<ide>
<ide> def test_no_cpickle(self):
<ide> from celery.tests.utils import mask_modules
<ide> prev = sys.modules.pop("billiard.serialization")
<del> mask_modules("cPickle")
<del> from billiard.serialization import pickle
<del> import pickle as orig_pickle
<del> self.assertTrue(pickle.dumps is orig_pickle.dumps)
<del> sys.modules["billiard.serialization"] = prev
<add> try:
<add> def with_cPickle_masked():
<add> from billiard.serialization import pickle
<add> import pickle as orig_pickle
<add> self.assertTrue(pickle.dumps is orig_pickle.dumps)
<add>
<add> context = mask_modules("cPickle")
<add> execute_context(context, with_cPickle_masked)
<add>
<add> finally:
<add> sys.modules["billiard.serialization"] = prev
<ide><path>celery/tests/test_task_http.py
<ide> # -*- coding: utf-8 -*-
<del>from __future__ import with_statement, generators
<add>from __future__ import generators
<ide>
<ide> import sys
<ide> import logging
<ide> from anyjson import serialize
<ide>
<ide> from celery.task import http
<del>from celery.tests.utils import eager_tasks
<add>from celery.tests.utils import eager_tasks, execute_context
<ide>
<ide>
<ide> @contextmanager
<ide> class TestHttpDispatch(unittest.TestCase):
<ide>
<ide> def test_dispatch_success(self):
<ide> logger = logging.getLogger("celery.unittest")
<del> with mock_urlopen(success_response(100)):
<add>
<add> def with_mock_urlopen():
<ide> d = http.HttpDispatch("http://example.com/mul", "GET", {
<ide> "x": 10, "y": 10}, logger)
<ide> self.assertEquals(d.dispatch(), 100)
<ide>
<add> context = mock_urlopen(success_response(100))
<add> execute_context(context, with_mock_urlopen)
<add>
<ide> def test_dispatch_failure(self):
<ide> logger = logging.getLogger("celery.unittest")
<del> with mock_urlopen(fail_response("Invalid moon alignment")):
<add>
<add> def with_mock_urlopen():
<ide> d = http.HttpDispatch("http://example.com/mul", "GET", {
<ide> "x": 10, "y": 10}, logger)
<ide> self.assertRaises(http.RemoteExecuteError, d.dispatch)
<ide>
<add> context = mock_urlopen(fail_response("Invalid moon alignment"))
<add> execute_context(context, with_mock_urlopen)
<add>
<ide> def test_dispatch_empty_response(self):
<ide> logger = logging.getLogger("celery.unittest")
<del> with mock_urlopen(_response("")):
<add>
<add> def with_mock_urlopen():
<ide> d = http.HttpDispatch("http://example.com/mul", "GET", {
<ide> "x": 10, "y": 10}, logger)
<ide> self.assertRaises(http.InvalidResponseError, d.dispatch)
<ide>
<add> context = mock_urlopen(_response(""))
<add> execute_context(context, with_mock_urlopen)
<add>
<ide> def test_dispatch_non_json(self):
<ide> logger = logging.getLogger("celery.unittest")
<del> with mock_urlopen(_response("{'#{:'''")):
<add>
<add> def with_mock_urlopen():
<ide> d = http.HttpDispatch("http://example.com/mul", "GET", {
<ide> "x": 10, "y": 10}, logger)
<ide> self.assertRaises(http.InvalidResponseError, d.dispatch)
<ide>
<add> context = mock_urlopen(_response("{'#{:'''"))
<add> execute_context(context, with_mock_urlopen)
<add>
<ide> def test_dispatch_unknown_status(self):
<ide> logger = logging.getLogger("celery.unittest")
<del> with mock_urlopen(unknown_response()):
<add>
<add> def with_mock_urlopen():
<ide> d = http.HttpDispatch("http://example.com/mul", "GET", {
<ide> "x": 10, "y": 10}, logger)
<ide> self.assertRaises(http.UnknownStatusError, d.dispatch)
<ide>
<add> context = mock_urlopen(unknown_response())
<add> execute_context(context, with_mock_urlopen)
<add>
<ide> def test_dispatch_POST(self):
<ide> logger = logging.getLogger("celery.unittest")
<del> with mock_urlopen(success_response(100)):
<add>
<add> def with_mock_urlopen():
<ide> d = http.HttpDispatch("http://example.com/mul", "POST", {
<ide> "x": 10, "y": 10}, logger)
<ide> self.assertEquals(d.dispatch(), 100)
<ide>
<add> context = mock_urlopen(success_response(100))
<add> execute_context(context, with_mock_urlopen)
<ide>
<ide> class TestURL(unittest.TestCase):
<ide>
<ide> def test_URL_get_async(self):
<del> with eager_tasks():
<del> with mock_urlopen(success_response(100)):
<add> def with_eager_tasks():
<add>
<add> def with_mock_urlopen():
<ide> d = http.URL("http://example.com/mul").get_async(x=10, y=10)
<ide> self.assertEquals(d.get(), 100)
<ide>
<add> context = mock_urlopen(success_response(100))
<add> execute_context(context, with_mock_urlopen)
<add>
<add> execute_context(eager_tasks(), with_eager_tasks)
<add>
<ide> def test_URL_post_async(self):
<del> with eager_tasks():
<del> with mock_urlopen(success_response(100)):
<add> def with_eager_tasks():
<add>
<add> def with_mock_urlopen():
<ide> d = http.URL("http://example.com/mul").post_async(x=10, y=10)
<ide> self.assertEquals(d.get(), 100)
<add>
<add> context = mock_urlopen(success_response(100))
<add> execute_context(context, with_mock_urlopen)
<add>
<add> execute_context(eager_tasks(), with_eager_tasks)
<ide><path>celery/tests/test_utils.py
<del>from __future__ import with_statement
<del>
<ide> import sys
<ide> import socket
<ide> import unittest
<ide>
<ide> from billiard.utils.functional import wraps
<ide>
<ide> from celery import utils
<del>from celery.tests.utils import sleepdeprived
<add>from celery.tests.utils import sleepdeprived, execute_context
<ide>
<ide>
<ide> class TestChunks(unittest.TestCase):
<ide> def test_gen_unique_id_without_ctypes(self):
<ide> from celery.tests.utils import mask_modules
<ide> old_utils = sys.modules.pop("celery.utils")
<ide> try:
<del> mask_modules("ctypes")
<del> from celery.utils import ctypes, gen_unique_id
<del> self.assertTrue(ctypes is None)
<del> uuid = gen_unique_id()
<del> self.assertTrue(uuid)
<del> self.assertTrue(isinstance(uuid, basestring))
<add> def with_ctypes_masked():
<add> from celery.utils import ctypes, gen_unique_id
<add> self.assertTrue(ctypes is None)
<add> uuid = gen_unique_id()
<add> self.assertTrue(uuid)
<add> self.assertTrue(isinstance(uuid, basestring))
<add>
<add> context = mask_modules("ctypes")
<add> execute_context(context, with_ctypes_masked)
<add>
<ide> finally:
<ide> sys.modules["celery.utils"] = old_utils
<ide>
<ide><path>celery/tests/utils.py
<del>from __future__ import with_statement, generators
<add>from __future__ import generators
<ide>
<ide> import os
<ide> import sys
<ide><path>contrib/release/sphinx-to-rst.py
<ide> #!/usr/bin/even/python
<del>from __future__ import with_statement
<del>
<ide> import os
<ide> import re
<ide> import sys
<ide> def include_file(lines, pos, match):
<ide> global dirname
<ide> orig_filename = match.groups()[0]
<ide> filename = os.path.join(dirname, orig_filename)
<del> with file(filename) as fh:
<add> fh = open(filename)
<add> try:
<ide> old_dirname = dirname
<ide> dirname = os.path.dirname(orig_filename)
<ide> try:
<ide> lines[pos] = sphinx_to_rst(fh)
<ide> finally:
<ide> dirname = old_dirname
<add> finally:
<add> fh.close()
<ide>
<ide>
<ide> def replace_code_block(lines, pos, match):
<ide> def sphinx_to_rst(fh):
<ide> if __name__ == "__main__":
<ide> global dirname
<ide> dirname = os.path.dirname(sys.argv[1])
<del> with open(sys.argv[1]) as fh:
<add> fh = open(sys.argv[1])
<add> try:
<ide> print(sphinx_to_rst(fh))
<add> finally:
<add> fh.close() | 8 |
Python | Python | push queryexecutionid to xcom | 90f08aaa0065f4f1a8cb01a1a491bf3705dd0f07 | <ide><path>airflow/contrib/operators/aws_athena_operator.py
<ide> class AWSAthenaOperator(BaseOperator):
<ide> """
<ide> An operator that submit presto query to athena.
<ide>
<add> If ``do_xcom_push`` is True, the QueryExecutionID assigned to the
<add> query will be pushed to an XCom when it successfuly completes.
<add>
<ide> :param query: Presto to be run on athena. (templated)
<ide> :type query: str
<ide> :param database: Database to select. (templated)
<ide> def execute(self, context):
<ide> 'Max tries of poll status exceeded, query_execution_id is {}.'
<ide> .format(query_status, self.query_execution_id))
<ide>
<add> return self.query_execution_id
<add>
<ide> def on_kill(self):
<ide> """
<ide> Cancel the submitted athena query
<ide><path>tests/contrib/operators/test_aws_athena_operator.py
<ide>
<ide> from airflow.contrib.operators.aws_athena_operator import AWSAthenaOperator
<ide> from airflow.contrib.hooks.aws_athena_hook import AWSAthenaHook
<add>from airflow.models import DAG, TaskInstance
<add>from airflow.utils import timezone
<add>from airflow.utils.timezone import datetime
<ide> from airflow import configuration
<ide> from tests.compat import mock
<ide>
<add>TEST_DAG_ID = 'unit_tests'
<add>DEFAULT_DATE = datetime(2018, 1, 1)
<add>ATHENA_QUERY_ID = 'eac29bf8-daa1-4ffc-b19a-0db31dc3b784'
<add>
<ide> MOCK_DATA = {
<ide> 'task_id': 'test_aws_athena_operator',
<ide> 'query': 'SELECT * FROM TEST_TABLE',
<ide> class TestAWSAthenaOperator(unittest.TestCase):
<ide> def setUp(self):
<ide> configuration.load_test_config()
<ide>
<add> args = {
<add> 'owner': 'airflow',
<add> 'start_date': DEFAULT_DATE,
<add> 'provide_context': True
<add> }
<add>
<add> self.dag = DAG(TEST_DAG_ID + 'test_schedule_dag_once',
<add> default_args=args,
<add> schedule_interval='@once')
<ide> self.athena = AWSAthenaOperator(task_id='test_aws_athena_operator', query='SELECT * FROM TEST_TABLE',
<ide> database='TEST_DATABASE', output_location='s3://test_s3_bucket/',
<ide> client_request_token='eac427d0-1c6d-4dfb-96aa-2835d3ac6595',
<del> sleep_time=1, max_tries=3)
<add> sleep_time=1, max_tries=3, dag=self.dag)
<ide>
<ide> def test_init(self):
<ide> self.assertEqual(self.athena.task_id, MOCK_DATA['task_id'])
<ide> def test_init(self):
<ide> self.assertEqual(self.athena.sleep_time, 1)
<ide>
<ide> @mock.patch.object(AWSAthenaHook, 'check_query_status', side_effect=("SUCCESS",))
<del> @mock.patch.object(AWSAthenaHook, 'run_query', return_value='1234')
<add> @mock.patch.object(AWSAthenaHook, 'run_query', return_value=ATHENA_QUERY_ID)
<ide> @mock.patch.object(AWSAthenaHook, 'get_conn')
<ide> def test_hook_run_small_success_query(self, mock_conn, mock_run_query, mock_check_query_status):
<ide> self.athena.execute(None)
<ide> def test_hook_run_small_success_query(self, mock_conn, mock_run_query, mock_chec
<ide> self.assertEqual(mock_check_query_status.call_count, 1)
<ide>
<ide> @mock.patch.object(AWSAthenaHook, 'check_query_status', side_effect=("RUNNING", "RUNNING", "SUCCESS",))
<del> @mock.patch.object(AWSAthenaHook, 'run_query', return_value='1234')
<add> @mock.patch.object(AWSAthenaHook, 'run_query', return_value=ATHENA_QUERY_ID)
<ide> @mock.patch.object(AWSAthenaHook, 'get_conn')
<ide> def test_hook_run_big_success_query(self, mock_conn, mock_run_query, mock_check_query_status):
<ide> self.athena.execute(None)
<ide> def test_hook_run_big_success_query(self, mock_conn, mock_run_query, mock_check_
<ide> self.assertEqual(mock_check_query_status.call_count, 3)
<ide>
<ide> @mock.patch.object(AWSAthenaHook, 'check_query_status', side_effect=(None, None,))
<del> @mock.patch.object(AWSAthenaHook, 'run_query', return_value='1234')
<add> @mock.patch.object(AWSAthenaHook, 'run_query', return_value=ATHENA_QUERY_ID)
<ide> @mock.patch.object(AWSAthenaHook, 'get_conn')
<ide> def test_hook_run_failed_query_with_none(self, mock_conn, mock_run_query, mock_check_query_status):
<ide> with self.assertRaises(Exception):
<ide> def test_hook_run_failed_query_with_none(self, mock_conn, mock_run_query, mock_c
<ide> self.assertEqual(mock_check_query_status.call_count, 3)
<ide>
<ide> @mock.patch.object(AWSAthenaHook, 'check_query_status', side_effect=("RUNNING", "FAILED",))
<del> @mock.patch.object(AWSAthenaHook, 'run_query', return_value='1234')
<add> @mock.patch.object(AWSAthenaHook, 'run_query', return_value=ATHENA_QUERY_ID)
<ide> @mock.patch.object(AWSAthenaHook, 'get_conn')
<ide> def test_hook_run_failure_query(self, mock_conn, mock_run_query, mock_check_query_status):
<ide> with self.assertRaises(Exception):
<ide> def test_hook_run_failure_query(self, mock_conn, mock_run_query, mock_check_quer
<ide> self.assertEqual(mock_check_query_status.call_count, 2)
<ide>
<ide> @mock.patch.object(AWSAthenaHook, 'check_query_status', side_effect=("RUNNING", "RUNNING", "CANCELLED",))
<del> @mock.patch.object(AWSAthenaHook, 'run_query', return_value='1234')
<add> @mock.patch.object(AWSAthenaHook, 'run_query', return_value=ATHENA_QUERY_ID)
<ide> @mock.patch.object(AWSAthenaHook, 'get_conn')
<ide> def test_hook_run_cancelled_query(self, mock_conn, mock_run_query, mock_check_query_status):
<ide> with self.assertRaises(Exception):
<ide> def test_hook_run_cancelled_query(self, mock_conn, mock_run_query, mock_check_qu
<ide> self.assertEqual(mock_check_query_status.call_count, 3)
<ide>
<ide> @mock.patch.object(AWSAthenaHook, 'check_query_status', side_effect=("RUNNING", "RUNNING", "RUNNING",))
<del> @mock.patch.object(AWSAthenaHook, 'run_query', return_value='1234')
<add> @mock.patch.object(AWSAthenaHook, 'run_query', return_value=ATHENA_QUERY_ID)
<ide> @mock.patch.object(AWSAthenaHook, 'get_conn')
<ide> def test_hook_run_failed_query_with_max_tries(self, mock_conn, mock_run_query, mock_check_query_status):
<ide> with self.assertRaises(Exception):
<ide> def test_hook_run_failed_query_with_max_tries(self, mock_conn, mock_run_query, m
<ide> MOCK_DATA['client_request_token'])
<ide> self.assertEqual(mock_check_query_status.call_count, 3)
<ide>
<add> @mock.patch.object(AWSAthenaHook, 'check_query_status', side_effect=("SUCCESS",))
<add> @mock.patch.object(AWSAthenaHook, 'run_query', return_value=ATHENA_QUERY_ID)
<add> @mock.patch.object(AWSAthenaHook, 'get_conn')
<add> def test_xcom_push_and_pull(self, mock_conn, mock_run_query, mock_check_query_status):
<add> ti = TaskInstance(task=self.athena, execution_date=timezone.utcnow())
<add> ti.run()
<add>
<add> self.assertEqual(ti.xcom_pull(task_ids='test_aws_athena_operator'),
<add> ATHENA_QUERY_ID)
<add>
<ide>
<ide> if __name__ == '__main__':
<ide> unittest.main() | 2 |
Ruby | Ruby | fix stubbed methods in test cases | d2d72966c30e8a537ad1c5dbab52b148645d7b37 | <ide><path>activerecord/test/cases/fixtures_test.rb
<ide> def rollback_transaction(*args); end
<ide> end.new
<ide>
<ide> connection.pool = Class.new do
<del> def lock_thread=(lock_thread); false; end
<add> def lock_thread=(lock_thread); end
<ide> end.new
<ide>
<ide> connection.expects(:begin_transaction).with(joinable: false)
<ide> def rollback_transaction(*args)
<ide> end.new
<ide>
<ide> connection.pool = Class.new do
<del> def lock_thread=(lock_thread); false; end
<add> def lock_thread=(lock_thread); end
<ide> end.new
<ide>
<ide> fire_connection_notification(connection)
<ide><path>activerecord/test/cases/tasks/mysql_rake_test.rb
<ide> def test_raises_error
<ide>
<ide> class MySQLDBDropTest < ActiveRecord::TestCase
<ide> def setup
<del> @connection = Class.new { def drop_database(name); true end }.new
<add> @connection = Class.new { def drop_database(name); end }.new
<ide> @configuration = {
<ide> "adapter" => "mysql2",
<ide> "database" => "my-app-db" | 2 |
Javascript | Javascript | fix bad caching in reactid.getid | 3266818b42fbb035ea8d9b48c8a97da5f3e9d756 | <ide><path>src/core/ReactID.js
<ide> function setID(node, id) {
<ide> * @internal
<ide> */
<ide> function getNode(id) {
<del> if (!nodeCache.hasOwnProperty(id)) {
<add> if (!nodeCache[id]) {
<ide> nodeCache[id] =
<ide> document.getElementById(id) || // TODO Quit using getElementById.
<ide> ReactMount.findReactRenderedDOMNodeSlow(id); | 1 |