content_type
stringclasses 8
values | main_lang
stringclasses 7
values | message
stringlengths 1
50
| sha
stringlengths 40
40
| patch
stringlengths 52
962k
| file_count
int64 1
300
|
---|---|---|---|---|---|
Ruby | Ruby | use the columns hash for primary key lookup | d21f38aaa204ee8df81bd34a6dad3d6de8ad63d9 | <ide><path>activerecord/lib/active_record/relation/finder_methods.rb
<ide> def find_with_ids(*ids)
<ide> def find_one(id)
<ide> id = id.id if ActiveRecord::Base === id
<ide>
<del> column = primary_key.column
<add> column = columns_hash[primary_key.name.to_s]
<ide>
<ide> substitute = connection.substitute_for(column, @bind_values)
<ide> relation = where(primary_key.eq(substitute)) | 1 |
PHP | PHP | add hasreplyto method | fbf0e9e0a17704068fc6158438455f8dde82e748 | <ide><path>src/Illuminate/Mail/Mailable.php
<ide> public function replyTo($address, $name = null)
<ide> return $this->setAddress($address, $name, 'replyTo');
<ide> }
<ide>
<add> /**
<add> * Determine if the given recipient is set on the mailable.
<add> *
<add> * @param object|array|string $address
<add> * @param string|null $name
<add> * @return bool
<add> */
<add> public function hasReplyTo($address, $name = null)
<add> {
<add> return $this->hasRecipient($address, $name, 'replyTo');
<add> }
<add>
<ide> /**
<ide> * Set the recipients of the message.
<ide> *
<ide><path>tests/Mail/MailMailableTest.php
<ide> public function testMailableSetsRecipientsCorrectly()
<ide> $this->assertTrue($mailable->hasTo('[email protected]'));
<ide> }
<ide>
<add> public function testMailableSetsReplyToCorrectly()
<add> {
<add> $mailable = new WelcomeMailableStub;
<add> $mailable->replyTo('[email protected]');
<add> $this->assertEquals([['name' => null, 'address' => '[email protected]']], $mailable->replyTo);
<add> $this->assertTrue($mailable->hasReplyTo('[email protected]'));
<add>
<add> $mailable = new WelcomeMailableStub;
<add> $mailable->replyTo('[email protected]', 'Taylor Otwell');
<add> $this->assertEquals([['name' => 'Taylor Otwell', 'address' => '[email protected]']], $mailable->replyTo);
<add> $this->assertTrue($mailable->hasReplyTo('[email protected]', 'Taylor Otwell'));
<add> $this->assertTrue($mailable->hasReplyTo('[email protected]'));
<add>
<add> $mailable = new WelcomeMailableStub;
<add> $mailable->replyTo(['[email protected]']);
<add> $this->assertEquals([['name' => null, 'address' => '[email protected]']], $mailable->replyTo);
<add> $this->assertTrue($mailable->hasReplyTo('[email protected]'));
<add> $this->assertFalse($mailable->hasReplyTo('[email protected]', 'Taylor Otwell'));
<add>
<add> $mailable = new WelcomeMailableStub;
<add> $mailable->replyTo([['name' => 'Taylor Otwell', 'email' => '[email protected]']]);
<add> $this->assertEquals([['name' => 'Taylor Otwell', 'address' => '[email protected]']], $mailable->replyTo);
<add> $this->assertTrue($mailable->hasReplyTo('[email protected]', 'Taylor Otwell'));
<add> $this->assertTrue($mailable->hasReplyTo('[email protected]'));
<add>
<add> $mailable = new WelcomeMailableStub;
<add> $mailable->replyTo(new MailableTestUserStub);
<add> $this->assertEquals([['name' => 'Taylor Otwell', 'address' => '[email protected]']], $mailable->replyTo);
<add> $this->assertTrue($mailable->hasReplyTo(new MailableTestUserStub));
<add> $this->assertTrue($mailable->hasReplyTo('[email protected]'));
<add>
<add> $mailable = new WelcomeMailableStub;
<add> $mailable->replyTo(collect([new MailableTestUserStub]));
<add> $this->assertEquals([['name' => 'Taylor Otwell', 'address' => '[email protected]']], $mailable->replyTo);
<add> $this->assertTrue($mailable->hasReplyTo(new MailableTestUserStub));
<add> $this->assertTrue($mailable->hasReplyTo('[email protected]'));
<add>
<add> $mailable = new WelcomeMailableStub;
<add> $mailable->replyTo(collect([new MailableTestUserStub, new MailableTestUserStub]));
<add> $this->assertEquals([
<add> ['name' => 'Taylor Otwell', 'address' => '[email protected]'],
<add> ['name' => 'Taylor Otwell', 'address' => '[email protected]'],
<add> ], $mailable->replyTo);
<add> $this->assertTrue($mailable->hasReplyTo(new MailableTestUserStub));
<add> $this->assertTrue($mailable->hasReplyTo('[email protected]'));
<add> }
<add>
<ide> public function testMailableBuildsViewData()
<ide> {
<ide> $mailable = new WelcomeMailableStub; | 2 |
Javascript | Javascript | add specs for jqlite wrapping/node creation | 2d9dd1c17270d8300b909fcb2f36964662000acf | <ide><path>test/jqLiteSpec.js
<ide> describe('jqLite', function(){
<ide> expect(selected.length).toEqual(1);
<ide> expect(selected[0]).toEqual(text);
<ide> });
<add>
<ide> it('should allow construction with html', function(){
<ide> var nodes = jqLite('<div>1</div><span>2</span>');
<ide> expect(nodes.length).toEqual(2);
<ide> expect(nodes[0].innerHTML).toEqual('1');
<ide> expect(nodes[1].innerHTML).toEqual('2');
<ide> });
<add>
<add> it('should allow creation of comment tags', function() {
<add> var nodes = jqLite('<!-- foo -->');
<add> expect(nodes.length).toBe(1);
<add> expect(nodes[0].nodeType).toBe(8);
<add> });
<add>
<add> it('should allow creation of script tags', function() {
<add> var nodes = jqLite('<script></script>');
<add> expect(nodes.length).toBe(1);
<add> expect(nodes[0].tagName.toUpperCase()).toBe('SCRIPT');
<add> });
<add>
<add> it('should wrap document fragment', function() {
<add> var fragment = jqLite(document.createDocumentFragment());
<add> expect(fragment.length).toBe(1);
<add> expect(fragment[0].nodeType).toBe(11);
<add> });
<ide> });
<ide>
<ide> describe('scope', function() { | 1 |
Javascript | Javascript | add packagetranspilationregistry spec | 934ab30a0d3a14f55de2672001982860fda54aa8 | <ide><path>spec/package-transpilation-registry-spec.js
<add>/** @babel */
<add>import fs from 'fs'
<add>import path from 'path'
<add>
<add>import {it, fit, ffit, fffit, beforeEach, afterEach} from './async-spec-helpers'
<add>
<add>import PackageTranspilationRegistry from '../src/package-transpilation-registry'
<add>
<add>let originalCompiler = {
<add> getCachePath: (sourceCode, filePath) => {
<add> return "orig-cache-path"
<add> },
<add>
<add> compile: (sourceCode, filePath) => {
<add> return sourceCode + "-original-compiler"
<add> },
<add>
<add> shouldCompile: (sourceCode, filePath) => {
<add> return path.extname(filePath) === '.js'
<add> }
<add>}
<add>
<add>describe("PackageTranspilationRegistry", () => {
<add> let registry
<add> let wrappedCompiler
<add>
<add> beforeEach(() => {
<add> registry = new PackageTranspilationRegistry()
<add> wrappedCompiler = registry.wrapTranspiler(originalCompiler)
<add> })
<add>
<add> it('falls through to the original compiler by default', () => {
<add> spyOn(originalCompiler, 'getCachePath')
<add> spyOn(originalCompiler, 'compile')
<add> spyOn(originalCompiler, 'shouldCompile')
<add>
<add> wrappedCompiler.getCachePath('source', '/path/to/file.js')
<add> wrappedCompiler.compile('source', '/path/to/filejs')
<add> wrappedCompiler.shouldCompile('source', '/path/to/file.js')
<add>
<add> expect(originalCompiler.getCachePath).toHaveBeenCalled()
<add> expect(originalCompiler.compile).toHaveBeenCalled()
<add> expect(originalCompiler.shouldCompile).toHaveBeenCalled()
<add> })
<add>
<add> describe('when a file is contained in a path that has custom transpilation', () => {
<add> let hitPath = '/path/to/lib/file.js'
<add> let hitPathCoffee = '/path/to/file2.coffee'
<add> let missPath = '/path/other/file3.js'
<add> let hitPathMissSubdir = '/path/to/file4.js'
<add> let hitPathMissExt = '/path/to/file5.ts'
<add>
<add> let jsSpec = { glob: "lib/**/*.js", transpiler: './transpiler-js', options: { type: 'js' } }
<add> let coffeeSpec = { glob: "*.coffee", transpiler: './transpiler-coffee', options: { type: 'coffee' } }
<add>
<add> let jsTranspiler = {
<add> transpile: (sourceCode, filePath, options) => {
<add> return sourceCode + "-transpiler-js"
<add> },
<add>
<add> getCacheKeyData: (sourceCode, filePath, options) => {
<add> return 'js-transpiler-cache-data'
<add> }
<add> }
<add>
<add> let coffeeTranspiler = {
<add> transpile: (sourceCode, filePath, options) => {
<add> return sourceCode + "-transpiler-coffee"
<add> },
<add>
<add> getCacheKeyData: (sourceCode, filePath, options) => {
<add> return 'coffee-transpiler-cache-data'
<add> }
<add> }
<add>
<add> beforeEach(() => {
<add> jsSpec._transpilerSource = "js-transpiler-source"
<add> coffeeSpec._transpilerSource = "coffee-transpiler-source"
<add>
<add> const oldFsRealpathSync = fs.realpathSync.bind(fs)
<add> spyOn(fs, 'realpathSync').andCallFake(thePath => {
<add> if (thePath === '/path/to') return thePath
<add> if (thePath === '/path/other') return thePath
<add> return oldFsRealpathSync(thePath)
<add> })
<add>
<add> spyOn(registry, "getTranspiler").andCallFake(spec => {
<add> if (spec.transpiler === './transpiler-js') return jsTranspiler
<add> if (spec.transpiler === './transpiler-coffee') return coffeeTranspiler
<add> throw new Error('bad transpiler path ' + spec.transpiler)
<add> })
<add>
<add> registry.addTranspilerConfigForPath('/path/to', [
<add> jsSpec, coffeeSpec
<add> ])
<add> })
<add>
<add> it('always returns true from shouldCompile for a file in that dir that match a glob', () => {
<add> spyOn(originalCompiler, 'shouldCompile').andReturn(false)
<add> expect(wrappedCompiler.shouldCompile('source', hitPath)).toBe(true)
<add> expect(wrappedCompiler.shouldCompile('source', hitPathCoffee)).toBe(true)
<add> expect(wrappedCompiler.shouldCompile('source', hitPathMissExt)).toBe(false)
<add> expect(wrappedCompiler.shouldCompile('source', hitPathMissSubdir)).toBe(false)
<add> expect(wrappedCompiler.shouldCompile('source', missPath)).toBe(false)
<add> })
<add>
<add> it('calls getCacheKeyData on the transpiler to get additional cache key data', () => {
<add> spyOn(registry, "getTranspilerPath").andReturn("./transpiler-js")
<add> spyOn(jsTranspiler, 'getCacheKeyData').andCallThrough()
<add>
<add> wrappedCompiler.getCachePath('source', missPath, jsSpec)
<add> expect(jsTranspiler.getCacheKeyData).not.toHaveBeenCalled()
<add> wrappedCompiler.getCachePath('source', hitPath, jsSpec)
<add> expect(jsTranspiler.getCacheKeyData).toHaveBeenCalled()
<add> })
<add>
<add> it('compiles files matching a glob with the associated transpiler, and the old one otherwise', () => {
<add> spyOn(jsTranspiler, "transpile").andCallThrough()
<add> spyOn(coffeeTranspiler, "transpile").andCallThrough()
<add>
<add> expect(wrappedCompiler.compile('source', hitPath)).toEqual('source-transpiler-js')
<add> expect(wrappedCompiler.compile('source', hitPathCoffee)).toEqual('source-transpiler-coffee')
<add> expect(wrappedCompiler.compile('source', missPath)).toEqual('source-original-compiler')
<add> expect(wrappedCompiler.compile('source', hitPathMissExt)).toEqual('source-original-compiler')
<add> expect(wrappedCompiler.compile('source', hitPathMissSubdir)).toEqual('source-original-compiler')
<add> })
<add> })
<add>}) | 1 |
Python | Python | add electra tf 2.x pretrainer | b708fd68e312ddc9a18b6b553c508b8e19507ee0 | <ide><path>official/nlp/modeling/models/electra_pretrainer.py
<add># Copyright 2020 The TensorFlow Authors. All Rights Reserved.
<add>#
<add># Licensed under the Apache License, Version 2.0 (the "License");
<add># you may not use this file except in compliance with the License.
<add># You may obtain a copy of the License at
<add>#
<add># http://www.apache.org/licenses/LICENSE-2.0
<add>#
<add># Unless required by applicable law or agreed to in writing, software
<add># distributed under the License is distributed on an "AS IS" BASIS,
<add># WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<add># See the License for the specific language governing permissions and
<add># limitations under the License.
<add># ==============================================================================
<add>"""Trainer network for ELECTRA models."""
<add># pylint: disable=g-classes-have-attributes
<add>from __future__ import absolute_import
<add>from __future__ import division
<add># from __future__ import google_type_annotations
<add>from __future__ import print_function
<add>
<add>import copy
<add>import tensorflow as tf
<add>
<add>from official.modeling import tf_utils
<add>from official.nlp.modeling import layers
<add>
<add>
<add>@tf.keras.utils.register_keras_serializable(package='Text')
<add>class ElectraPretrainer(tf.keras.Model):
<add> """ELECTRA network training model.
<add>
<add> This is an implementation of the network structure described in "ELECTRA:
<add> Pre-training Text Encoders as Discriminators Rather Than Generators" (
<add> https://arxiv.org/abs/2003.10555).
<add>
<add> The ElectraPretrainer allows a user to pass in two transformer models, one for
<add> generator, the other for discriminator, and instantiates the masked language
<add> model (at generator side) and classification networks (at discriminator side)
<add> that are used to create the training objectives.
<add>
<add> Arguments:
<add> generator_network: A transformer network for generator, this network should
<add> output a sequence output and an optional classification output.
<add> discriminator_network: A transformer network for discriminator, this network
<add> should output a sequence output
<add> vocab_size: Size of generator output vocabulary
<add> num_classes: Number of classes to predict from the classification network
<add> for the generator network (not used now)
<add> sequence_length: Input sequence length
<add> last_hidden_dim: Last hidden dim of generator transformer output
<add> num_token_predictions: Number of tokens to predict from the masked LM.
<add> mlm_activation: The activation (if any) to use in the masked LM and
<add> classification networks. If None, no activation will be used.
<add> mlm_initializer: The initializer (if any) to use in the masked LM and
<add> classification networks. Defaults to a Glorot uniform initializer.
<add> output_type: The output style for this network. Can be either 'logits' or
<add> 'predictions'.
<add> disallow_correct: Whether to disallow the generator to generate the exact
<add> same token in the original sentence
<add> """
<add>
<add> def __init__(self,
<add> generator_network,
<add> discriminator_network,
<add> vocab_size,
<add> num_classes,
<add> sequence_length,
<add> last_hidden_dim,
<add> num_token_predictions,
<add> mlm_activation=None,
<add> mlm_initializer='glorot_uniform',
<add> output_type='logits',
<add> disallow_correct=False,
<add> **kwargs):
<add> super(ElectraPretrainer, self).__init__()
<add> self._config = {
<add> 'generator_network': generator_network,
<add> 'discriminator_network': discriminator_network,
<add> 'vocab_size': vocab_size,
<add> 'num_classes': num_classes,
<add> 'sequence_length': sequence_length,
<add> 'last_hidden_dim': last_hidden_dim,
<add> 'num_token_predictions': num_token_predictions,
<add> 'mlm_activation': mlm_activation,
<add> 'mlm_initializer': mlm_initializer,
<add> 'output_type': output_type,
<add> 'disallow_correct': disallow_correct,
<add> }
<add> for k, v in kwargs.items():
<add> self._config[k] = v
<add>
<add> self.generator_network = generator_network
<add> self.discriminator_network = discriminator_network
<add> self.vocab_size = vocab_size
<add> self.num_classes = num_classes
<add> self.sequence_length = sequence_length
<add> self.last_hidden_dim = last_hidden_dim
<add> self.num_token_predictions = num_token_predictions
<add> self.mlm_activation = mlm_activation
<add> self.mlm_initializer = mlm_initializer
<add> self.output_type = output_type
<add> self.disallow_correct = disallow_correct
<add> self.masked_lm = layers.MaskedLM(
<add> embedding_table=generator_network.get_embedding_table(),
<add> activation=mlm_activation,
<add> initializer=mlm_initializer,
<add> output=output_type,
<add> name='generator_masked_lm')
<add> self.classification = layers.ClassificationHead(
<add> inner_dim=last_hidden_dim,
<add> num_classes=num_classes,
<add> initializer=mlm_initializer,
<add> name='generator_classification_head')
<add> self.discriminator_head = tf.keras.layers.Dense(
<add> units=1, kernel_initializer=mlm_initializer)
<add>
<add> def call(self, inputs):
<add> input_word_ids = inputs['input_word_ids']
<add> input_mask = inputs['input_mask']
<add> input_type_ids = inputs['input_type_ids']
<add> masked_lm_positions = inputs['masked_lm_positions']
<add>
<add> ### Generator ###
<add> sequence_output, cls_output = self.generator_network(
<add> [input_word_ids, input_mask, input_type_ids])
<add>
<add> # The generator encoder network may get outputs from all layers.
<add> if isinstance(sequence_output, list):
<add> sequence_output = sequence_output[-1]
<add> if isinstance(cls_output, list):
<add> cls_output = cls_output[-1]
<add>
<add> lm_outputs = self.masked_lm(sequence_output, masked_lm_positions)
<add> sentence_outputs = self.classification(sequence_output)
<add>
<add> ### Sampling from generator ###
<add> fake_data = self._get_fake_data(inputs, lm_outputs, duplicate=True)
<add>
<add> ### Discriminator ###
<add> disc_input = fake_data['inputs']
<add> disc_label = fake_data['is_fake_tokens']
<add> disc_sequence_output, _ = self.discriminator_network([
<add> disc_input['input_word_ids'], disc_input['input_mask'],
<add> disc_input['input_type_ids']
<add> ])
<add>
<add> # The discriminator encoder network may get outputs from all layers.
<add> if isinstance(disc_sequence_output, list):
<add> disc_sequence_output = disc_sequence_output[-1]
<add>
<add> disc_logits = self.discriminator_head(disc_sequence_output)
<add> disc_logits = tf.squeeze(disc_logits, axis=-1)
<add>
<add> return lm_outputs, sentence_outputs, disc_logits, disc_label
<add>
<add> def _get_fake_data(self, inputs, mlm_logits, duplicate=True):
<add> """Generate corrupted data for discriminator.
<add>
<add> Args:
<add> inputs: A dict of all inputs, same as the input of call() function
<add> mlm_logits: The generator's output logits
<add> duplicate: Whether to copy the original inputs dict during modifications
<add>
<add> Returns:
<add> A dict of generated fake data
<add> """
<add> inputs = unmask(inputs, duplicate)
<add>
<add> if self.disallow_correct:
<add> disallow = tf.one_hot(
<add> inputs['masked_lm_ids'], depth=self.vocab_size, dtype=tf.float32)
<add> else:
<add> disallow = None
<add>
<add> sampled_tokens = tf.stop_gradient(
<add> sample_from_softmax(mlm_logits, disallow=disallow))
<add> sampled_tokids = tf.argmax(sampled_tokens, -1, output_type=tf.int32)
<add> updated_input_ids, masked = scatter_update(inputs['input_word_ids'],
<add> sampled_tokids,
<add> inputs['masked_lm_positions'])
<add> labels = masked * (1 - tf.cast(
<add> tf.equal(updated_input_ids, inputs['input_word_ids']), tf.int32))
<add>
<add> updated_inputs = get_updated_inputs(
<add> inputs, duplicate, input_word_ids=updated_input_ids)
<add>
<add> return {
<add> 'inputs': updated_inputs,
<add> 'is_fake_tokens': labels,
<add> 'sampled_tokens': sampled_tokens
<add> }
<add>
<add> def get_config(self):
<add> return self._config
<add>
<add> @classmethod
<add> def from_config(cls, config, custom_objects=None):
<add> return cls(**config)
<add>
<add>
<add>def scatter_update(sequence, updates, positions):
<add> """Scatter-update a sequence.
<add>
<add> Args:
<add> sequence: A [batch_size, seq_len] or [batch_size, seq_len, depth] tensor
<add> updates: A tensor of size batch_size*seq_len(*depth)
<add> positions: A [batch_size, n_positions] tensor
<add>
<add> Returns:
<add> updated_sequence: A [batch_size, seq_len] or [batch_size, seq_len, depth]
<add> tensor of "sequence" with elements at "positions" replaced by the values
<add> at "updates". Updates to index 0 are ignored. If there are duplicated
<add> positions the update is only applied once.
<add> updates_mask: A [batch_size, seq_len] mask tensor of which inputs were
<add> updated.
<add> """
<add> shape = tf_utils.get_shape_list(sequence, expected_rank=[2, 3])
<add> depth_dimension = (len(shape) == 3)
<add> if depth_dimension:
<add> batch_size, seq_len, depth = shape
<add> else:
<add> batch_size, seq_len = shape
<add> depth = 1
<add> sequence = tf.expand_dims(sequence, -1)
<add> n_positions = tf_utils.get_shape_list(positions)[1]
<add>
<add> shift = tf.expand_dims(seq_len * tf.range(batch_size), -1)
<add> flat_positions = tf.reshape(positions + shift, [-1, 1])
<add> flat_updates = tf.reshape(updates, [-1, depth])
<add> updates = tf.scatter_nd(flat_positions, flat_updates,
<add> [batch_size * seq_len, depth])
<add> updates = tf.reshape(updates, [batch_size, seq_len, depth])
<add>
<add> flat_updates_mask = tf.ones([batch_size * n_positions], tf.int32)
<add> updates_mask = tf.scatter_nd(flat_positions, flat_updates_mask,
<add> [batch_size * seq_len])
<add> updates_mask = tf.reshape(updates_mask, [batch_size, seq_len])
<add> not_first_token = tf.concat([
<add> tf.zeros((batch_size, 1), tf.int32),
<add> tf.ones((batch_size, seq_len - 1), tf.int32)
<add> ], -1)
<add> updates_mask *= not_first_token
<add> updates_mask_3d = tf.expand_dims(updates_mask, -1)
<add>
<add> # account for duplicate positions
<add> if sequence.dtype == tf.float32:
<add> updates_mask_3d = tf.cast(updates_mask_3d, tf.float32)
<add> updates /= tf.maximum(1.0, updates_mask_3d)
<add> else:
<add> assert sequence.dtype == tf.int32
<add> updates = tf.math.floordiv(updates, tf.maximum(1, updates_mask_3d))
<add> updates_mask = tf.minimum(updates_mask, 1)
<add> updates_mask_3d = tf.minimum(updates_mask_3d, 1)
<add>
<add> updated_sequence = (((1 - updates_mask_3d) * sequence) +
<add> (updates_mask_3d * updates))
<add> if not depth_dimension:
<add> updated_sequence = tf.squeeze(updated_sequence, -1)
<add>
<add> return updated_sequence, updates_mask
<add>
<add>
<add>def sample_from_softmax(logits, disallow=None):
<add> """Implement softmax sampling using gumbel softmax trick.
<add>
<add> Args:
<add> logits: A [batch_size, num_token_predictions, vocab_size] tensor indicating
<add> the generator output logits for each masked position.
<add> disallow: If `None`, we directly sample tokens from the logits. Otherwise,
<add> this is a tensor of size [batch_size, num_token_predictions, vocab_size]
<add> indicating the true word id in each masked position.
<add>
<add> Returns:
<add> sampled_tokens: A [batch_size, num_token_predictions, vocab_size] one hot
<add> tensor indicating the sampled word id in each masked position.
<add> """
<add> if disallow is not None:
<add> logits -= 1000.0 * disallow
<add> uniform_noise = tf.random.uniform(
<add> tf_utils.get_shape_list(logits), minval=0, maxval=1)
<add> gumbel_noise = -tf.math.log(-tf.math.log(uniform_noise + 1e-9) + 1e-9)
<add>
<add> # Here we essentially follow the original paper and use temperature 1.0 for
<add> # generator output logits.
<add> sampled_tokens = tf.one_hot(
<add> tf.argmax(tf.nn.softmax(logits + gumbel_noise), -1, output_type=tf.int32),
<add> logits.shape[-1])
<add> return sampled_tokens
<add>
<add>
<add>def unmask(inputs, duplicate):
<add> unmasked_input_word_ids, _ = scatter_update(inputs['input_word_ids'],
<add> inputs['masked_lm_ids'],
<add> inputs['masked_lm_positions'])
<add> return get_updated_inputs(
<add> inputs, duplicate, input_word_ids=unmasked_input_word_ids)
<add>
<add>
<add>def get_updated_inputs(inputs, duplicate, **kwargs):
<add> if duplicate:
<add> new_inputs = copy.copy(inputs)
<add> else:
<add> new_inputs = inputs
<add> for k, v in kwargs.items():
<add> new_inputs[k] = v
<add> return new_inputs
<ide><path>official/nlp/modeling/models/electra_pretrainer_test.py
<add># Copyright 2020 The TensorFlow Authors. All Rights Reserved.
<add>#
<add># Licensed under the Apache License, Version 2.0 (the "License");
<add># you may not use this file except in compliance with the License.
<add># You may obtain a copy of the License at
<add>#
<add># http://www.apache.org/licenses/LICENSE-2.0
<add>#
<add># Unless required by applicable law or agreed to in writing, software
<add># distributed under the License is distributed on an "AS IS" BASIS,
<add># WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<add># See the License for the specific language governing permissions and
<add># limitations under the License.
<add># ==============================================================================
<add>"""Tests for ELECTRA pre trainer network."""
<add>
<add>from __future__ import absolute_import
<add>from __future__ import division
<add>from __future__ import print_function
<add>
<add>import tensorflow as tf
<add>
<add>from tensorflow.python.keras import keras_parameterized # pylint: disable=g-direct-tensorflow-import
<add>from official.nlp.modeling import networks
<add>from official.nlp.modeling.models import electra_pretrainer
<add>
<add>
<add># This decorator runs the test in V1, V2-Eager, and V2-Functional mode. It
<add># guarantees forward compatibility of this code for the V2 switchover.
<add>@keras_parameterized.run_all_keras_modes
<add>class ElectraPretrainerTest(keras_parameterized.TestCase):
<add>
<add> def test_electra_pretrainer(self):
<add> """Validate that the Keras object can be created."""
<add> # Build a transformer network to use within the ELECTRA trainer.
<add> vocab_size = 100
<add> sequence_length = 512
<add> test_generator_network = networks.TransformerEncoder(
<add> vocab_size=vocab_size, num_layers=2, sequence_length=sequence_length)
<add> test_discriminator_network = networks.TransformerEncoder(
<add> vocab_size=vocab_size, num_layers=2, sequence_length=sequence_length)
<add>
<add> # Create a ELECTRA trainer with the created network.
<add> num_classes = 3
<add> num_token_predictions = 2
<add> eletrca_trainer_model = electra_pretrainer.ElectraPretrainer(
<add> generator_network=test_generator_network,
<add> discriminator_network=test_discriminator_network,
<add> vocab_size=vocab_size,
<add> num_classes=num_classes,
<add> sequence_length=sequence_length,
<add> last_hidden_dim=768,
<add> num_token_predictions=num_token_predictions,
<add> disallow_correct=True)
<add>
<add> # Create a set of 2-dimensional inputs (the first dimension is implicit).
<add> word_ids = tf.keras.Input(shape=(sequence_length,), dtype=tf.int32)
<add> mask = tf.keras.Input(shape=(sequence_length,), dtype=tf.int32)
<add> type_ids = tf.keras.Input(shape=(sequence_length,), dtype=tf.int32)
<add> lm_positions = tf.keras.Input(
<add> shape=(num_token_predictions,), dtype=tf.int32)
<add> lm_ids = tf.keras.Input(shape=(num_token_predictions,), dtype=tf.int32)
<add> inputs = {
<add> 'input_word_ids': word_ids,
<add> 'input_mask': mask,
<add> 'input_type_ids': type_ids,
<add> 'masked_lm_positions': lm_positions,
<add> 'masked_lm_ids': lm_ids
<add> }
<add>
<add> # Invoke the trainer model on the inputs. This causes the layer to be built.
<add> lm_outs, cls_outs, disc_logits, disc_label = eletrca_trainer_model(inputs)
<add>
<add> # Validate that the outputs are of the expected shape.
<add> expected_lm_shape = [None, num_token_predictions, vocab_size]
<add> expected_classification_shape = [None, num_classes]
<add> expected_disc_logits_shape = [None, sequence_length]
<add> expected_disc_label_shape = [None, sequence_length]
<add> self.assertAllEqual(expected_lm_shape, lm_outs.shape.as_list())
<add> self.assertAllEqual(expected_classification_shape, cls_outs.shape.as_list())
<add> self.assertAllEqual(expected_disc_logits_shape, disc_logits.shape.as_list())
<add> self.assertAllEqual(expected_disc_label_shape, disc_label.shape.as_list())
<add>
<add> def test_electra_trainer_tensor_call(self):
<add> """Validate that the Keras object can be invoked."""
<add> # Build a transformer network to use within the ELECTRA trainer. (Here, we
<add> # use a short sequence_length for convenience.)
<add> test_generator_network = networks.TransformerEncoder(
<add> vocab_size=100, num_layers=4, sequence_length=3)
<add> test_discriminator_network = networks.TransformerEncoder(
<add> vocab_size=100, num_layers=4, sequence_length=3)
<add>
<add> # Create a ELECTRA trainer with the created network.
<add> eletrca_trainer_model = electra_pretrainer.ElectraPretrainer(
<add> generator_network=test_generator_network,
<add> discriminator_network=test_discriminator_network,
<add> vocab_size=100,
<add> num_classes=2,
<add> sequence_length=3,
<add> last_hidden_dim=768,
<add> num_token_predictions=2)
<add>
<add> # Create a set of 2-dimensional data tensors to feed into the model.
<add> word_ids = tf.constant([[1, 1, 1], [2, 2, 2]], dtype=tf.int32)
<add> mask = tf.constant([[1, 1, 1], [1, 0, 0]], dtype=tf.int32)
<add> type_ids = tf.constant([[1, 1, 1], [2, 2, 2]], dtype=tf.int32)
<add> lm_positions = tf.constant([[0, 1], [0, 2]], dtype=tf.int32)
<add> lm_ids = tf.constant([[10, 20], [20, 30]], dtype=tf.int32)
<add> inputs = {
<add> 'input_word_ids': word_ids,
<add> 'input_mask': mask,
<add> 'input_type_ids': type_ids,
<add> 'masked_lm_positions': lm_positions,
<add> 'masked_lm_ids': lm_ids
<add> }
<add>
<add> # Invoke the trainer model on the tensors. In Eager mode, this does the
<add> # actual calculation. (We can't validate the outputs, since the network is
<add> # too complex: this simply ensures we're not hitting runtime errors.)
<add> _, _, _, _ = eletrca_trainer_model(inputs)
<add>
<add> def test_serialize_deserialize(self):
<add> """Validate that the ELECTRA trainer can be serialized and deserialized."""
<add> # Build a transformer network to use within the BERT trainer. (Here, we use
<add> # a short sequence_length for convenience.)
<add> test_generator_network = networks.TransformerEncoder(
<add> vocab_size=100, num_layers=4, sequence_length=3)
<add> test_discriminator_network = networks.TransformerEncoder(
<add> vocab_size=100, num_layers=4, sequence_length=3)
<add>
<add> # Create a ELECTRA trainer with the created network. (Note that all the args
<add> # are different, so we can catch any serialization mismatches.)
<add> electra_trainer_model = electra_pretrainer.ElectraPretrainer(
<add> generator_network=test_generator_network,
<add> discriminator_network=test_discriminator_network,
<add> vocab_size=100,
<add> num_classes=2,
<add> sequence_length=3,
<add> last_hidden_dim=768,
<add> num_token_predictions=2)
<add>
<add> # Create another BERT trainer via serialization and deserialization.
<add> config = electra_trainer_model.get_config()
<add> new_electra_trainer_model = electra_pretrainer.ElectraPretrainer.from_config(
<add> config)
<add>
<add> # Validate that the config can be forced to JSON.
<add> _ = new_electra_trainer_model.to_json()
<add>
<add> # If the serialization was successful, the new config should match the old.
<add> self.assertAllEqual(electra_trainer_model.get_config(),
<add> new_electra_trainer_model.get_config())
<add>
<add>
<add>if __name__ == '__main__':
<add> tf.test.main()
<ide><path>official/nlp/modeling/networks/transformer_encoder.py
<ide> class TransformerEncoder(tf.keras.Model):
<ide> initializer: The initialzer to use for all weights in this encoder.
<ide> return_all_encoder_outputs: Whether to output sequence embedding outputs of
<ide> all encoder transformer layers.
<del> output_range: the sequence output range, [0, output_range), by slicing the
<add> output_range: The sequence output range, [0, output_range), by slicing the
<ide> target sequence of the last transformer layer. `None` means the entire
<ide> target sequence will attend to the source sequence, which yeilds the full
<ide> output.
<ide> class TransformerEncoder(tf.keras.Model):
<ide> two matrices in the shape of ['vocab_size', 'embedding_width'] and
<ide> ['embedding_width', 'hidden_size'] ('embedding_width' is usually much
<ide> smaller than 'hidden_size').
<add> embedding_layer: The word embedding layer. `None` means we will create a new
<add> embedding layer. Otherwise, we will reuse the given embedding layer. This
<add> parameter is originally added for ELECTRA model which needs to tie the
<add> generator embeddings with the discriminator embeddings.
<ide> """
<ide>
<ide> def __init__(self,
<ide> def __init__(self,
<ide> return_all_encoder_outputs=False,
<ide> output_range=None,
<ide> embedding_width=None,
<add> embedding_layer=None,
<ide> **kwargs):
<ide> activation = tf.keras.activations.get(activation)
<ide> initializer = tf.keras.initializers.get(initializer)
<ide> def __init__(self,
<ide>
<ide> if embedding_width is None:
<ide> embedding_width = hidden_size
<del> self._embedding_layer = layers.OnDeviceEmbedding(
<del> vocab_size=vocab_size,
<del> embedding_width=embedding_width,
<del> initializer=initializer,
<del> name='word_embeddings')
<add> if embedding_layer is None:
<add> self._embedding_layer = layers.OnDeviceEmbedding(
<add> vocab_size=vocab_size,
<add> embedding_width=embedding_width,
<add> initializer=initializer,
<add> name='word_embeddings')
<add> else:
<add> self._embedding_layer = embedding_layer
<ide> word_embeddings = self._embedding_layer(word_ids)
<ide>
<ide> # Always uses dynamic slicing for simplicity.
<ide> def __init__(self,
<ide> def get_embedding_table(self):
<ide> return self._embedding_layer.embeddings
<ide>
<add> def get_embedding_layer(self):
<add> return self._embedding_layer
<add>
<ide> def get_config(self):
<ide> return self._config_dict
<ide> | 3 |
Ruby | Ruby | simplify namespace assignment in fields_for | 4e8fbc0c229327b1d2986c30faa497f699a4b122 | <ide><path>actionpack/lib/action_view/helpers/form_helper.rb
<ide> def fields_for(record_name, record_object = nil, fields_options = {}, &block)
<ide> fields_options, record_object = record_object, nil if record_object.is_a?(Hash) && record_object.extractable_options?
<ide> fields_options[:builder] ||= options[:builder]
<ide> fields_options[:parent_builder] = self
<del> fields_options[:namespace] = fields_options[:parent_builder].options[:namespace]
<add> fields_options[:namespace] = options[:namespace]
<ide>
<ide> case record_name
<ide> when String, Symbol | 1 |
PHP | PHP | fix documentation block | fd2c341fc1f1feef8933cf9e2054989bd6878da0 | <ide><path>src/Illuminate/Http/Request.php
<ide> public function session()
<ide> /**
<ide> * Get the user making the request.
<ide> *
<del> * @return \Closure
<add> * @return mixed
<ide> */
<ide> public function user()
<ide> { | 1 |
Ruby | Ruby | fix bug with eager_load in development environment | 7dbc6d6979aec1ce7364269360d277fa2499e919 | <ide><path>actionpack/lib/action_dispatch/journey/routes.rb
<ide> def partition_route(route)
<ide> def ast
<ide> @ast ||= begin
<ide> asts = anchored_routes.map(&:ast)
<del> Nodes::Or.new(asts) unless asts.empty?
<add> Nodes::Or.new(asts)
<ide> end
<ide> end
<ide>
<ide><path>actionpack/test/journey/router_test.rb
<ide> def test_multi_verb_recognition
<ide> assert_not called
<ide> end
<ide>
<add> def test_eager_load_with_routes
<add> get "/foo-bar", to: "foo#bar"
<add> assert_nil router.eager_load!
<add> end
<add>
<add> def test_eager_load_without_routes
<add> assert_nil router.eager_load!
<add> end
<add>
<ide> private
<ide>
<ide> def get(*args) | 2 |
Ruby | Ruby | remove test ordering bug by using another class | 34497c0b3638e7fd298da4a1107333fe534f9ca4 | <ide><path>activerecord/test/cases/callbacks_test.rb
<ide> def history
<ide> end
<ide> end
<ide>
<add>class CallbackDeveloperWithFalseValidation < CallbackDeveloper
<add> before_validation proc { |model| model.history << [:before_validation, :returning_false]; return false }
<add> before_validation proc { |model| model.history << [:before_validation, :should_never_get_here] }
<add>end
<add>
<ide> class ParentDeveloper < ActiveRecord::Base
<ide> set_table_name 'developers'
<ide> attr_accessor :after_save_called
<ide> def assert_save_callbacks_not_called(someone)
<ide> end
<ide> private :assert_save_callbacks_not_called
<ide>
<del> def test_zzz_callback_returning_false # must be run last since we modify CallbackDeveloper
<del> david = CallbackDeveloper.find(1)
<del> CallbackDeveloper.before_validation proc { |model| model.history << [:before_validation, :returning_false]; return false }
<del> CallbackDeveloper.before_validation proc { |model| model.history << [:before_validation, :should_never_get_here] }
<add> def test_callback_returning_false
<add> david = CallbackDeveloperWithFalseValidation.find(1)
<ide> david.save
<ide> assert_equal [
<ide> [ :after_find, :method ], | 1 |
Text | Text | add bengl to collaborators | 38c8fd4afbfbe174b285d21af78782d47ee4964e | <ide><path>README.md
<ide> information about the governance of the Node.js project, see
<ide>
<ide> * [addaleax](https://github.com/addaleax) - **Anna Henningsen** <[email protected]>
<ide> * [AndreasMadsen](https://github.com/AndreasMadsen) - **Andreas Madsen** <[email protected]>
<add>* [bengl](https://github.com/bengl) - **Bryan English** <[email protected]>
<ide> * [benjamingr](https://github.com/benjamingr) - **Benjamin Gruenbaum** <[email protected]>
<ide> * [brendanashworth](https://github.com/brendanashworth) - **Brendan Ashworth** <[email protected]>
<ide> * [calvinmetcalf](https://github.com/calvinmetcalf) - **Calvin Metcalf** <[email protected]> | 1 |
PHP | PHP | add container flush test | 22126ac39c371adca21698535020bc61aa19a3ad | <ide><path>tests/Container/ContainerTest.php
<ide> public function testForgetInstancesForgetsAllInstances()
<ide> $this->assertFalse($container->isShared('Instance2'));
<ide> $this->assertFalse($container->isShared('Instance3'));
<ide> }
<add>
<add> public function testContainerFlushFlushesAllBindingsAliasesAndResolvedInstances()
<add> {
<add> $container = new Container;
<add> $container->bind('ConcreteStub', function () { return new ContainerConcreteStub; }, true);
<add> $container->alias('ConcreteStub', 'ContainerConcreteStub');
<add> $concreteStubInstance = $container->make('ConcreteStub');
<add> $this->assertTrue($container->resolved('ConcreteStub'));
<add> $this->assertTrue($container->isAlias('ContainerConcreteStub'));
<add> $this->assertArrayHasKey('ConcreteStub', $container->getBindings());
<add> $this->assertTrue($container->isShared('ConcreteStub'));
<add> $container->flush();
<add> $this->assertFalse($container->resolved('ConcreteStub'));
<add> $this->assertFalse($container->isAlias('ContainerConcreteStub'));
<add> $this->assertEmpty($container->getBindings());
<add> $this->assertFalse($container->isShared('ConcreteStub'));
<add> }
<ide> }
<ide>
<ide> class ContainerConcreteStub | 1 |
Python | Python | find max function in python | 67d409b6be5f90c33e73ddf73ba2966d8f2c44f4 | <ide><path>Maths/FindMax.py
<add># NguyenU
<add>
<add>import math
<add>
<add>def find_max(nums):
<add> max = 0
<add> for x in nums:
<add> if x > max:
<add> max = x
<add> print max | 1 |
Ruby | Ruby | add guard for calling `finalize` on nullsession | d21345baf0c5ebfa42452979229c7d20a092c7f4 | <ide><path>activerecord/lib/active_record/asynchronous_queries_tracker.rb
<ide> def start_session
<ide> end
<ide>
<ide> def finalize_session
<del> @current_session&.finalize
<add> @current_session.finalize if @current_session.respond_to?(:finalize)
<ide> @current_session = NullSession
<ide> end
<ide> end
<ide><path>activerecord/test/cases/adapter_test.rb
<ide> def test_async_query_foreground_fallback
<ide> ensure
<ide> ActiveSupport::Notifications.unsubscribe(subscriber) if subscriber
<ide> end
<add>
<add> def test_async_query_finalize_with_null_session
<add> assert_nothing_raised do
<add> @connection.select_all "SELECT * FROM posts", async: true
<add> end
<add>
<add> @connection.transaction do
<add> assert_raises AsynchronousQueryInsideTransactionError do
<add> @connection.select_all "SELECT * FROM posts", async: true
<add> end
<add> end
<add> ensure
<add> ActiveRecord::Base.asynchronous_queries_tracker.finalize_session
<add> end
<ide> end
<ide>
<ide> class AsynchronousQueriesTest < ActiveRecord::TestCase | 2 |
Go | Go | use formatter in docker diff | 3dad39b957a008127890ddf9037c063d69b99347 | <ide><path>cli/command/container/diff.go
<ide> package container
<ide>
<ide> import (
<del> "fmt"
<del>
<ide> "github.com/docker/docker/cli"
<ide> "github.com/docker/docker/cli/command"
<del> "github.com/docker/docker/pkg/archive"
<add> "github.com/docker/docker/cli/command/formatter"
<ide> "github.com/pkg/errors"
<ide> "github.com/spf13/cobra"
<ide> "golang.org/x/net/context"
<ide> func runDiff(dockerCli *command.DockerCli, opts *diffOptions) error {
<ide> if err != nil {
<ide> return err
<ide> }
<del>
<del> for _, change := range changes {
<del> var kind string
<del> switch change.Kind {
<del> case archive.ChangeModify:
<del> kind = "C"
<del> case archive.ChangeAdd:
<del> kind = "A"
<del> case archive.ChangeDelete:
<del> kind = "D"
<del> }
<del> fmt.Fprintln(dockerCli.Out(), kind, change.Path)
<add> diffCtx := formatter.Context{
<add> Output: dockerCli.Out(),
<add> Format: formatter.NewDiffFormat("{{.Type}} {{.Path}}"),
<ide> }
<del>
<del> return nil
<add> return formatter.DiffWrite(diffCtx, changes)
<ide> }
<ide><path>cli/command/formatter/diff.go
<add>package formatter
<add>
<add>import (
<add> "github.com/docker/docker/api/types/container"
<add> "github.com/docker/docker/pkg/archive"
<add>)
<add>
<add>const (
<add> defaultDiffTableFormat = "table {{.Type}}\t{{.Path}}"
<add>
<add> changeTypeHeader = "CHANGE TYPE"
<add> pathHeader = "PATH"
<add>)
<add>
<add>// NewDiffFormat returns a format for use with a diff Context
<add>func NewDiffFormat(source string) Format {
<add> switch source {
<add> case TableFormatKey:
<add> return defaultDiffTableFormat
<add> }
<add> return Format(source)
<add>}
<add>
<add>// DiffWrite writes formatted diff using the Context
<add>func DiffWrite(ctx Context, changes []container.ContainerChangeResponseItem) error {
<add>
<add> render := func(format func(subContext subContext) error) error {
<add> for _, change := range changes {
<add> if err := format(&diffContext{c: change}); err != nil {
<add> return err
<add> }
<add> }
<add> return nil
<add> }
<add> return ctx.Write(newDiffContext(), render)
<add>}
<add>
<add>type diffContext struct {
<add> HeaderContext
<add> c container.ContainerChangeResponseItem
<add>}
<add>
<add>func newDiffContext() *diffContext {
<add> diffCtx := diffContext{}
<add> diffCtx.header = map[string]string{
<add> "Type": changeTypeHeader,
<add> "Path": pathHeader,
<add> }
<add> return &diffCtx
<add>}
<add>
<add>func (d *diffContext) MarshalJSON() ([]byte, error) {
<add> return marshalJSON(d)
<add>}
<add>
<add>func (d *diffContext) Type() string {
<add> var kind string
<add> switch d.c.Kind {
<add> case archive.ChangeModify:
<add> kind = "C"
<add> case archive.ChangeAdd:
<add> kind = "A"
<add> case archive.ChangeDelete:
<add> kind = "D"
<add> }
<add> return kind
<add>
<add>}
<add>
<add>func (d *diffContext) Path() string {
<add> return d.c.Path
<add>}
<ide><path>cli/command/formatter/diff_test.go
<add>package formatter
<add>
<add>import (
<add> "bytes"
<add> "testing"
<add>
<add> "github.com/docker/docker/api/types/container"
<add> "github.com/docker/docker/pkg/archive"
<add> "github.com/docker/docker/pkg/testutil/assert"
<add>)
<add>
<add>func TestDiffContextFormatWrite(t *testing.T) {
<add> // Check default output format (verbose and non-verbose mode) for table headers
<add> cases := []struct {
<add> context Context
<add> expected string
<add> }{
<add> {
<add> Context{Format: NewDiffFormat("table")},
<add> `CHANGE TYPE PATH
<add>C /var/log/app.log
<add>A /usr/app/app.js
<add>D /usr/app/old_app.js
<add>`,
<add> },
<add> {
<add> Context{Format: NewDiffFormat("table {{.Path}}")},
<add> `PATH
<add>/var/log/app.log
<add>/usr/app/app.js
<add>/usr/app/old_app.js
<add>`,
<add> },
<add> {
<add> Context{Format: NewDiffFormat("{{.Type}}: {{.Path}}")},
<add> `C: /var/log/app.log
<add>A: /usr/app/app.js
<add>D: /usr/app/old_app.js
<add>`,
<add> },
<add> }
<add>
<add> diffs := []container.ContainerChangeResponseItem{
<add> {archive.ChangeModify, "/var/log/app.log"},
<add> {archive.ChangeAdd, "/usr/app/app.js"},
<add> {archive.ChangeDelete, "/usr/app/old_app.js"},
<add> }
<add>
<add> for _, testcase := range cases {
<add> out := bytes.NewBufferString("")
<add> testcase.context.Output = out
<add> err := DiffWrite(testcase.context, diffs)
<add> if err != nil {
<add> assert.Error(t, err, testcase.expected)
<add> } else {
<add> assert.Equal(t, out.String(), testcase.expected)
<add> }
<add> }
<add>} | 3 |
Javascript | Javascript | fix executionenvironment.canusedom for ie8 | eebcf9f888b8a8fc3ed1f31c2789584a235aa089 | <ide><path>src/vendor/core/ExecutionEnvironment.js
<ide>
<ide> "use strict";
<ide>
<del>var canUseDOM =
<add>var canUseDOM = !!(
<ide> typeof window !== 'undefined' &&
<ide> window.document &&
<del> typeof window.document.createElement === 'function';
<add> window.document.createElement
<add>);
<ide>
<ide> /**
<ide> * Simple, lightweight module assisting with the detection and context of | 1 |
Javascript | Javascript | remove unnecessary console instantiation | c8d4ff1d52e639434dcd09d2ee41e9f7926313c9 | <ide><path>lib/events.js
<ide> Object.defineProperty(EventEmitter, 'defaultMaxListeners', {
<ide> return defaultMaxListeners;
<ide> },
<ide> set: function(arg) {
<del> // force global console to be compiled.
<del> // see https://github.com/nodejs/node/issues/4467
<del> console;
<ide> // check whether the input is a positive number (whose value is zero or
<ide> // greater and not a NaN).
<ide> if (typeof arg !== 'number' || arg < 0 || arg !== arg) { | 1 |
PHP | PHP | fix assertion order | d3e6620931a8401f1292aaa14d215959ae721812 | <ide><path>tests/TestCase/Network/Http/ClientTest.php
<ide> public function testGetQuerystringString()
<ide> 'Category' => ['id' => [2, 3]]
<ide> ];
<ide> $result = $http->get('/search', http_build_query($data));
<del> $this->assertSame($result, $response);
<add> $this->assertSame($response, $result);
<ide> }
<ide>
<ide> /** | 1 |
Javascript | Javascript | remove unused variable | 85cc66ac39baccb4b33af824d67ebf41008db9cb | <ide><path>packages/ember-views/lib/views/view.js
<ide> Ember.View = Ember.Object.extend(Ember.Evented,
<ide> @property _context
<ide> */
<ide> _context: Ember.computed(function(key, value) {
<del> var parentView, controller, context;
<add> var parentView, controller;
<ide>
<ide> if (arguments.length === 2) {
<ide> return value; | 1 |
Javascript | Javascript | make async iterator .next() always resolve | fa1535aed7f8dcfe0400f6359c068253032f5973 | <ide><path>lib/internal/streams/async_iterator.js
<ide> function onReadable(iter) {
<ide> function wrapForNext(lastPromise, iter) {
<ide> return (resolve, reject) => {
<ide> lastPromise.then(() => {
<add> if (iter[kEnded]) {
<add> resolve(createIterResult(undefined, true));
<add> return;
<add> }
<add>
<ide> iter[kHandlePromise](resolve, reject);
<ide> }, reject);
<ide> };
<ide> const ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf({
<ide> }
<ide>
<ide> if (this[kEnded]) {
<del> return Promise.resolve(createIterResult(null, true));
<add> return Promise.resolve(createIterResult(undefined, true));
<ide> }
<ide>
<ide> if (this[kStream].destroyed) {
<ide> const ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf({
<ide> if (this[kError]) {
<ide> reject(this[kError]);
<ide> } else {
<del> resolve(createIterResult(null, true));
<add> resolve(createIterResult(undefined, true));
<ide> }
<ide> });
<ide> });
<ide> const ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf({
<ide> reject(err);
<ide> return;
<ide> }
<del> resolve(createIterResult(null, true));
<add> resolve(createIterResult(undefined, true));
<ide> });
<ide> });
<ide> },
<ide> const createReadableStreamAsyncIterator = (stream) => {
<ide> value: stream._readableState.endEmitted,
<ide> writable: true
<ide> },
<del> [kLastPromise]: { value: null, writable: true },
<ide> // the function passed to new Promise
<ide> // is cached so we avoid allocating a new
<ide> // closure at every run
<ide> const createReadableStreamAsyncIterator = (stream) => {
<ide> writable: true,
<ide> },
<ide> });
<add> iterator[kLastPromise] = null;
<ide>
<ide> finished(stream, (err) => {
<ide> if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') {
<ide> const createReadableStreamAsyncIterator = (stream) => {
<ide> iterator[kLastPromise] = null;
<ide> iterator[kLastResolve] = null;
<ide> iterator[kLastReject] = null;
<del> resolve(createIterResult(null, true));
<add> resolve(createIterResult(undefined, true));
<ide> }
<ide> iterator[kEnded] = true;
<ide> });
<ide><path>test/parallel/test-stream-readable-async-iterators.js
<ide> async function tests() {
<ide> r.destroy(null);
<ide> }
<ide> })();
<add>
<add> await (async () => {
<add> console.log('all next promises must be resolved on end');
<add> const r = new Readable({
<add> objectMode: true,
<add> read() {
<add> }
<add> });
<add>
<add> const b = r[Symbol.asyncIterator]();
<add> const c = b.next();
<add> const d = b.next();
<add> r.push(null);
<add> assert.deepStrictEqual(await c, { done: true, value: undefined });
<add> assert.deepStrictEqual(await d, { done: true, value: undefined });
<add> })();
<add>
<add> await (async () => {
<add> console.log('all next promises must be resolved on destroy');
<add> const r = new Readable({
<add> objectMode: true,
<add> read() {
<add> }
<add> });
<add>
<add> const b = r[Symbol.asyncIterator]();
<add> const c = b.next();
<add> const d = b.next();
<add> r.destroy();
<add> assert.deepStrictEqual(await c, { done: true, value: undefined });
<add> assert.deepStrictEqual(await d, { done: true, value: undefined });
<add> })();
<add>
<add> await (async () => {
<add> console.log('all next promises must be resolved on destroy with error');
<add> const r = new Readable({
<add> objectMode: true,
<add> read() {
<add> }
<add> });
<add>
<add> const b = r[Symbol.asyncIterator]();
<add> const c = b.next();
<add> const d = b.next();
<add> const err = new Error('kaboom');
<add> r.destroy(err);
<add>
<add> await Promise.all([(async () => {
<add> let e;
<add> try {
<add> await c;
<add> } catch (_e) {
<add> e = _e;
<add> }
<add> assert.strictEqual(e, err);
<add> })(), (async () => {
<add> let e;
<add> try {
<add> await d;
<add> } catch (_e) {
<add> e = _e;
<add> }
<add> assert.strictEqual(e, err);
<add> })()]);
<add> })();
<ide> }
<ide>
<ide> // to avoid missing some tests if a promise does not resolve | 2 |
Text | Text | add pronouns to readme | 6fe740e014d9eddf3fc177720eeee63d29cafcaf | <ide><path>README.md
<ide> For more information about the governance of the Node.js project, see
<ide> * [MylesBorins](https://github.com/MylesBorins) -
<ide> **Myles Borins** <[email protected]> (he/him)
<ide> * [not-an-aardvark](https://github.com/not-an-aardvark) -
<del>**Teddy Katz** <[email protected]>
<add>**Teddy Katz** <[email protected]> (he/him)
<ide> * [ofrobots](https://github.com/ofrobots) -
<ide> **Ali Ijaz Sheikh** <[email protected]> (he/him)
<ide> * [orangemocha](https://github.com/orangemocha) - | 1 |
Javascript | Javascript | restore no-op function in test | 5b1d12a092c2c38ecb3da0d9b59c702c625e7ae3 | <ide><path>test/parallel/test-http-hostname-typechecking.js
<ide> 'use strict';
<del>const common = require('../common');
<add>require('../common');
<ide>
<ide> const assert = require('assert');
<ide> const http = require('http');
<ide> vals.forEach((v) => {
<ide> // These values are OK and should not throw synchronously
<ide> ['', undefined, null].forEach((v) => {
<ide> assert.doesNotThrow(() => {
<del> http.request({hostname: v}).on('error', common.mustCall()).end();
<del> http.request({host: v}).on('error', common.mustCall()).end();
<add> http.request({hostname: v}).on('error', () => {}).end();
<add> http.request({host: v}).on('error', () => {}).end();
<ide> });
<ide> }); | 1 |
Javascript | Javascript | add optional arg to define custom indentation | 1191edba4eaa15f675fa4ed047949a150843971b | <ide><path>src/Angular.js
<ide> function toJsonReplacer(key, value) {
<ide> * stripped since angular uses this notation internally.
<ide> *
<ide> * @param {Object|Array|Date|string|number} obj Input to be serialized into JSON.
<del> * @param {boolean=} pretty If set to true, the JSON output will contain newlines and whitespace.
<add> * @param {boolean|number=} pretty If set to true, the JSON output will contain newlines and whitespace.
<add> * If set to an integer, the JSON output will contain that many spaces per indentation (the default is 2).
<ide> * @returns {string|undefined} JSON-ified string representing `obj`.
<ide> */
<ide> function toJson(obj, pretty) {
<ide> if (typeof obj === 'undefined') return undefined;
<del> return JSON.stringify(obj, toJsonReplacer, pretty ? ' ' : null);
<add> return JSON.stringify(obj, toJsonReplacer, pretty === true ? 2 : pretty);
<ide> }
<ide>
<ide>
<ide><path>src/ng/filter/filters.js
<ide> function dateFilter($locale) {
<ide> * the binding is automatically converted to JSON.
<ide> *
<ide> * @param {*} object Any JavaScript object (including arrays and primitive types) to filter.
<add> * @param {number=} spacing The number of spaces to use per indentation, defaults to 2.
<ide> * @returns {string} JSON string.
<ide> *
<ide> *
<ide> * @example
<ide> <example>
<ide> <file name="index.html">
<del> <pre>{{ {'name':'value'} | json }}</pre>
<add> <pre id="default-spacing">{{ {'name':'value'} | json }}</pre>
<add> <pre id="custom-spacing">{{ {'name':'value'} | json:4 }}</pre>
<ide> </file>
<ide> <file name="protractor.js" type="protractor">
<ide> it('should jsonify filtered objects', function() {
<del> expect(element(by.binding("{'name':'value'}")).getText()).toMatch(/\{\n "name": ?"value"\n}/);
<add> expect(element(by.id('default-spacing')).getText()).toMatch(/\{\n "name": ?"value"\n}/);
<add> expect(element(by.id('custom-spacing')).getText()).toMatch(/\{\n "name": ?"value"\n}/);
<ide> });
<ide> </file>
<ide> </example>
<ide> *
<ide> */
<ide> function jsonFilter() {
<del> return function(object) {
<del> return toJson(object, true);
<add> return function(object, spacing) {
<add> if (isUndefined(spacing)) {
<add> spacing = 2;
<add> }
<add> return toJson(object, spacing);
<ide> };
<ide> }
<ide>
<ide><path>test/ng/filter/filtersSpec.js
<ide> describe('filters', function() {
<ide> it('should do basic filter', function() {
<ide> expect(filter('json')({a:"b"})).toEqual(toJson({a:"b"}, true));
<ide> });
<add> it('should allow custom indentation', function() {
<add> expect(filter('json')({a:"b"}, 4)).toEqual(toJson({a:"b"}, 4));
<add> });
<ide> });
<ide>
<ide> describe('lowercase', function() { | 3 |
Javascript | Javascript | add loopbackmigrationgrandfathered flag | 9cb33e409c162c7714bf2bf4ff3b80c634846957 | <ide><path>loopbackMigration.js
<ide> var users = dbObservable
<ide> user.username = 'fcc' + uuid.v4().slice(0, 8);
<ide> if (user.github) {
<ide> user.isGithubCool = true;
<add> } else {
<add> user.isMigrationGrandfathered = true;
<ide> }
<ide> return user;
<ide> }) | 1 |
Javascript | Javascript | fix durations not being cloned properly | b864420eab59b45dde8a4b9d33f9b6f81fc7b9b0 | <ide><path>moment.js
<ide> seconds = duration.seconds || duration.second || duration.s || 0,
<ide> milliseconds = duration.milliseconds || duration.millisecond || duration.ms || 0;
<ide>
<add> // store reference to input for deterministic cloning
<add> this._input = duration;
<add>
<ide> // representation for dateAddRemove
<ide> this._milliseconds = milliseconds +
<ide> seconds * 1e3 + // 1000
<ide> moment.duration = function (input, key) {
<ide> var isDuration = moment.isDuration(input),
<ide> isNumber = (typeof input === 'number'),
<del> duration = (isDuration ? input._data : (isNumber ? {} : input)),
<add> duration = (isDuration ? input._input : (isNumber ? {} : input)),
<ide> matched = aspNetTimeSpanJsonRegex.exec(input),
<ide> sign,
<ide> ret;
<ide><path>test/moment/duration.js
<ide> exports.duration = {
<ide>
<ide> "instantiation from another duration" : function(test) {
<ide> var simple = moment.duration(1234),
<add> lengthy = moment.duration(60 * 60 * 24 * 360 * 1e3),
<ide> complicated = moment.duration({
<ide> years: 2,
<ide> months: 3,
<ide> exports.duration = {
<ide> milliseconds: 12
<ide> });
<ide>
<del> test.expect(2);
<add> test.expect(3);
<ide> test.deepEqual(moment.duration(simple), simple, "simple clones are equal");
<add> test.deepEqual(moment.duration(lengthy), lengthy, "lengthy clones are equal");
<ide> test.deepEqual(moment.duration(complicated), complicated, "complicated clones are equal");
<ide> test.done();
<ide> }, | 2 |
Text | Text | fix typos in custom geometry article | 564d38a4318b4c27f7346cc6cec5fd2ecf24d379 | <ide><path>threejs/lessons/threejs-custom-geometry.md
<ide> A [previous article](threejs-primitives.html) gave a tour of
<ide> the various built in primitives included in THREE.js. In this
<ide> article we'll cover making our own geometry.
<ide>
<del>Just to be clear, if you are serious about making 3D content
<add>Just to be clear, if you are serious about making 3D content,
<ide> the most common way is to use a 3D modeling package like
<del>[blender](https://blender.org),
<add>[Blender](https://blender.org),
<ide> [Maya](https://www.autodesk.com/products/maya/overview),
<ide> [3D Studio Max](https://www.autodesk.com/products/3ds-max/overview),
<ide> [Cinema4D](https://www.maxon.net/en-us/), etc...
<ide> You'd build a model and then export to [gLTF](threejs-load-gltf.html)
<ide> or [.obj](threejs-load-obj.html) and load them up.
<del>Which ever one you choose expect to spend 2 or 3 weeks going through
<add>Whichever one you choose, expect to spend 2 or 3 weeks going through
<ide> their respective tutorials as all of them have a learning curve
<ide> to be useful.
<ide>
<ide> First let's just make a cube. Even though three.js already
<ide> provides us with `BoxGeometry` and `BoxBufferGeometry` a
<ide> cube is easy to understand so let's start there.
<ide>
<del>There are 2 ways to make custom geometry in THREE.js one
<add>There are 2 ways to make custom geometry in THREE.js. One
<ide> is with the `Geometry` class, the other is `BufferGeometry`.
<ide> Each has their advantages. `Geometry` is arguably easier to
<ide> use but slower and uses more memory. For few 1000s triangles
<ide> consider using `BufferGeometry`.
<ide>
<ide> Note when I say `Geometry` is slower I mean it is slower to
<ide> start and slower to modify but it is not slower to draw so
<del>if you're not planning on modifying your geometry then
<add>if you're not planning on modifying your geometry then
<ide> as long as it's not too large there will only be slightly more
<ide> delay for your program to start using `Geometry` vs using
<ide> `BufferGeometry`. We'll go over both eventually. For now
<ide> and we need to tell the material to use vertex colors
<ide>
<ide> To use lighting we need normals. Normals are vectors that specify direction.
<ide> Just like the colors we can specify a normal for the face by setting the `normal`
<del>property on each face with
<add>property on each face with
<ide>
<ide> ```js
<del>face.normal = new THREE.Vector3(...)`
<add>face.normal = new THREE.Vector3(...)
<ide> ```
<ide>
<ide> or we can specify a normal for each vertex by setting the `vertexNormals`
<ide> vertex normals for a smoother look by calling `Geometry.computeVertexNormals`
<ide> +geometry.computeVertexNormals();
<ide> ```
<ide>
<del>Unfortunately a cube is not a good candidate for vertex normals since it
<add>Unfortunately a cube is not a good candidate for vertex normals since it
<ide> means each vertex gets its normal from the
<del>normals of all the faces it shares.
<add>normals of all the faces it shares.
<ide>
<ide> {{{example url="../threejs-custom-geometry-cube-vertex-normals.html" }}}
<ide>
<del>Adding texture coordinates, sometimes called UVs, is done via an array of
<add>Adding texture coordinates, sometimes called UVs, is done via an array of
<ide> layers of parallel arrays to the `faces` array which is set via `Geometry.faceVertexUvs`.
<ide> For our cube we could do something like
<ide>
<ide> function makeInstance(geometry, color, x) {
<ide>
<ide> {{{example url="../threejs-custom-geometry-cube-texcoords.html" }}}
<ide>
<del>Putting that all together lets make a simple heightmap based
<add>Putting that all together, let's make a simple heightmap based
<ide> terrain mesh.
<ide>
<ide> A heightmap based terrain is where you have a 2D array of heights
<ide> It's 64x64 pixels
<ide>
<ide> <div class="threejs_center"><img src="../resources/images/heightmap-64x64.png" style="width: 512px; image-rendering: pixelated;"></div>
<ide>
<del>We'll load that and then generate a heightmap mesh from it.
<add>We'll load that and then generate a heightmap mesh from it.
<ide> We can use the `ImageLoader` to load the image.
<ide>
<ide> ```js
<ide> from the image
<ide>
<ide> For each cell we'll generate 5 vertices. One for each corner of the cell
<ide> and one at the center point of the cell with the average height of the 4
<del>corner heights.
<add>corner heights.
<ide>
<ide> ```js
<ide> const cellsAcross = width - 1;
<ide> We'll then make 4 triangles from those 5 vertices
<ide> ```js
<ide> // create 4 triangles
<ide> geometry.faces.push(
<del> new THREE.Face3(ndx , ndx + 4, ndx + 1),
<add> new THREE.Face3(ndx + 0, ndx + 4, ndx + 1),
<ide> new THREE.Face3(ndx + 1, ndx + 4, ndx + 3),
<ide> new THREE.Face3(ndx + 3, ndx + 4, ndx + 2),
<ide> new THREE.Face3(ndx + 2, ndx + 4, ndx + 0),
<ide> );
<ide>
<del> // add the texture coordinates for each vertex of each face.
<add> // add the texture coordinates for each vertex of each face
<ide> const u0 = x / cellsAcross;
<ide> const v0 = z / cellsAcross;
<ide> const u1 = (x + 1) / cellsDeep;
<ide> and we deleted the code related to spinning the cubes.
<ide> I hope that was a useful instruction to making your own
<ide> geometry using `Geometry`.
<ide>
<del>In [another article](threejs-custom-buffergeometry.html) we'll go over `BufferGeometry`.
<ide>\ No newline at end of file
<add>In [another article](threejs-custom-buffergeometry.html) we'll go over `BufferGeometry`. | 1 |
Python | Python | fix error in test_saving_without_compilation | 75519651bbfd0a31a382eee92a982d9338e0e6d5 | <ide><path>tests/test_model_saving.py
<ide> def test_sequential_model_saving_2():
<ide>
<ide>
<ide> @keras_test
<del>def test_fuctional_model_saving():
<add>def test_functional_model_saving():
<ide> input = Input(shape=(3,))
<ide> x = Dense(2)(input)
<ide> output = Dense(3)(x)
<ide> def test_saving_multiple_metrics_outputs():
<ide>
<ide> @keras_test
<ide> def test_saving_without_compilation():
<add> """Test saving model without compiling.
<add> """
<ide> model = Sequential()
<ide> model.add(Dense(2, input_shape=(3,)))
<ide> model.add(Dense(3))
<del> model.compile(loss='mse', optimizer='sgd', metrics=['acc'])
<ide>
<ide> _, fname = tempfile.mkstemp('.h5')
<ide> save_model(model, fname) | 1 |
Text | Text | add long overdue link to the google trc project | 51e5eca612d24896165ba8f7c83ecd0e8f695aa4 | <ide><path>examples/flax/README.md
<ide> module abstraction using Python dataclasses that leads to concise and explicit c
<ide> All of our JAX/Flax models are designed to run efficiently on Google
<ide> Cloud TPUs. Here is [a guide for running JAX on Google Cloud TPU](https://cloud.google.com/tpu/docs/jax-quickstart-tpu-vm).
<ide>
<add>Consider applying for the [Google TPU Research Cloud project](https://sites.research.google/trc/) for free TPU compute.
<add>
<ide> Each example README contains more details on the specific model and training
<ide> procedure.
<ide> | 1 |
Javascript | Javascript | fix duplicate errors and warnings | c27b46ac8b8f67274a2a38f91a92efd6d729d806 | <ide><path>lib/Compilation.js
<ide> class Compilation {
<ide> if (err) return callback(err);
<ide>
<ide> if (!needBuild) {
<del> for (const err of module.errors) {
<del> this.errors.push(err);
<del> }
<del> for (const err of module.warnings) {
<del> this.warnings.push(err);
<del> }
<ide> if (currentProfile !== undefined) {
<ide> currentProfile.markBuildingEnd();
<ide> } | 1 |
Python | Python | move more reduction ops to forward graph | af1a2eb1f556f97ed2dd94074339e5a1ec159c86 | <ide><path>keras/layers/gru_v2_test.py
<ide> from absl.testing import parameterized
<ide> import numpy as np
<ide> from tensorflow.core.protobuf import rewriter_config_pb2
<add>from tensorflow.python.framework import test_util as tf_test_util
<ide> import keras
<ide> from keras import combinations
<ide> from keras import keras_parameterized
<ide> def test_explicit_device_with_go_backward_and_mask(self):
<ide> outputs_trimmed = lstm(inputs[:, :masksteps])
<ide> self.assertAllClose(outputs_masked[:, -masksteps:], outputs_trimmed)
<ide>
<add> @tf_test_util.enable_output_all_intermediates
<ide> def test_v1_session_behavior(self):
<ide> with tf.compat.v1.get_default_graph().as_default():
<ide> # See b/139132348 for more details.
<ide><path>keras/layers/lstm_v2_test.py
<ide> from absl.testing import parameterized
<ide> import numpy as np
<ide> from tensorflow.core.protobuf import rewriter_config_pb2
<add>from tensorflow.python.framework import test_util as tf_test_util
<ide> import keras
<ide> from keras import keras_parameterized
<ide> from keras import testing_utils
<ide> def test_explicit_device_with_go_backward_and_mask(self):
<ide> outputs_trimmed = lstm(inputs[:, :masksteps])
<ide> self.assertAllClose(outputs_masked[:, -masksteps:], outputs_trimmed)
<ide>
<add> @tf_test_util.enable_output_all_intermediates
<ide> def test_v1_session_behavior(self):
<ide> with tf.compat.v1.get_default_graph().as_default():
<ide> # See b/139132348 for more details.
<ide><path>keras/layers/wrappers_test.py
<ide> from keras.layers import core
<ide> from keras.layers.rnn_cell_wrapper_v2 import ResidualWrapper
<ide> from keras.utils import generic_utils
<add>from tensorflow.python.eager import context
<add>from tensorflow.python.framework import test_util as tf_test_util
<ide> from tensorflow.python.ops.ragged import ragged_tensor
<ide> from tensorflow.python.training.tracking import util as trackable_util
<ide>
<ide> def test_bidirectional_stacked(self):
<ide>
<ide> def test_bidirectional_statefulness(self):
<ide> # Bidirectional and stateful
<del> rnn = keras.layers.SimpleRNN
<del> samples = 2
<del> dim = 2
<del> timesteps = 2
<del> output_dim = 2
<del> mode = 'sum'
<del>
<del> with self.cached_session():
<del> x = np.random.random((samples, timesteps, dim))
<del> target_dim = 2 * output_dim if mode == 'concat' else output_dim
<del> y = np.random.random((samples, target_dim))
<del>
<del> inputs = keras.layers.Input(batch_shape=(1, timesteps, dim))
<del> bidi_rnn = keras.layers.Bidirectional(
<del> rnn(output_dim, stateful=True), merge_mode=mode)
<del> self.assertTrue(bidi_rnn.stateful)
<del> output = bidi_rnn(inputs)
<del> model = keras.models.Model(inputs, output)
<del>
<del> y_1 = model.predict(x, batch_size=1)
<del> model.reset_states()
<del> y_2 = model.predict(x, batch_size=1)
<del>
<del> self.assertAllClose(y_1, y_2)
<del>
<del> model.compile(loss='mse', optimizer='sgd')
<del> model.fit(x, y, epochs=1, batch_size=1)
<add> def run_test():
<add> rnn = keras.layers.SimpleRNN
<add> samples = 2
<add> dim = 2
<add> timesteps = 2
<add> output_dim = 2
<add> mode = 'sum'
<add>
<add> with self.cached_session():
<add> x = np.random.random((samples, timesteps, dim))
<add> target_dim = 2 * output_dim if mode == 'concat' else output_dim
<add> y = np.random.random((samples, target_dim))
<add>
<add> inputs = keras.layers.Input(batch_shape=(1, timesteps, dim))
<add> bidi_rnn = keras.layers.Bidirectional(
<add> rnn(output_dim, stateful=True), merge_mode=mode)
<add> self.assertTrue(bidi_rnn.stateful)
<add> output = bidi_rnn(inputs)
<add> model = keras.models.Model(inputs, output)
<add>
<add> y_1 = model.predict(x, batch_size=1)
<add> model.reset_states()
<add> y_2 = model.predict(x, batch_size=1)
<add>
<add> self.assertAllClose(y_1, y_2)
<add>
<add> model.compile(loss='mse', optimizer='sgd')
<add> model.fit(x, y, epochs=1, batch_size=1)
<add>
<add> if context.executing_eagerly():
<add> run_test()
<add> else:
<add> tf_test_util.enable_output_all_intermediates(run_test)()
<ide>
<ide> @parameterized.parameters(['sum', 'mul', 'ave', 'concat', None])
<ide> def test_Bidirectional_merged_value(self, merge_mode): | 3 |
Ruby | Ruby | leave default_asset_host_protocol unset | 88237daae48c9867fca3b0e14e779d4f4cdd88d0 | <ide><path>actionpack/lib/sprockets/railtie.rb
<ide> module Sprockets
<ide>
<ide> # TODO: Get rid of config.assets.enabled
<ide> class Railtie < ::Rails::Railtie
<del> config.action_controller.default_asset_host_protocol = :relative
<del>
<ide> rake_tasks do
<ide> load "sprockets/assets.rake"
<ide> end
<ide><path>railties/test/application/assets_test.rb
<ide> class ::PostsController < ActionController::Base ; end
<ide> assert_equal 0, files.length, "Expected application.js asset to be removed, but still exists"
<ide> end
<ide>
<add> test "asset urls should use the request's protocol by default" do
<add> app_with_assets_in_view
<add> add_to_config "config.asset_host = 'example.com'"
<add> require "#{app_path}/config/environment"
<add> class ::PostsController < ActionController::Base; end
<add>
<add> get '/posts', {}, {'HTTPS'=>'off'}
<add> assert_match('src="http://example.com/assets/application.js', last_response.body)
<add> get '/posts', {}, {'HTTPS'=>'on'}
<add> assert_match('src="https://example.com/assets/application.js', last_response.body)
<add> end
<add>
<add> test "asset urls should be protocol-relative if no request is in scope" do
<add> app_file "app/assets/javascripts/image_loader.js.erb", 'var src="<%= image_path("rails.png") %>";'
<add> add_to_config "config.assets.precompile = %w{image_loader.js}"
<add> add_to_config "config.asset_host = 'example.com'"
<add> precompile!
<add>
<add> assert_match 'src="//example.com/assets/rails.png"', File.read("#{app_path}/public/assets/image_loader.js")
<add> end
<add>
<add>
<ide> private
<ide>
<ide> def app_with_assets_in_view | 2 |
Ruby | Ruby | pull parent and alias tracker from the nodes | 085bb239f8476003fa06f81e06a7b4a0402401fc
<ide> def make_joins(node)
<ide> }
<ide> end
<ide>
<add> def construct_tables!(parent, node)
<add> node.tables = node.reflection.chain.map { |reflection|
<add> alias_tracker.aliased_table_for(
<add> reflection.table_name,
<add> table_alias_for(reflection, parent, reflection != node.reflection)
<add> )
<add> }.reverse
<add> end
<add>
<add> def table_alias_for(reflection, parent, join)
<add> name = "#{reflection.plural_name}_#{parent.table_name}"
<add> name << "_join" if join
<add> name
<add> end
<add>
<ide> def merge_node(left, right)
<ide> intersection, missing = right.children.map { |node1|
<ide> [left.children.find { |node2| node1.match? node2 }, node1]
<ide> def build_join_association(reflection, parent, join_type)
<ide> raise EagerLoadPolymorphicError.new(reflection)
<ide> end
<ide>
<del> JoinAssociation.new(reflection, join_root.to_a.length, parent, join_type, alias_tracker)
<add> node = JoinAssociation.new(reflection, join_root.to_a.length, join_type)
<add> construct_tables!(parent, node)
<add> node
<ide> end
<ide>
<ide> def construct(ar_parent, parent, row, rs)
<ide><path>activerecord/lib/active_record/associations/join_dependency/join_association.rb
<ide> class JoinAssociation < JoinPart # :nodoc:
<ide> # These implement abstract methods from the superclass
<ide> attr_reader :aliased_prefix
<ide>
<del> attr_reader :tables
<del> attr_reader :alias_tracker
<add> attr_accessor :tables
<ide>
<ide> delegate :options, :through_reflection, :source_reflection, :chain, :to => :reflection
<ide>
<del> def initialize(reflection, index, parent, join_type, alias_tracker)
<del> super(reflection.klass, parent)
<add> def initialize(reflection, index, join_type)
<add> super(reflection.klass)
<ide>
<ide> @reflection = reflection
<del> @alias_tracker = alias_tracker
<ide> @join_type = join_type
<ide> @aliased_prefix = "t#{ index }"
<del> @tables = construct_tables.reverse
<add> @tables = nil
<ide> end
<ide>
<del> def parent_table_name; parent.table_name; end
<del> alias :alias_suffix :parent_table_name
<del>
<ide> def match?(other)
<ide> return true if self == other
<ide> super && reflection == other.reflection
<ide><path>activerecord/lib/active_record/associations/join_dependency/join_base.rb
<ide> module ActiveRecord
<ide> module Associations
<ide> class JoinDependency # :nodoc:
<ide> class JoinBase < JoinPart # :nodoc:
<del> def initialize(klass)
<del> super(klass, nil)
<del> end
<del>
<ide> def match?(other)
<ide> return true if self == other
<ide> super && base_klass == other.base_klass
<ide><path>activerecord/lib/active_record/associations/join_dependency/join_part.rb
<ide> class JoinDependency # :nodoc:
<ide> class JoinPart # :nodoc:
<ide> include Enumerable
<ide>
<del> # A JoinBase instance representing the active record we are joining onto.
<del> # (So in Author.has_many :posts, the Author would be that base record.)
<del> attr_reader :parent
<del>
<ide> # The Active Record class which this join part is associated 'about'; for a JoinBase
<ide> # this is the actual base model, for a JoinAssociation this is the target model of the
<ide> # association.
<ide> attr_reader :base_klass, :children
<ide>
<ide> delegate :table_name, :column_names, :primary_key, :arel_engine, :to => :base_klass
<ide>
<del> def initialize(base_klass, parent)
<add> def initialize(base_klass)
<ide> @base_klass = base_klass
<del> @parent = parent
<ide> @cached_record = {}
<ide> @column_names_with_alias = nil
<ide> @children = [] | 4 |
PHP | PHP | set default charset for sqlsrv driver to utf8 | 2c4964e159f290a6fed48a902cdfdce5a8c1f8dd | <ide><path>config/database.php
<ide> 'database' => env('DB_DATABASE', 'forge'),
<ide> 'username' => env('DB_USERNAME', 'forge'),
<ide> 'password' => env('DB_PASSWORD', ''),
<add> 'charset' => 'utf8',
<ide> 'prefix' => '',
<ide> ],
<ide> | 1 |
PHP | PHP | add test for eventmanager reset | 190b47cf367af22d8b72fa7e9d19c3a6a8846f9a | <ide><path>tests/TestCase/TestSuite/IntegrationTestCaseTest.php
<ide> namespace Cake\Test\TestCase\TestSuite;
<ide>
<ide> use Cake\Core\Configure;
<add>use Cake\Event\EventManager;
<ide> use Cake\Network\Response;
<ide> use Cake\Routing\DispatcherFactory;
<ide> use Cake\Routing\Router;
<ide> public function testAssertResponseContains() {
<ide> $this->assertResponseContains('content');
<ide> }
<ide>
<add>/**
<add> * Test that works in tandem with testEventManagerReset2 to
<add> * test the EventManager reset.
<add> *
<add> * The return value is passed to testEventManagerReset2 as
<add> * an arguments.
<add> *
<add> * @return \Cake\Event\EventManager
<add> */
<add> public function testEventManagerReset1() {
<add> return EventManager::instance();
<add> }
<add>
<add>/**
<add> * Test if the EventManager is reset between tests.
<add> *
<add> * @depends testEventManagerReset1
<add> * @return void
<add> */
<add> public function testEventManagerReset2($prevEventManager) {
<add> $this->assertNotSame($prevEventManager, EventManager::instance());
<add> }
<add>
<ide> } | 1 |
Ruby | Ruby | add tests for macos check | 77105b809a83583e3da5726105dc9cd913112913 | <ide><path>Library/Homebrew/rubocops/lines_cop.rb
<ide> def audit_formula(_node, _class_node, _parent_class_node, body_node)
<ide> next unless method_called?(m, :new)
<ide> problem "`depends_on` can take requirement classes instead of instances"
<ide> end
<del> #
<del> # os = [:leopard?, :snow_leopard?, :lion?, :mountain_lion?]
<del> # os.each do |version|
<del> # find_instance_method_call(body_node, :MacOS, version) do |m|
<del> # problem "\"#{m.source}\" is deprecated, use a comparison to MacOS.version instead"
<del> # end
<del> # end
<add>
<add> os = [:leopard?, :snow_leopard?, :lion?, :mountain_lion?]
<add> os.each do |version|
<add> find_instance_method_call(body_node, "MacOS", version) do |m|
<add> problem "\"#{m.source}\" is deprecated, use a comparison to MacOS.version instead"
<add> end
<add> end
<ide> #
<ide> # dirPattern(body_node) do |m|
<ide> # next unless m =~ /\[("[^\*{},]+")\]/
<ide><path>Library/Homebrew/test/rubocops/lines_cop_spec.rb
<ide> class Foo < Formula
<ide>
<ide> inspect_source(cop, source)
<ide>
<add> expected_offenses.zip(cop.offenses).each do |expected, actual|
<add> expect_offense(expected, actual)
<add> end
<add> end
<add> it "with old style OS check" do
<add> source = <<-EOS.undent
<add> class Foo < Formula
<add> desc "foo"
<add> url 'http://example.com/foo-1.0.tgz'
<add> depends_on :foo if MacOS.snow_leopard?
<add> end
<add> EOS
<add>
<add> expected_offenses = [{ message: "\"MacOS.snow_leopard?\" is deprecated, use a comparison to MacOS.version instead",
<add> severity: :convention,
<add> line: 4,
<add> column: 21,
<add> source: source }]
<add>
<add> inspect_source(cop, source)
<add>
<ide> expected_offenses.zip(cop.offenses).each do |expected, actual|
<ide> expect_offense(expected, actual)
<ide> end | 2 |
Go | Go | transfer uid and gid to volume. fixes | 9cfbaecfe5b1ccb1ab21c66400f3a1ba1b33da1e | <ide><path>container.go
<ide> func (container *Container) Start(hostConfig *HostConfig) error {
<ide> }
<ide> }
<ide> }
<add> var stat syscall.Stat_t
<add> if err := syscall.Stat(rootVolPath, &stat); err != nil {
<add> return err
<add> }
<add> var srcStat syscall.Stat_t
<add> if err := syscall.Stat(srcPath, &srcStat); err != nil {
<add> return err
<add> }
<add> if stat.Uid != srcStat.Uid || stat.Gid != srcStat.Gid {
<add> if err := os.Chown(srcPath, int(stat.Uid), int(stat.Gid)); err != nil {
<add> return err
<add> }
<add> }
<ide> }
<ide> }
<ide>
<ide><path>container_test.go
<ide> func tempDir(t *testing.T) string {
<ide> return tmpDir
<ide> }
<ide>
<add>// Test for #1737
<add>func TestCopyVolumeUidGid(t *testing.T) {
<add> r := mkRuntime(t)
<add> defer nuke(r)
<add>
<add> // Add directory not owned by root
<add> container1, _, _ := mkContainer(r, []string{"_", "/bin/sh", "-c", "mkdir -p /hello && chown daemon.daemon /hello"}, t)
<add> defer r.Destroy(container1)
<add>
<add> if container1.State.Running {
<add> t.Errorf("Container shouldn't be running")
<add> }
<add> if err := container1.Run(); err != nil {
<add> t.Fatal(err)
<add> }
<add> if container1.State.Running {
<add> t.Errorf("Container shouldn't be running")
<add> }
<add>
<add> rwTar, err := container1.ExportRw()
<add> if err != nil {
<add> t.Error(err)
<add> }
<add> img, err := r.graph.Create(rwTar, container1, "unit test commited image", "", nil)
<add> if err != nil {
<add> t.Error(err)
<add> }
<add>
<add> // Test that the uid and gid is copied from the image to the volume
<add> tmpDir1 := tempDir(t)
<add> defer os.RemoveAll(tmpDir1)
<add> stdout1, _ := runContainer(r, []string{"-v", fmt.Sprintf("%s:/hello", tmpDir1), img.ID, "stat", "-c", "%U %G", "/hello"}, t)
<add> if !strings.Contains(stdout1, "daemon daemon") {
<add> t.Fatal("Container failed to transfer uid and gid to volume")
<add> }
<add>
<add> // Test that the uid and gid is not copied from the image when the volume is read only
<add> tmpDir2 := tempDir(t)
<add> defer os.RemoveAll(tmpDir1)
<add> stdout2, _ := runContainer(r, []string{"-v", fmt.Sprintf("%s:/hello:ro", tmpDir2), img.ID, "stat", "-c", "%U %G", "/hello"}, t)
<add> if strings.Contains(stdout2, "daemon daemon") {
<add> t.Fatal("Container transfered uid and gid to volume")
<add> }
<add>}
<add>
<ide> // Test for #1582
<ide> func TestCopyVolumeContent(t *testing.T) {
<ide> r := mkRuntime(t) | 2 |
Java | Java | reduce platformtransactionmanager lookups | 4e257243f2dd2fdd5625286d01976b346a74f5d7 | <ide><path>spring-tx/src/main/java/org/springframework/transaction/interceptor/TransactionAspectSupport.java
<ide>
<ide> import java.lang.reflect.Method;
<ide> import java.util.Properties;
<add>import java.util.concurrent.ConcurrentHashMap;
<ide>
<ide> /**
<ide> * Base class for transactional aspects, such as the {@link TransactionInterceptor}
<ide> public abstract class TransactionAspectSupport implements BeanFactoryAware, Init
<ide> new NamedThreadLocal<TransactionInfo>("Current aspect-driven transaction");
<ide>
<ide>
<add> private final ConcurrentHashMap<String, PlatformTransactionManager> transactionManagerCache =
<add> new ConcurrentHashMap<String, PlatformTransactionManager>();
<add>
<ide> /**
<ide> * Subclasses can use this to return the current TransactionInfo.
<ide> * Only subclasses that cannot handle all operations in one method,
<ide> protected PlatformTransactionManager determineTransactionManager(TransactionAttr
<ide> }
<ide> String qualifier = txAttr.getQualifier();
<ide> if (StringUtils.hasLength(qualifier)) {
<del> return BeanFactoryAnnotationUtils.qualifiedBeanOfType(this.beanFactory, PlatformTransactionManager.class, qualifier);
<add> PlatformTransactionManager txManager = this.transactionManagerCache.get(qualifier);
<add> if (txManager == null) {
<add> txManager = BeanFactoryAnnotationUtils.qualifiedBeanOfType(
<add> this.beanFactory, PlatformTransactionManager.class, qualifier);
<add> this.transactionManagerCache.putIfAbsent(qualifier, txManager);
<add> }
<add> return txManager;
<ide> }
<ide> else if (this.transactionManagerBeanName != null) {
<del> return this.beanFactory.getBean(this.transactionManagerBeanName, PlatformTransactionManager.class);
<add> PlatformTransactionManager txManager = this.transactionManagerCache.get(this.transactionManagerBeanName);
<add> if (txManager == null) {
<add> txManager = this.beanFactory.getBean(
<add> this.transactionManagerBeanName, PlatformTransactionManager.class);
<add> this.transactionManagerCache.putIfAbsent(this.transactionManagerBeanName, txManager);
<add> }
<add> return txManager;
<ide> }
<ide> else {
<del> return this.beanFactory.getBean(PlatformTransactionManager.class);
<add> // Lookup the default transaction manager and store it for next call
<add> this.transactionManager = this.beanFactory.getBean(PlatformTransactionManager.class);
<add> return this.transactionManager;
<ide> }
<ide> }
<ide>
<ide><path>spring-tx/src/test/java/org/springframework/transaction/interceptor/AbstractTransactionAspectTests.java
<ide>
<ide> import java.lang.reflect.Method;
<ide>
<del>import junit.framework.TestCase;
<add>import org.junit.Test;
<ide>
<ide> import org.springframework.dao.OptimisticLockingFailureException;
<ide> import org.springframework.tests.sample.beans.ITestBean;
<ide> import org.springframework.transaction.UnexpectedRollbackException;
<ide> import org.springframework.transaction.interceptor.TransactionAspectSupport.TransactionInfo;
<ide>
<add>import static org.junit.Assert.*;
<ide> import static org.mockito.BDDMockito.*;
<ide>
<ide> /**
<ide> * @author Rod Johnson
<ide> * @since 16.03.2003
<ide> */
<del>public abstract class AbstractTransactionAspectTests extends TestCase {
<add>public abstract class AbstractTransactionAspectTests {
<ide>
<ide> protected Method exceptionalMethod;
<ide>
<ide> public AbstractTransactionAspectTests() {
<ide> }
<ide>
<ide>
<del> public void testNoTransaction() throws Exception {
<add> @Test
<add> public void noTransaction() throws Exception {
<ide> PlatformTransactionManager ptm = mock(PlatformTransactionManager.class);
<ide>
<ide> TestBean tb = new TestBean();
<ide> public void testNoTransaction() throws Exception {
<ide> /**
<ide> * Check that a transaction is created and committed.
<ide> */
<del> public void testTransactionShouldSucceed() throws Exception {
<add> @Test
<add> public void transactionShouldSucceed() throws Exception {
<ide> TransactionAttribute txatt = new DefaultTransactionAttribute();
<ide>
<ide> MapTransactionAttributeSource tas = new MapTransactionAttributeSource();
<ide> public void testTransactionShouldSucceed() throws Exception {
<ide> * Check that a transaction is created and committed using
<ide> * CallbackPreferringPlatformTransactionManager.
<ide> */
<del> public void testTransactionShouldSucceedWithCallbackPreference() throws Exception {
<add> @Test
<add> public void transactionShouldSucceedWithCallbackPreference() throws Exception {
<ide> TransactionAttribute txatt = new DefaultTransactionAttribute();
<ide>
<ide> MapTransactionAttributeSource tas = new MapTransactionAttributeSource();
<ide> public void testTransactionShouldSucceedWithCallbackPreference() throws Exceptio
<ide> assertFalse(ptm.getStatus().isRollbackOnly());
<ide> }
<ide>
<del> public void testTransactionExceptionPropagatedWithCallbackPreference() throws Throwable {
<add> @Test
<add> public void transactionExceptionPropagatedWithCallbackPreference() throws Throwable {
<ide> TransactionAttribute txatt = new DefaultTransactionAttribute();
<ide>
<ide> MapTransactionAttributeSource tas = new MapTransactionAttributeSource();
<ide> public void testTransactionExceptionPropagatedWithCallbackPreference() throws Th
<ide> /**
<ide> * Check that two transactions are created and committed.
<ide> */
<del> public void testTwoTransactionsShouldSucceed() throws Exception {
<add> @Test
<add> public void twoTransactionsShouldSucceed() throws Exception {
<ide> TransactionAttribute txatt = new DefaultTransactionAttribute();
<ide>
<ide> MapTransactionAttributeSource tas1 = new MapTransactionAttributeSource();
<ide> public void testTwoTransactionsShouldSucceed() throws Exception {
<ide> /**
<ide> * Check that a transaction is created and committed.
<ide> */
<del> public void testTransactionShouldSucceedWithNotNew() throws Exception {
<add> @Test
<add> public void transactionShouldSucceedWithNotNew() throws Exception {
<ide> TransactionAttribute txatt = new DefaultTransactionAttribute();
<ide>
<ide> MapTransactionAttributeSource tas = new MapTransactionAttributeSource();
<ide> public void testTransactionShouldSucceedWithNotNew() throws Exception {
<ide> verify(ptm).commit(status);
<ide> }
<ide>
<del> public void testEnclosingTransactionWithNonTransactionMethodOnAdvisedInside() throws Throwable {
<add> @Test
<add> public void enclosingTransactionWithNonTransactionMethodOnAdvisedInside() throws Throwable {
<ide> TransactionAttribute txatt = new DefaultTransactionAttribute();
<ide>
<ide> MapTransactionAttributeSource tas = new MapTransactionAttributeSource();
<ide> public String getName() {
<ide> verify(ptm).commit(status);
<ide> }
<ide>
<del> public void testEnclosingTransactionWithNestedTransactionOnAdvisedInside() throws Throwable {
<add> @Test
<add> public void enclosingTransactionWithNestedTransactionOnAdvisedInside() throws Throwable {
<ide> final TransactionAttribute outerTxatt = new DefaultTransactionAttribute();
<ide> final TransactionAttribute innerTxatt = new DefaultTransactionAttribute(TransactionDefinition.PROPAGATION_NESTED);
<ide>
<ide> public String getName() {
<ide> verify(ptm).commit(outerStatus);
<ide> }
<ide>
<del> public void testRollbackOnCheckedException() throws Throwable {
<add> @Test
<add> public void rollbackOnCheckedException() throws Throwable {
<ide> doTestRollbackOnException(new Exception(), true, false);
<ide> }
<ide>
<del> public void testNoRollbackOnCheckedException() throws Throwable {
<add> @Test
<add> public void noRollbackOnCheckedException() throws Throwable {
<ide> doTestRollbackOnException(new Exception(), false, false);
<ide> }
<ide>
<del> public void testRollbackOnUncheckedException() throws Throwable {
<add> @Test
<add> public void rollbackOnUncheckedException() throws Throwable {
<ide> doTestRollbackOnException(new RuntimeException(), true, false);
<ide> }
<ide>
<del> public void testNoRollbackOnUncheckedException() throws Throwable {
<add> @Test
<add> public void noRollbackOnUncheckedException() throws Throwable {
<ide> doTestRollbackOnException(new RuntimeException(), false, false);
<ide> }
<ide>
<del> public void testRollbackOnCheckedExceptionWithRollbackException() throws Throwable {
<add> @Test
<add> public void rollbackOnCheckedExceptionWithRollbackException() throws Throwable {
<ide> doTestRollbackOnException(new Exception(), true, true);
<ide> }
<ide>
<del> public void testNoRollbackOnCheckedExceptionWithRollbackException() throws Throwable {
<add> @Test
<add> public void noRollbackOnCheckedExceptionWithRollbackException() throws Throwable {
<ide> doTestRollbackOnException(new Exception(), false, true);
<ide> }
<ide>
<del> public void testRollbackOnUncheckedExceptionWithRollbackException() throws Throwable {
<add> @Test
<add> public void rollbackOnUncheckedExceptionWithRollbackException() throws Throwable {
<ide> doTestRollbackOnException(new RuntimeException(), true, true);
<ide> }
<ide>
<del> public void testNoRollbackOnUncheckedExceptionWithRollbackException() throws Throwable {
<add> @Test
<add> public void noRollbackOnUncheckedExceptionWithRollbackException() throws Throwable {
<ide> doTestRollbackOnException(new RuntimeException(), false, true);
<ide> }
<ide>
<ide> public boolean rollbackOn(Throwable t) {
<ide> /**
<ide> * Test that TransactionStatus.setRollbackOnly works.
<ide> */
<del> public void testProgrammaticRollback() throws Exception {
<add> @Test
<add> public void programmaticRollback() throws Exception {
<ide> TransactionAttribute txatt = new DefaultTransactionAttribute();
<ide>
<ide> Method m = getNameMethod;
<ide> public String getName() {
<ide> * Simulate a transaction infrastructure failure.
<ide> * Shouldn't invoke target method.
<ide> */
<del> public void testCannotCreateTransaction() throws Exception {
<add> @Test
<add> public void cannotCreateTransaction() throws Exception {
<ide> TransactionAttribute txatt = new DefaultTransactionAttribute();
<ide>
<ide> Method m = getNameMethod;
<ide> public String getName() {
<ide> * Check that the target method was invoked, but that the transaction
<ide> * infrastructure exception was thrown to the client
<ide> */
<del> public void testCannotCommitTransaction() throws Exception {
<add> @Test
<add> public void cannotCommitTransaction() throws Exception {
<ide> TransactionAttribute txatt = new DefaultTransactionAttribute();
<ide>
<ide> Method m = setNameMethod;
<ide><path>spring-tx/src/test/java/org/springframework/transaction/interceptor/TransactionInterceptorTests.java
<ide>
<ide> package org.springframework.transaction.interceptor;
<ide>
<add>import static org.junit.Assert.*;
<add>import static org.mockito.BDDMockito.given;
<add>import static org.mockito.Mockito.*;
<add>
<ide> import java.io.Serializable;
<ide> import java.util.Properties;
<ide>
<add>import org.junit.Rule;
<add>import org.junit.Test;
<add>import org.junit.rules.ExpectedException;
<add>
<ide> import org.springframework.aop.framework.ProxyFactory;
<add>import org.springframework.beans.factory.BeanFactory;
<add>import org.springframework.beans.factory.NoSuchBeanDefinitionException;
<ide> import org.springframework.transaction.PlatformTransactionManager;
<ide> import org.springframework.transaction.TransactionDefinition;
<ide> import org.springframework.transaction.TransactionException;
<ide> */
<ide> public class TransactionInterceptorTests extends AbstractTransactionAspectTests {
<ide>
<add> @Rule
<add> public final ExpectedException thrown = ExpectedException.none();
<add>
<ide> @Override
<ide> protected Object advised(Object target, PlatformTransactionManager ptm, TransactionAttributeSource[] tas) throws Exception {
<ide> TransactionInterceptor ti = new TransactionInterceptor();
<ide> protected Object advised(Object target, PlatformTransactionManager ptm, Transact
<ide> return pf.getProxy();
<ide> }
<ide>
<del>/**
<add> /**
<ide> * A TransactionInterceptor should be serializable if its
<ide> * PlatformTransactionManager is.
<ide> */
<del> public void testSerializableWithAttributeProperties() throws Exception {
<add> @Test
<add> public void serializableWithAttributeProperties() throws Exception {
<ide> TransactionInterceptor ti = new TransactionInterceptor();
<ide> Properties props = new Properties();
<ide> props.setProperty("methodName", "PROPAGATION_REQUIRED");
<ide> public void testSerializableWithAttributeProperties() throws Exception {
<ide> assertNotNull(ti.getTransactionAttributeSource());
<ide> }
<ide>
<del> public void testSerializableWithCompositeSource() throws Exception {
<add> @Test
<add> public void serializableWithCompositeSource() throws Exception {
<ide> NameMatchTransactionAttributeSource tas1 = new NameMatchTransactionAttributeSource();
<ide> Properties props = new Properties();
<ide> props.setProperty("methodName", "PROPAGATION_REQUIRED");
<ide> public void testSerializableWithCompositeSource() throws Exception {
<ide> assertTrue(ctas.getTransactionAttributeSources()[1] instanceof NameMatchTransactionAttributeSource);
<ide> }
<ide>
<add> @Test
<add> public void determineTransactionManagerWithQualifierUnknown() {
<add> BeanFactory beanFactory = mock(BeanFactory.class);
<add> TransactionInterceptor ti = createTestTransactionInterceptor(beanFactory);
<add> DefaultTransactionAttribute attribute = new DefaultTransactionAttribute();
<add> attribute.setQualifier("fooTransactionManager");
<add>
<add> thrown.expect(NoSuchBeanDefinitionException.class);
<add> thrown.expectMessage("'fooTransactionManager'");
<add> ti.determineTransactionManager(attribute);
<add> }
<add>
<add> @Test
<add> public void determineTransactionManagerWithQualifierSeveralTimes() {
<add> BeanFactory beanFactory = mock(BeanFactory.class);
<add> TransactionInterceptor ti = createTestTransactionInterceptor(beanFactory);
<add>
<add> PlatformTransactionManager txManager = mock(PlatformTransactionManager.class);
<add> given(beanFactory.containsBean("fooTransactionManager")).willReturn(true);
<add> given(beanFactory.getBean("fooTransactionManager", PlatformTransactionManager.class)).willReturn(txManager);
<add>
<add> DefaultTransactionAttribute attribute = new DefaultTransactionAttribute();
<add> attribute.setQualifier("fooTransactionManager");
<add> PlatformTransactionManager actual = ti.determineTransactionManager(attribute);
<add> assertSame(txManager, actual);
<add>
<add> // Call again, should be cached
<add> PlatformTransactionManager actual2 = ti.determineTransactionManager(attribute);
<add> assertSame(txManager, actual2);
<add> verify(beanFactory, times(1)).containsBean("fooTransactionManager");
<add> verify(beanFactory, times(1)).getBean("fooTransactionManager", PlatformTransactionManager.class);
<add> }
<add>
<add> @Test
<add> public void determineTransactionManagerWithBeanNameSeveralTimes() {
<add> BeanFactory beanFactory = mock(BeanFactory.class);
<add> TransactionInterceptor ti = createTestTransactionInterceptor(beanFactory);
<add> ti.setTransactionManagerBeanName("fooTransactionManager");
<add>
<add> PlatformTransactionManager txManager = mock(PlatformTransactionManager.class);
<add> given(beanFactory.getBean("fooTransactionManager", PlatformTransactionManager.class)).willReturn(txManager);
<add>
<add> DefaultTransactionAttribute attribute = new DefaultTransactionAttribute();
<add> PlatformTransactionManager actual = ti.determineTransactionManager(attribute);
<add> assertSame(txManager, actual);
<add>
<add> // Call again, should be cached
<add> PlatformTransactionManager actual2 = ti.determineTransactionManager(attribute);
<add> assertSame(txManager, actual2);
<add> verify(beanFactory, times(1)).getBean("fooTransactionManager", PlatformTransactionManager.class);
<add> }
<add>
<add> @Test
<add> public void determineTransactionManagerDefaultSeveralTimes() {
<add> BeanFactory beanFactory = mock(BeanFactory.class);
<add> TransactionInterceptor ti = createTestTransactionInterceptor(beanFactory);
<add>
<add> PlatformTransactionManager txManager = mock(PlatformTransactionManager.class);
<add> given(beanFactory.getBean(PlatformTransactionManager.class)).willReturn(txManager);
<add>
<add> DefaultTransactionAttribute attribute = new DefaultTransactionAttribute();
<add> PlatformTransactionManager actual = ti.determineTransactionManager(attribute);
<add> assertSame(txManager, actual);
<add>
<add> // Call again, should be cached
<add> PlatformTransactionManager actual2 = ti.determineTransactionManager(attribute);
<add> assertSame(txManager, actual2);
<add> verify(beanFactory, times(1)).getBean(PlatformTransactionManager.class);
<add> }
<add>
<add> private TransactionInterceptor createTestTransactionInterceptor(BeanFactory beanFactory) {
<add> TransactionInterceptor ti = new TransactionInterceptor();
<add> ti.setBeanFactory(beanFactory);
<add> ti.setTransactionAttributeSource(new NameMatchTransactionAttributeSource());
<add> ti.afterPropertiesSet();
<add> return ti;
<add> }
<add>
<ide>
<ide> /**
<ide> * We won't use this: we just want to know it's serializable. | 3 |
Javascript | Javascript | expose fastfs on the dependencygraph instance | 700b848826e7ec5fb836b1e69e872ee52b0913c1
<ide> class DependencyGraph {
<ide> return this._moduleCache.getModule(entryPath).getDependencies();
<ide> }
<ide>
<del> stat(filePath) {
<del> return this._fastfs.stat(filePath);
<add> getFS() {
<add> return this._fastfs;
<ide> }
<ide>
<ide> /**
<ide><path>packager/react-packager/src/Resolver/index.js
<ide> class Resolver {
<ide> }
<ide>
<ide> stat(filePath) {
<del> return this._depGraph.stat(filePath);
<add> return this._depGraph.getFS().stat(filePath);
<ide> }
<ide>
<ide> getModuleForPath(entryFile) { | 2 |
Java | Java | remove redundant cast in exceptions | 620981ad1e57eac5d13b94ac5cde4078660ffac4 | <ide><path>src/main/java/rx/exceptions/Exceptions.java
<ide> public static void throwIfFatal(Throwable t) {
<ide> if (t instanceof OnErrorNotImplementedException) {
<ide> throw (OnErrorNotImplementedException) t;
<ide> } else if (t instanceof OnErrorFailedException) {
<del> Throwable cause = ((OnErrorFailedException) t).getCause();
<add> Throwable cause = t.getCause();
<ide> if (cause instanceof RuntimeException) {
<ide> throw (RuntimeException) cause;
<ide> } else { | 1 |
Ruby | Ruby | fix broken proc syntax for 1.9.3 | b769b5fc1405659f8e425e174e26509ba9ea73ed | <ide><path>activerecord/test/models/author.rb
<ide> def ratings
<ide> has_many :posts_with_default_include, :class_name => 'PostWithDefaultInclude'
<ide> has_many :comments_on_posts_with_default_include, :through => :posts_with_default_include, :source => :comments
<ide>
<del> has_many :posts_with_signature, -> (record) { where("posts.title LIKE ?", "%by #{record.name.downcase}%") }, class_name: "Post"
<add> has_many :posts_with_signature, ->(record) { where("posts.title LIKE ?", "%by #{record.name.downcase}%") }, class_name: "Post"
<ide>
<ide> scope :relation_include_posts, -> { includes(:posts) }
<ide> scope :relation_include_tags, -> { includes(:tags) } | 1 |
Go | Go | fix a race in daemon/logger.testcopier | ab533f06510a14a023c056604f9520741073acd3 | <ide><path>daemon/logger/copier_test.go
<ide> import (
<ide> "bytes"
<ide> "encoding/json"
<ide> "io"
<add> "sync"
<ide> "testing"
<ide> "time"
<ide> )
<ide>
<ide> type TestLoggerJSON struct {
<ide> *json.Encoder
<add> mu sync.Mutex
<ide> delay time.Duration
<ide> }
<ide>
<ide> func (l *TestLoggerJSON) Log(m *Message) error {
<ide> if l.delay > 0 {
<ide> time.Sleep(l.delay)
<ide> }
<add> l.mu.Lock()
<add> defer l.mu.Unlock()
<ide> return l.Encode(m)
<ide> }
<ide> | 1 |
Javascript | Javascript | unify build of challenges | 2168b6115161908121ec493d2119ecbfc58603e3 | <ide><path>client/src/client/workers/test-evaluator.js
<ide> import chai from 'chai';
<ide> import '@babel/polyfill';
<add>import __toString from 'lodash/toString';
<ide>
<ide> const oldLog = self.console.log.bind(self.console);
<ide> self.console.log = function proxyConsole(...args) {
<ide> self.console.log = function proxyConsole(...args) {
<ide> };
<ide>
<ide> onmessage = async e => {
<del> const { script: __test, code } = e.data;
<ide> /* eslint-disable no-unused-vars */
<add> const { code = '' } = e.data;
<ide> const assert = chai.assert;
<ide> // Fake Deep Equal dependency
<ide> const DeepEqual = (a, b) => JSON.stringify(a) === JSON.stringify(b);
<ide> /* eslint-enable no-unused-vars */
<ide> try {
<ide> // eslint-disable-next-line no-eval
<del> const testResult = eval(__test);
<add> const testResult = eval(e.data.script);
<ide> if (typeof testResult === 'function') {
<del> await testResult(() => code);
<add> await testResult(fileName => __toString(e.data.sources[fileName]));
<ide> }
<ide> self.postMessage({ pass: true });
<ide> } catch (err) {
<ide><path>client/src/templates/Challenges/rechallenge/builders.js
<ide> export const cssToHtml = cond([
<ide>
<ide> // FileStream::concatHtml(
<ide> // required: [ ...Object ],
<del>// template: String
<del>// ) => Observable[{ build: String, sources: Dictionary }]
<del>export function concatHtml(required, template, files) {
<add>// template: String,
<add>// files: [ polyVinyl ]
<add>// ) => String
<add>export function concatHtml(required, template, files = []) {
<ide> const createBody = template ? _template(template) : defaultTemplate;
<del> const sourceMap = Promise.all(files).then(files =>
<del> files.reduce((sources, file) => {
<del> sources[file.name] = file.source || file.contents;
<del> return sources;
<del> }, {})
<del> );
<ide>
<ide> const head = required
<ide> .map(({ link, src }) => {
<ide> A required file can not have both a src and a link: src = ${src}, link = ${link}
<ide> }
<ide> return '';
<ide> })
<del> .reduce((head, required) => [...head, required], [])
<del> .reduce((head, element, index, thisArray) => {
<del> if (index + 1 === thisArray.length) {
<del> return `<head>${head.concat(element)}</head>`;
<del> }
<del> return head.concat(element);
<del> }, '');
<add> .reduce((head, element) => head.concat(element), '');
<ide>
<del> const body = Promise.all(files).then(files =>
<del> files
<del> .reduce((body, file) => [...body, file.contents + htmlCatch], [])
<del> .map(source => createBody({ source }))
<add> const source = files.reduce(
<add> (source, file) => source.concat(file.contents, htmlCatch),
<add> ''
<ide> );
<ide>
<del> const frameRunner =
<del> '<script src="/js/frame-runner.js" type="text/javascript"></script>';
<del>
<del> return (
<del> Promise.all([head, body, frameRunner, sourceMap]).then(
<del> ([head, body, frameRunner, sourceMap]) => ({
<del> build: head + frameRunner + body,
<del> sources: sourceMap
<del> })
<del> )
<del> );
<add> return `<head>${head}</head>${createBody({ source })}`;
<ide> }
<ide><path>client/src/templates/Challenges/redux/execute-challenge-saga.js
<ide> import {
<ide> initLogs,
<ide> updateLogs,
<ide> logsToConsole,
<del> updateTests,
<del> challengeFilesSelector
<add> updateTests
<ide> } from './';
<ide>
<ide> import {
<del> buildJSFromFiles,
<del> buildHtmlFromFiles,
<add> buildJSChallenge,
<add> buildDOMChallenge,
<ide> buildBackendChallenge
<ide> } from '../utils/build';
<ide>
<ide> function* ExecuteChallengeSaga() {
<ide> yield put(initConsole('// running tests'));
<ide> yield fork(logToConsole, consoleProxy);
<ide>
<add> const state = yield select();
<add>
<ide> let testResults;
<ide> switch (challengeType) {
<ide> case js:
<ide> case bonfire:
<del> testResults = yield ExecuteJSChallengeSaga(consoleProxy);
<add> testResults = yield ExecuteJSChallengeSaga(state, consoleProxy);
<ide> break;
<ide> case backend:
<del> testResults = yield ExecuteBackendChallengeSaga(consoleProxy);
<add> testResults = yield ExecuteBackendChallengeSaga(state, consoleProxy);
<ide> break;
<ide> default:
<del> testResults = yield ExecuteDOMChallengeSaga(consoleProxy);
<add> testResults = yield ExecuteDOMChallengeSaga(state, consoleProxy);
<ide> }
<ide>
<ide> yield put(updateTests(testResults));
<ide> function* logToConsole(channel) {
<ide> });
<ide> }
<ide>
<del>function* ExecuteJSChallengeSaga(proxyLogger) {
<del> const files = yield select(challengeFilesSelector);
<del> const { code, solution } = yield call(buildJSFromFiles, files);
<add>function* ExecuteJSChallengeSaga(state, proxyLogger) {
<add> const { build, sources } = yield call(buildJSChallenge, state);
<add> const code = sources && 'index' in sources ? sources['index'] : '';
<ide>
<ide> const log = args => proxyLogger.put(args);
<ide> testWorker.on('LOG', log);
<ide>
<ide> try {
<ide> return yield call(executeTests, (testString, testTimeout) =>
<ide> testWorker
<del> .execute({ script: solution + '\n' + testString, code }, testTimeout)
<add> .execute(
<add> { script: build + '\n' + testString, code, sources },
<add> testTimeout
<add> )
<ide> .then(result => {
<ide> testWorker.killWorker();
<ide> return result;
<ide> function createTestFrame(state, ctx, proxyLogger) {
<ide> });
<ide> }
<ide>
<del>function* ExecuteDOMChallengeSaga(proxyLogger) {
<del> const state = yield select();
<del> const ctx = yield call(buildHtmlFromFiles, state);
<add>function* ExecuteDOMChallengeSaga(state, proxyLogger) {
<add> const ctx = yield call(buildDOMChallenge, state);
<ide>
<ide> yield call(createTestFrame, state, ctx, proxyLogger);
<ide> // wait for a code execution on a "ready" event in jQuery challenges
<ide> function* ExecuteDOMChallengeSaga(proxyLogger) {
<ide> }
<ide>
<ide> // TODO: use a web worker
<del>function* ExecuteBackendChallengeSaga(proxyLogger) {
<del> const state = yield select();
<add>function* ExecuteBackendChallengeSaga(state, proxyLogger) {
<ide> const ctx = yield call(buildBackendChallenge, state);
<ide>
<ide> yield call(createTestFrame, state, ctx, proxyLogger);
<ide> function* updateMainSaga() {
<ide> }
<ide> const state = yield select();
<ide> const frameMain = yield call(createMainFramer, document, state);
<del> const ctx = yield call(buildHtmlFromFiles, state);
<add> const ctx = yield call(buildDOMChallenge, state);
<ide> yield call(frameMain, ctx);
<ide> } catch (err) {
<ide> console.error(err);
<ide><path>client/src/templates/Challenges/utils/build.js
<ide> import { transformers, testJS$JSX } from '../rechallenge/transformers';
<ide> import { cssToHtml, jsToHtml, concatHtml } from '../rechallenge/builders.js';
<ide> import { isPromise } from './polyvinyl';
<ide>
<del>const frameRunner =
<del> "<script src='/js/frame-runner.js' type='text/javascript'></script>";
<add>const frameRunner = [
<add> {
<add> src: '/js/frame-runner.js'
<add> }
<add>];
<ide>
<ide> const globalRequires = [
<ide> {
<ide> const pipeLine = flow(
<ide> applyFunctions(toHtml)
<ide> );
<ide>
<del>export function buildHtmlFromFiles(state) {
<add>function buildSourceMap(files) {
<add> return files.reduce((sources, file) => {
<add> sources[file.name] = file.source || file.contents;
<add> return sources;
<add> }, {});
<add>}
<add>
<add>export function buildDOMChallenge(state) {
<ide> const files = challengeFilesSelector(state);
<ide> const { required = [], template } = challengeMetaSelector(state);
<del> const finalRequires = [...globalRequires, ...required];
<del> const requiredFiles = Object.keys(files)
<add> const finalRequires = [...globalRequires, ...required, ...frameRunner];
<add> const finalFiles = Object.keys(files)
<ide> .map(key => files[key])
<ide> .filter(filterJSIfDisabled(state))
<del> .filter(Boolean);
<del> const finalFiles = requiredFiles.map(pipeLine);
<del> return concatHtml(finalRequires, template, finalFiles);
<add> .filter(Boolean)
<add> .map(pipeLine);
<add> return Promise.all(finalFiles).then(files => ({
<add> build: concatHtml(finalRequires, template, files),
<add> sources: buildSourceMap(files)
<add> }));
<ide> }
<ide>
<del>export function buildJSFromFiles(files) {
<add>export function buildJSChallenge(state) {
<add> const files = challengeFilesSelector(state);
<ide> const pipeLine = flow(
<ide> applyFunctions(throwers),
<ide> applyFunctions(transformers)
<ide> );
<ide> const finalFiles = Object.keys(files)
<ide> .map(key => files[key])
<ide> .map(pipeLine);
<del> const sourceMap = Promise.all(finalFiles).then(files =>
<del> files.reduce((sources, file) => {
<del> sources[file.name] = file.source || file.contents;
<del> return sources;
<del> }, {})
<del> );
<del> const body = Promise.all(finalFiles).then(files =>
<del> files
<add> return Promise.all(finalFiles).then(files => ({
<add> build: files
<ide> .reduce(
<ide> (body, file) => [
<ide> ...body,
<ide> file.head + '\n' + file.contents + '\n' + file.tail
<ide> ],
<ide> []
<ide> )
<del> .join('/n')
<del> );
<del> return Promise.all([body, sourceMap]).then(([body, sources]) => ({
<del> solution: body,
<del> code: sources && 'index' in sources ? sources['index'] : ''
<add> .join('/n'),
<add> sources: buildSourceMap(files)
<ide> }));
<ide> }
<ide>
<ide> export function buildBackendChallenge(state) {
<ide> solution: { value: url }
<ide> } = backendFormValuesSelector(state);
<ide> return {
<del> build: frameRunner,
<add> build: concatHtml(frameRunner, ''),
<ide> sources: { url }
<ide> };
<ide> } | 4 |
Java | Java | add a couple of @see to completable | b338ffe9f469896187e401caee2e9a9a8fffe28d | <ide><path>src/main/java/io/reactivex/Completable.java
<ide> public final Completable compose(CompletableTransformer transformer) {
<ide> * @param other the other Completable, not null
<ide> * @return the new Completable which subscribes to this and then the other Completable
<ide> * @throws NullPointerException if other is null
<add> * @see #andThen(MaybeSource)
<add> * @see #andThen(ObservableSource)
<add> * @see #andThen(SingleSource)
<add> * @see #andThen(Publisher)
<ide> */
<ide> @CheckReturnValue
<ide> @SchedulerSupport(SchedulerSupport.NONE)
<ide> public final Completable delay(final long delay, final TimeUnit unit, final Sche
<ide> * @param onComplete the callback to call when this emits an onComplete event
<ide> * @return the new Completable instance
<ide> * @throws NullPointerException if onComplete is null
<add> * @see #doFinally(Action)
<ide> */
<ide> @CheckReturnValue
<ide> @SchedulerSupport(SchedulerSupport.NONE)
<ide> public final Completable doOnDispose(Action onDispose) {
<ide> * @param onError the error callback
<ide> * @return the new Completable instance
<ide> * @throws NullPointerException if onError is null
<add> * @see #doFinally(Action)
<ide> */
<ide> @CheckReturnValue
<ide> @SchedulerSupport(SchedulerSupport.NONE)
<ide> public final Completable doOnSubscribe(Consumer<? super Disposable> onSubscribe)
<ide> * </dl>
<ide> * @param onTerminate the callback to call just before this Completable terminates
<ide> * @return the new Completable instance
<add> * @see #doFinally(Action)
<ide> */
<ide> @CheckReturnValue
<ide> @SchedulerSupport(SchedulerSupport.NONE)
<ide> public final Completable doOnTerminate(final Action onTerminate) {
<ide> * </dl>
<ide> * @param onAfterTerminate the callback to call after this Completable terminates
<ide> * @return the new Completable instance
<add> * @see #doFinally(Action)
<ide> */
<ide> @CheckReturnValue
<ide> @SchedulerSupport(SchedulerSupport.NONE)
<ide><path>src/test/java/io/reactivex/JavadocWording.java
<ide> package io.reactivex;
<ide>
<ide> import java.util.List;
<add>import java.util.regex.Pattern;
<ide>
<ide> import static org.junit.Assert.*;
<ide> import org.junit.Test;
<ide> public void completableDocRefersToCompletableTypes() throws Exception {
<ide> int idx = m.javadoc.indexOf("Flowable", jdx);
<ide> if (idx >= 0) {
<ide> if (!m.signature.contains("Flowable")) {
<del> e.append("java.lang.RuntimeException: Completable doc mentions Flowable but not in the signature\r\n at io.reactivex.")
<del> .append("Completable (Completable.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
<add> Pattern p = Pattern.compile("@see\\s+#[A-Za-z0-9 _.,()]*Flowable");
<add> if (!p.matcher(m.javadoc).find()) {
<add> e.append("java.lang.RuntimeException: Completable doc mentions Flowable but not in the signature\r\n at io.reactivex.")
<add> .append("Completable (Completable.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
<add> }
<ide> }
<ide> jdx = idx + 6;
<ide> } else {
<ide> public void completableDocRefersToCompletableTypes() throws Exception {
<ide> int idx = m.javadoc.indexOf("Single", jdx);
<ide> if (idx >= 0) {
<ide> if (!m.signature.contains("Single")) {
<del> e.append("java.lang.RuntimeException: Completable doc mentions Single but not in the signature\r\n at io.reactivex.")
<del> .append("Completable (Completable.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
<add> Pattern p = Pattern.compile("@see\\s+#[A-Za-z0-9 _.,()]*Single");
<add> if (!p.matcher(m.javadoc).find()) {
<add> e.append("java.lang.RuntimeException: Completable doc mentions Single but not in the signature\r\n at io.reactivex.")
<add> .append("Completable (Completable.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
<add> }
<ide> }
<ide> jdx = idx + 6;
<ide> } else {
<ide> public void completableDocRefersToCompletableTypes() throws Exception {
<ide> int idx = m.javadoc.indexOf("SingleSource", jdx);
<ide> if (idx >= 0) {
<ide> if (!m.signature.contains("SingleSource")) {
<del> e.append("java.lang.RuntimeException: Completable doc mentions SingleSource but not in the signature\r\n at io.reactivex.")
<del> .append("Completable (Completable.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
<add> Pattern p = Pattern.compile("@see\\s+#[A-Za-z0-9 _.,()]*SingleSource");
<add> if (!p.matcher(m.javadoc).find()) {
<add> e.append("java.lang.RuntimeException: Completable doc mentions SingleSource but not in the signature\r\n at io.reactivex.")
<add> .append("Completable (Completable.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
<add> }
<ide> }
<ide> jdx = idx + 6;
<ide> } else {
<ide> public void completableDocRefersToCompletableTypes() throws Exception {
<ide> int idx = m.javadoc.indexOf(" Observable", jdx);
<ide> if (idx >= 0) {
<ide> if (!m.signature.contains("Observable")) {
<del> e.append("java.lang.RuntimeException: Completable doc mentions Observable but not in the signature\r\n at io.reactivex.")
<del> .append("Completable (Completable.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
<add> Pattern p = Pattern.compile("@see\\s+#[A-Za-z0-9 _.,()]*Observable");
<add> if (!p.matcher(m.javadoc).find()) {
<add> e.append("java.lang.RuntimeException: Completable doc mentions Observable but not in the signature\r\n at io.reactivex.")
<add> .append("Completable (Completable.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
<add> }
<ide> }
<ide> jdx = idx + 6;
<ide> } else {
<ide> public void completableDocRefersToCompletableTypes() throws Exception {
<ide> int idx = m.javadoc.indexOf("ObservableSource", jdx);
<ide> if (idx >= 0) {
<ide> if (!m.signature.contains("ObservableSource")) {
<del> e.append("java.lang.RuntimeException: Completable doc mentions ObservableSource but not in the signature\r\n at io.reactivex.")
<del> .append("Completable (Completable.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
<add> Pattern p = Pattern.compile("@see\\s+#[A-Za-z0-9 _.,()]*ObservableSource");
<add> if (!p.matcher(m.javadoc).find()) {
<add> e.append("java.lang.RuntimeException: Completable doc mentions ObservableSource but not in the signature\r\n at io.reactivex.")
<add> .append("Completable (Completable.java:").append(m.javadocLine + lineNumber(m.javadoc, idx) - 1).append(")\r\n\r\n");
<add> }
<ide> }
<ide> jdx = idx + 6;
<ide> } else { | 2 |
Ruby | Ruby | remove bundle exec from the exception message | d17fa452ec1993271ccc6e5035fb27b9c89513cf | <ide><path>activerecord/lib/active_record/migration.rb
<ide> def initialize(name)
<ide>
<ide> class PendingMigrationError < ActiveRecordError#:nodoc:
<ide> def initialize
<del> super("Migrations are pending run 'bundle exec rake db:migrate RAILS_ENV=#{ENV['RAILS_ENV']}' to resolve the issue")
<add> super("Migrations are pending run 'rake db:migrate RAILS_ENV=#{ENV['RAILS_ENV']}' to resolve the issue")
<ide> end
<ide> end
<ide> | 1 |
Python | Python | fix typo in documentation (101 instead of 10) | 0f1d2d929a0f7babb6d4471e987be1da34e06e34 | <ide><path>numpy/core/code_generators/ufunc_docstrings.py
<ide> def add_newdoc(place, name, doc):
<ide>
<ide> >>> import matplotlib.pyplot as plt
<ide>
<del> >>> x = np.linspace(-10, 10, 101)
<add> >>> x = np.linspace(-10, 10, 10)
<ide> >>> plt.plot(x, np.absolute(x))
<ide> >>> plt.show()
<ide> | 1 |
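A quick sanity check of the corrected docstring example above — a minimal sketch that only assumes numpy is installed (matplotlib and the plotting calls are left out):

```python
import numpy as np

# 10 evenly spaced samples, matching the corrected linspace call in the docstring
x = np.linspace(-10, 10, 10)
print(len(x))          # 10
print(np.absolute(x))  # the element-wise absolute values the plot would show
```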
PHP | PHP | add tests for | edcfe2d3a8905f5a2972a48a6d8427e85aab2632 | <ide><path>lib/Cake/Test/Case/Utility/HashTest.php
<ide> public function testExtractAttributeEquality() {
<ide> $this->assertEquals(5, $result[3]['id']);
<ide> }
<ide>
<add>/**
<add> * Test that attribute matchers don't cause errors on scalar data.
<add> *
<add> * @return void
<add> */
<add> public function testExtractAttributeEqualityOnScalarValue() {
<add> $data = array(
<add> 'Entity' => array(
<add> 'id' => 1 ,
<add> 'data1' => 'value',
<add> )
<add> );
<add> $result = Hash::extract($data, 'Entity[id=1].data1');
<add> $this->assertEquals(array('value'), $result);
<add>
<add> $data = array('Entity' => false );
<add> $result = Hash::extract($data, 'Entity[id=1].data1');
<add> $this->assertEquals(array(), $result);
<add> }
<add>
<ide> /**
<ide> * Test comparison operators.
<ide> * | 1 |
Python | Python | add extra field to get_connection rest endpoint | adf7755eaa67bd924f6a4da0498bce804da1dd4b
<ide> from airflow.api_connexion.parameters import check_limit, format_parameters
<ide> from airflow.api_connexion.schemas.connection_schema import (
<ide> ConnectionCollection,
<del> connection_collection_item_schema,
<ide> connection_collection_schema,
<ide> connection_schema,
<ide> )
<ide> def get_connection(connection_id, session):
<ide> "Connection not found",
<ide> detail=f"The Connection with connection_id: `{connection_id}` was not found",
<ide> )
<del> return connection_collection_item_schema.dump(connection)
<add> return connection_schema.dump(connection)
<ide>
<ide>
<ide> @security.requires_access([(permissions.ACTION_CAN_READ, permissions.RESOURCE_CONNECTION)])
<ide><path>tests/api_connexion/endpoints/test_connection_endpoint.py
<ide> def test_should_respond_200(self, session):
<ide> login='login',
<ide> schema='testschema',
<ide> port=80,
<add> extra="{'param': 'value'}",
<ide> )
<ide> session.add(connection_model)
<ide> session.commit()
<ide> def test_should_respond_200(self, session):
<ide> "login": 'login',
<ide> 'schema': 'testschema',
<ide> 'port': 80,
<add> 'extra': "{'param': 'value'}",
<ide> }
<ide>
<ide> def test_should_respond_404(self): | 2 |
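To illustrate what the schema change above means for API consumers, here is a hypothetical client-side sketch; the base URL, credentials, and connection id are placeholders, and it assumes the webserver exposes the stable REST API path these tests exercise:

```python
import requests

# Hypothetical local Airflow webserver and credentials; adjust to your deployment.
resp = requests.get(
    "http://localhost:8080/api/v1/connections/test-connection-id",
    auth=("admin", "admin"),
)
resp.raise_for_status()

# With the full connection_schema the payload now includes the extra field too.
print(resp.json().get("extra"))
```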
Javascript | Javascript | convert the `domcanvasfactory` to an es6 class | c5f73edcd273f1cd33694243740e248bef0472c9 | <ide><path>examples/node/pdf2png/pdf2png.js
<ide> NodeCanvasFactory.prototype = {
<ide> };
<ide> },
<ide>
<del> reset: function NodeCanvasFactory_reset(canvasAndContextPair, width, height) {
<del> assert(canvasAndContextPair.canvas, 'Canvas is not specified');
<add> reset: function NodeCanvasFactory_reset(canvasAndContext, width, height) {
<add> assert(canvasAndContext.canvas, 'Canvas is not specified');
<ide> assert(width > 0 && height > 0, 'Invalid canvas size');
<del> canvasAndContextPair.canvas.width = width;
<del> canvasAndContextPair.canvas.height = height;
<add> canvasAndContext.canvas.width = width;
<add> canvasAndContext.canvas.height = height;
<ide> },
<ide>
<del> destroy: function NodeCanvasFactory_destroy(canvasAndContextPair) {
<del> assert(canvasAndContextPair.canvas, 'Canvas is not specified');
<add> destroy: function NodeCanvasFactory_destroy(canvasAndContext) {
<add> assert(canvasAndContext.canvas, 'Canvas is not specified');
<ide>
<ide> // Zeroing the width and height cause Firefox to release graphics
<ide> // resources immediately, which can greatly reduce memory consumption.
<del> canvasAndContextPair.canvas.width = 0;
<del> canvasAndContextPair.canvas.height = 0;
<del> canvasAndContextPair.canvas = null;
<del> canvasAndContextPair.context = null;
<add> canvasAndContext.canvas.width = 0;
<add> canvasAndContext.canvas.height = 0;
<add> canvasAndContext.canvas = null;
<add> canvasAndContext.context = null;
<ide> },
<ide> };
<ide>
<ide> pdfjsLib.getDocument(rawData).then(function (pdfDocument) {
<ide> // Render the page on a Node canvas with 100% scale.
<ide> var viewport = page.getViewport(1.0);
<ide> var canvasFactory = new NodeCanvasFactory();
<del> var canvasAndContextPair = canvasFactory.create(viewport.width, viewport.height);
<add> var canvasAndContext = canvasFactory.create(viewport.width, viewport.height);
<ide> var renderContext = {
<del> canvasContext: canvasAndContextPair.context,
<add> canvasContext: canvasAndContext.context,
<ide> viewport: viewport,
<ide> canvasFactory: canvasFactory
<ide> };
<ide>
<ide> page.render(renderContext).then(function () {
<ide> // Convert the canvas to an image buffer.
<del> image = canvasAndContextPair.canvas.toBuffer();
<add> var image = canvasAndContext.canvas.toBuffer();
<ide> fs.writeFile('output.png', image, function (error) {
<ide> if (error) {
<ide> console.error('Error: ' + error);
<ide><path>src/display/dom_utils.js
<ide> import {
<ide>
<ide> var DEFAULT_LINK_REL = 'noopener noreferrer nofollow';
<ide>
<del>function DOMCanvasFactory() {}
<del>DOMCanvasFactory.prototype = {
<del> create: function DOMCanvasFactory_create(width, height) {
<add>class DOMCanvasFactory {
<add> create(width, height) {
<ide> assert(width > 0 && height > 0, 'invalid canvas size');
<del> var canvas = document.createElement('canvas');
<del> var context = canvas.getContext('2d');
<add> let canvas = document.createElement('canvas');
<add> let context = canvas.getContext('2d');
<ide> canvas.width = width;
<ide> canvas.height = height;
<ide> return {
<ide> canvas,
<ide> context,
<ide> };
<del> },
<add> }
<ide>
<del> reset: function DOMCanvasFactory_reset(canvasAndContextPair, width, height) {
<del> assert(canvasAndContextPair.canvas, 'canvas is not specified');
<add> reset(canvasAndContext, width, height) {
<add> assert(canvasAndContext.canvas, 'canvas is not specified');
<ide> assert(width > 0 && height > 0, 'invalid canvas size');
<del> canvasAndContextPair.canvas.width = width;
<del> canvasAndContextPair.canvas.height = height;
<del> },
<add> canvasAndContext.canvas.width = width;
<add> canvasAndContext.canvas.height = height;
<add> }
<ide>
<del> destroy: function DOMCanvasFactory_destroy(canvasAndContextPair) {
<del> assert(canvasAndContextPair.canvas, 'canvas is not specified');
<add> destroy(canvasAndContext) {
<add> assert(canvasAndContext.canvas, 'canvas is not specified');
<ide> // Zeroing the width and height cause Firefox to release graphics
<ide> // resources immediately, which can greatly reduce memory consumption.
<del> canvasAndContextPair.canvas.width = 0;
<del> canvasAndContextPair.canvas.height = 0;
<del> canvasAndContextPair.canvas = null;
<del> canvasAndContextPair.context = null;
<add> canvasAndContext.canvas.width = 0;
<add> canvasAndContext.canvas.height = 0;
<add> canvasAndContext.canvas = null;
<add> canvasAndContext.context = null;
<ide> }
<del>};
<add>}
<ide>
<ide> class DOMCMapReaderFactory {
<ide> constructor({ baseUrl = null, isCompressed = false, }) { | 2 |
Javascript | Javascript | show error details | b6a1c40545cbde4e3ec085e20eb9fbfa1a850f56 | <ide><path>bin/webpack.js
<ide> optimist
<ide>
<ide> .boolean("display-chunks").describe("display-chunks")
<ide>
<add> .boolean("display-error-details").describe("display-error-details")
<add>
<ide> .boolean("display-reasons").alias("display-reasons", "verbose").alias("display-reasons", "v").describe("display-reasons");
<add>
<ide>
<ide> var argv = optimist.argv;
<ide>
<ide> if(!outputOptions.json) {
<ide> ifArg("display-reasons", function(bool) {
<ide> outputOptions.reasons = bool;
<ide> });
<add>
<add> ifArg("display-error-details", function(bool) {
<add> outputOptions.errorDetails = bool;
<add> });
<ide> } else {
<ide> outputOptions.chunks = true;
<ide> outputOptions.modules = true;
<ide><path>lib/ModuleNotFoundError.js
<ide> function ModuleNotFoundError(module, err) {
<ide> Error.captureStackTrace(this, ModuleNotFoundError);
<ide> this.name = "ModuleNotFoundError";
<ide> this.message = "Module not found: " + err;
<add> this.details = err.details;
<ide> this.module = module;
<ide> this.error = err;
<ide> }
<ide><path>lib/Stats.js
<ide> Stats.prototype.toJson = function toJson(options, forToString) {
<ide> var showReasons = d(options.reasons, !forToString);
<ide> var showChildren = d(options.children, true);
<ide> var showSource = d(options.source, !forToString);
<add> var showErrorDetails = d(options.errorDetails, !forToString);
<ide> var sortModules = d(options.modulesSort, "id");
<ide> var sortChunks = d(options.chunksSort, "id");
<ide> var sortAssets = d(options.assetsSort, "");
<ide> Stats.prototype.toJson = function toJson(options, forToString) {
<ide> text += e.file + "\n";
<ide> }
<ide> text += e.message;
<add> if(showErrorDetails && e.details) text += "\n" + e.details;
<ide> if(e.dependencies && e.origin) {
<ide> text += "\n @ " + e.origin.readableIdentifier(requestShortener);
<ide> e.dependencies.forEach(function(dep) {
<ide><path>test/Errors.test.js
<ide> describe("Errors", function() {
<ide> c.run(function(err, stats) {
<ide> if(err) throw err;
<ide> should.strictEqual(typeof stats, "object");
<del> stats = stats.toJson();
<add> stats = stats.toJson({ errorDetails: false });
<ide> should.strictEqual(typeof stats, "object");
<ide> stats.should.have.property("errors");
<ide> stats.should.have.property("warnings"); | 4 |
Javascript | Javascript | ensure presetenv is loaded | eb75f8c8cc2316fbf68a4fa26841e77292c71f3c | <ide><path>client/src/templates/Challenges/rechallenge/transformers.js
<ide> async function loadBabel() {
<ide> }
<ide>
<ide> async function loadPresetEnv() {
<del> if (presetEnv) return;
<add> if (babelOptionsJSBase && babelOptionsJSBase.presets) return;
<ide> /* eslint-disable no-inline-comments */
<del> presetEnv = await import(
<del> /* webpackChunkName: "@babel/preset-env" */ '@babel/preset-env'
<del> );
<add> if (!presetEnv)
<add> presetEnv = await import(
<add> /* webpackChunkName: "@babel/preset-env" */ '@babel/preset-env'
<add> );
<ide> /* eslint-enable no-inline-comments */
<ide>
<ide> babelOptionsJSBase = { | 1 |
PHP | PHP | fix incorrect class type for widgets | e5a9dc8de6fbb7c9dcd3d44c81a01ea7415d7937 | <ide><path>src/View/Widget/WidgetRegistry.php
<ide> protected function _resolveWidget($widget) {
<ide> }
<ide>
<ide> $class = array_shift($widget);
<del> $className = App::className($class, 'View/Input');
<add> $className = App::className($class, 'View/Widget');
<ide> if ($className === false || !class_exists($className)) {
<ide> throw new \RuntimeException(sprintf('Unable to locate widget class "%s"', $class));
<ide> }
<ide><path>tests/TestCase/View/Widget/WidgetRegistryTest.php
<ide> public function setUp() {
<ide> public function testAddInConstructor() {
<ide> $widgets = [
<ide> 'text' => ['Cake\View\Widget\Basic'],
<add> 'label' => ['Label'],
<ide> ];
<ide> $inputs = new WidgetRegistry($this->templates, $this->view, $widgets);
<ide> $result = $inputs->get('text');
<ide> $this->assertInstanceOf('Cake\View\Widget\Basic', $result);
<add>
<add> $result = $inputs->get('label');
<add> $this->assertInstanceOf('Cake\View\Widget\Label', $result);
<ide> }
<ide>
<ide> /** | 2 |
Text | Text | add backticks around string#camelize | ec544350bb59e9e42361d607bde2db689c6bb2d7 | <ide><path>activesupport/CHANGELOG.md
<del>* Update String#camelize to provide feedback when wrong option is passed
<add>* Update `String#camelize` to provide feedback when wrong option is passed
<ide>
<del> String#camelize was returning nil without any feedback when an
<add> `String#camelize` was returning nil without any feedback when an
<ide> invalid option was passed as parameter.
<ide>
<ide> Previously: | 1 |
Python | Python | fix failing yaml tests | 653d626b3c3189260423bec7a1dd4faed9f8708d | <ide><path>tests/test_renderers.py
<ide> from django.utils import six, unittest
<ide> from django.utils.translation import ugettext_lazy as _
<ide> from rest_framework import status, permissions
<del>from rest_framework.compat import yaml, etree, StringIO
<add>from rest_framework.compat import yaml, etree, StringIO, BytesIO
<ide> from rest_framework.response import Response
<ide> from rest_framework.views import APIView
<ide> from rest_framework.renderers import BaseRenderer, JSONRenderer, YAMLRenderer, \
<ide> def test_render(self):
<ide> obj = {'foo': ['bar', 'baz']}
<ide> renderer = YAMLRenderer()
<ide> content = renderer.render(obj, 'application/yaml')
<del> self.assertEqual(content, _yaml_repr)
<add> self.assertEqual(content.decode('utf-8'), _yaml_repr)
<ide>
<ide> def test_render_and_parse(self):
<ide> """
<ide> def test_render_and_parse(self):
<ide> parser = YAMLParser()
<ide>
<ide> content = renderer.render(obj, 'application/yaml')
<del> data = parser.parse(StringIO(content))
<add> data = parser.parse(BytesIO(content))
<ide> self.assertEqual(obj, data)
<ide>
<ide> def test_render_decimal(self):
<ide> def test_render_decimal(self):
<ide> """
<ide> renderer = YAMLRenderer()
<ide> content = renderer.render({'field': Decimal('111.2')}, 'application/yaml')
<del> self.assertYAMLContains(content, "field: '111.2'")
<add> self.assertYAMLContains(content.decode('utf-8'), "field: '111.2'")
<ide>
<ide> def assertYAMLContains(self, content, string):
<ide> self.assertTrue(string in content, '%r not in %r' % (string, content)) | 1 |
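The root cause behind the BytesIO switch above is that on Python 3 the rendered YAML is bytes rather than text. A standalone sketch of the same round trip using PyYAML directly (not DRF's renderer/parser classes), assuming PyYAML is available:

```python
from io import BytesIO

import yaml

obj = {"foo": ["bar", "baz"]}

# A renderer targeting the wire format naturally produces bytes...
content = yaml.safe_dump(obj).encode("utf-8")

# ...so the parsing side has to read from a bytes stream, hence BytesIO in the test.
data = yaml.safe_load(BytesIO(content).read().decode("utf-8"))
assert data == obj
```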
Ruby | Ruby | parse tag if detecting version | 11ebc27e0a1659ce23993bfa6a96470fcc76f477 | <ide><path>Library/Homebrew/test/version_spec.rb
<ide> .to be_detected_from("https://php.net/get/php-7.1.10.tar.gz/from/this/mirror")
<ide> end
<ide>
<del> specify "from URL" do
<add> specify "from tag" do
<ide> expect(described_class.create("1.2.3"))
<ide> .to be_detected_from("https://github.com/foo/bar.git", tag: "v1.2.3")
<ide> end
<add>
<add> specify "beta from tag" do
<add> expect(described_class.create("1.2.3-beta1"))
<add> .to be_detected_from("https://github.com/foo/bar.git", tag: "v1.2.3-beta1")
<add> end
<ide> end
<ide> end
<ide>
<ide><path>Library/Homebrew/version.rb
<ide> def detected_from_url?
<ide>
<ide> def self.detect(url, specs)
<ide> if specs.key?(:tag)
<del> FromURL.new(specs[:tag][/((?:\d+\.)*\d+)/, 1])
<add> FromURL.parse(specs[:tag])
<ide> else
<ide> FromURL.parse(url)
<ide> end | 2 |
Javascript | Javascript | fix indentation again | 14bbd82ce53e861333a107c82fe5f8145f6fd284 | <ide><path>web/viewer.js
<ide> var PDFView = {
<ide> var data = (xhr.mozResponseArrayBuffer || xhr.mozResponse ||
<ide> xhr.responseArrayBuffer || xhr.response);
<ide>
<del> document.getElementById('loading').style.display = "none";
<add> document.getElementById('loading').style.display = "none";
<ide> PDFView.load(data, scale);
<ide> }
<ide> }; | 1 |
Python | Python | calculate gradient for entity encoding | 9ffe5437aee37c02db2d32a79bc4a2072448cce3 | <ide><path>examples/pipeline/wiki_entity_linking/train_el.py
<ide> class EL_Model():
<ide>
<ide> INPUT_DIM = 300
<del> OUTPUT_DIM = 5 # 96
<del> PRINT_LOSS = True
<add> OUTPUT_DIM = 96
<add> PRINT_LOSS = False
<ide> PRINT_F = True
<add> EPS = 0.0000000005
<ide>
<ide> labels = ["MATCH", "NOMATCH"]
<ide> name = "entity_linker"
<ide> def train_model(self, training_dir, entity_descr_output, trainlimit=None, devlim
<ide> instance_count = 0
<ide>
<ide> for article_id, inst_cluster_set in train_instances.items():
<del> print("article", article_id)
<add> # print("article", article_id)
<ide> article_doc = train_doc[article_id]
<ide> pos_ex_list = list()
<ide> neg_exs_list = list()
<ide> for inst_cluster in inst_cluster_set:
<del> print("inst_cluster", inst_cluster)
<add> # print("inst_cluster", inst_cluster)
<ide> instance_count += 1
<ide> pos_ex_list.append(train_pos.get(inst_cluster))
<ide> neg_exs_list.append(train_neg.get(inst_cluster, []))
<ide> def _simple_encoder(self, in_width, out_width):
<ide> conv_depth = 1
<ide> cnn_maxout_pieces = 3
<ide> with Model.define_operators({">>": chain, "**": clone}):
<del> encoder = SpacyVectors \
<del> >> flatten_add_lengths \
<del> >> ParametricAttention(in_width)\
<del> >> Pooling(mean_pool) \
<del> >> Residual(zero_init(Maxout(in_width, in_width))) \
<del> >> zero_init(Affine(out_width, in_width, drop_factor=0.0))
<ide> # encoder = SpacyVectors \
<del> # >> flatten_add_lengths \
<del> # >> with_getitem(0, Affine(in_width, in_width)) \
<del> # >> ParametricAttention(in_width) \
<del> # >> Pooling(sum_pool) \
<del> # >> Residual(ReLu(in_width, in_width)) ** conv_depth \
<del> # >> zero_init(Affine(out_width, in_width, drop_factor=0.0))
<add> # >> flatten_add_lengths \
<add> # >> ParametricAttention(in_width)\
<add> # >> Pooling(mean_pool) \
<add> # >> Residual(zero_init(Maxout(in_width, in_width))) \
<add> # >> zero_init(Affine(out_width, in_width, drop_factor=0.0))
<add> encoder = SpacyVectors \
<add> >> flatten_add_lengths \
<add> >> with_getitem(0, Affine(in_width, in_width)) \
<add> >> ParametricAttention(in_width) \
<add> >> Pooling(sum_pool) \
<add> >> Residual(ReLu(in_width, in_width)) ** conv_depth \
<add> >> zero_init(Affine(out_width, in_width, drop_factor=0.0))
<ide>
<ide> # >> zero_init(Affine(nr_class, width, drop_factor=0.0))
<ide> # >> logistic
<ide> def begin_training(self, model):
<ide> return sgd
<ide>
<ide> def update(self, article_doc, true_entity_list, false_entities_list, drop=0., losses=None):
<del>
<add> doc_encoding, article_bp = self.article_encoder.begin_update([article_doc], drop=drop)
<add> doc_encoding = doc_encoding[0]
<add> # print("doc", doc_encoding)
<ide>
<ide> for i, true_entity in enumerate(true_entity_list):
<del> for cnt in range(10):
<del> #try:
<add> try:
<ide> false_vectors = list()
<ide> false_entities = false_entities_list[i]
<ide> if len(false_entities) > 0:
<ide> # TODO: batch per doc
<del> doc_encoding, article_bp = self.article_encoder.begin_update([article_doc], drop=drop)
<del> doc_encoding = doc_encoding[0]
<del> print()
<del> print(cnt)
<del> print("doc", doc_encoding)
<ide>
<ide> for false_entity in false_entities:
<ide> # TODO: one call only to begin_update ?
<ide> def update(self, article_doc, true_entity_list, false_entities_list, drop=0., lo
<ide>
<ide> true_entity_encoding, true_entity_bp = self.entity_encoder.begin_update([true_entity], drop=drop)
<ide> true_entity_encoding = true_entity_encoding[0]
<add> # true_gradient = self._calculate_true_gradient(doc_encoding, true_entity_encoding)
<ide>
<ide> all_vectors = [true_entity_encoding]
<ide> all_vectors.extend(false_vectors)
<ide>
<ide> # consensus_encoding = self._calculate_consensus(doc_encoding, true_entity_encoding)
<ide>
<ide> true_prob = self._calculate_probability(doc_encoding, true_entity_encoding, all_vectors)
<del> print("true", true_prob, true_entity_encoding)
<add> # print("true", true_prob, true_entity_encoding)
<add> # print("true gradient", true_gradient)
<add> # print()
<ide>
<ide> all_probs = [true_prob]
<ide> for false_vector in false_vectors:
<ide> false_prob = self._calculate_probability(doc_encoding, false_vector, all_vectors)
<del> print("false", false_prob, false_vector)
<add> # print("false", false_prob, false_vector)
<add> # print("false gradient", false_gradient)
<add> # print()
<ide> all_probs.append(false_prob)
<ide>
<ide> loss = self._calculate_loss(true_prob, all_probs).astype(np.float32)
<ide> if self.PRINT_LOSS:
<del> print("loss", round(loss, 5))
<add> print(round(loss, 5))
<ide>
<del> doc_gradient = self._calculate_doc_gradient(loss, doc_encoding, true_entity_encoding, false_vectors)
<del> print("doc_gradient", doc_gradient)
<del> article_bp([doc_gradient.astype(np.float32)], sgd=self.sgd_article)
<del> #except Exception as e:
<del> #pass
<add> #doc_gradient = self._calculate_doc_gradient(loss, doc_encoding, true_entity_encoding, false_vectors)
<add> entity_gradient = self._calculate_entity_gradient(doc_encoding, true_entity_encoding, false_vectors)
<add> # print("entity_gradient", entity_gradient)
<add> # print("doc_gradient", doc_gradient)
<add> # article_bp([doc_gradient.astype(np.float32)], sgd=self.sgd_article)
<add> true_entity_bp([entity_gradient.astype(np.float32)], sgd=self.sgd_entity)
<add> #true_entity_bp([true_gradient.astype(np.float32)], sgd=self.sgd_entity)
<add> except Exception as e:
<add> pass
<ide>
<ide>
<ide> # TODO: FIX
<ide> def _calculate_consensus(self, vector1, vector2):
<ide> if len(vector1) != len(vector2):
<del> raise ValueError("To calculate consenus, both vectors should be of equal length")
<add> raise ValueError("To calculate consensus, both vectors should be of equal length")
<ide>
<ide> avg = (vector2 + vector1) / 2
<ide> return avg
<ide> def _calculate_probability(self, vector1, vector2, allvectors):
<ide> for v in allvectors:
<ide> e_sum += self._calculate_dot_exp(v, vector1_t)
<ide>
<del> return float(e / e_sum)
<add> return float(e / (self.EPS + e_sum))
<ide>
<del> @staticmethod
<del> def _calculate_loss(true_prob, all_probs):
<add> def _calculate_loss(self, true_prob, all_probs):
<ide> """ all_probs should include true_prob ! """
<del> return -1 * np.log(true_prob / sum(all_probs))
<add> return -1 * np.log((self.EPS + true_prob) / (self.EPS + sum(all_probs)))
<ide>
<ide> @staticmethod
<ide> def _calculate_doc_gradient(loss, doc_vector, true_vector, false_vectors):
<ide> def _calculate_doc_gradient(loss, doc_vector, true_vector, false_vectors):
<ide>
<ide> return gradient
<ide>
<add> def _calculate_true_gradient(self, doc_vector, entity_vector):
<add> # sum_entity_vector = sum(entity_vector)
<add> # gradient = [-sum_entity_vector/(self.EPS + np.exp(doc_vector[i] * entity_vector[i])) for i in range(len(doc_vector))]
<add> gradient = [1 / (self.EPS + np.exp(doc_vector[i] * entity_vector[i])) for i in range(len(doc_vector))]
<add> return np.asarray(gradient)
<add>
<add> def _calculate_entity_gradient(self, doc_vector, true_vector, false_vectors):
<add> entity_gradient = list()
<add> prob_true = list()
<add> false_prob_list = list()
<add> for i in range(len(true_vector)):
<add> doc_i = np.asarray([doc_vector[i]])
<add> true_i = np.asarray([true_vector[i]])
<add> falses_i = np.asarray([[fv[i]] for fv in false_vectors])
<add> all_i = [true_i]
<add> all_i.extend(falses_i)
<add>
<add> prob_true_i = self._calculate_probability(doc_i, true_i, all_i)
<add> prob_true.append(prob_true_i)
<add>
<add> false_list = list()
<add> all_probs_i = [prob_true_i]
<add> for false_vector in falses_i:
<add> false_prob_i = self._calculate_probability(doc_i, false_vector, all_i)
<add> all_probs_i.append(false_prob_i)
<add> false_list.append(false_prob_i)
<add> false_prob_list.append(false_list)
<add>
<add> sign_loss_i = 1
<add> if doc_vector[i] * true_vector[i] < 0:
<add> sign_loss_i = -1
<add>
<add> loss_i = sign_loss_i * self._calculate_loss(prob_true_i, all_probs_i).astype(np.float32)
<add> entity_gradient.append(loss_i)
<add> # print("prob_true", prob_true)
<add> # print("false_prob_list", false_prob_list)
<add> return np.asarray(entity_gradient)
<add>
<add>
<ide> @staticmethod
<ide> def _calculate_dot_exp(vector1, vector2_transposed):
<del> e = np.exp(vector1.dot(vector2_transposed))
<add> dot_product = vector1.dot(vector2_transposed)
<add> dot_product = min(50, dot_product)
<add> # dot_product = max(-10000, dot_product)
<add> # print("DOT", dot_product)
<add> e = np.exp(dot_product)
<add> # print("E", e)
<ide> return e
<ide>
<ide> def _get_training_data(self, training_dir, entity_descr_output, dev, limit, to_print):
<ide><path>examples/pipeline/wiki_entity_linking/wiki_nel_pipeline.py
<ide> print("STEP 6: training ", datetime.datetime.now())
<ide> my_nlp = spacy.load('en_core_web_md')
<ide> trainer = EL_Model(kb=my_kb, nlp=my_nlp)
<del> trainer.train_model(training_dir=TRAINING_DIR, entity_descr_output=ENTITY_DESCR, trainlimit=1, devlimit=5)
<add> trainer.train_model(training_dir=TRAINING_DIR, entity_descr_output=ENTITY_DESCR, trainlimit=1500, devlimit=50)
<ide> print()
<ide>
<ide> # STEP 7: apply the EL algorithm on the dev dataset | 2 |
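The probability and loss used in the training loop above follow a softmax-over-dot-products pattern. A minimal numpy sketch of that calculation with made-up vectors — the epsilon and the dot-product clipping mirror the code above, everything else is illustrative:

```python
import numpy as np

EPS = 0.0000000005

def probability(doc_vec, cand_vec, all_vecs):
    # exp(dot product) for this candidate, normalised over all candidate vectors
    e = np.exp(min(50, float(doc_vec @ cand_vec)))
    e_sum = sum(np.exp(min(50, float(doc_vec @ v))) for v in all_vecs)
    return e / (EPS + e_sum)

doc = np.array([0.1, -0.2, 0.3])
true_ent = np.array([0.2, -0.1, 0.4])
false_ent = np.array([-0.3, 0.5, 0.0])
candidates = [true_ent, false_ent]

p_true = probability(doc, true_ent, candidates)
p_false = probability(doc, false_ent, candidates)

# negative log of the true candidate's share of the total probability mass
loss = -np.log((EPS + p_true) / (EPS + p_true + p_false))
print(round(p_true, 4), round(p_false, 4), round(loss, 4))
```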
PHP | PHP | fix missing newline | 80392e2dbf099ae33bbd2d2180ce6c663e59f72a | <ide><path>src/Database/Query.php
<ide> public function clause($name)
<ide> $clauses = implode(', ', array_keys($this->_parts));
<ide> throw new InvalidArgumentException("The '$name' clause is not defined. Valid clauses are: $clauses");
<ide> }
<add>
<ide> return $this->_parts[$name];
<ide> }
<ide> | 1 |
Mixed | Go | fix volumes-from/bind-mounts passed in on start | d44c9f91472eb3df4c38c669134df04b2ccf9953 | <ide><path>daemon/volumes.go
<ide> type Mount struct {
<ide> volume *volumes.Volume
<ide> Writable bool
<ide> copyData bool
<add> from *Container
<ide> }
<ide>
<ide> func (mnt *Mount) Export(resource string) (io.ReadCloser, error) {
<ide> func (container *Container) prepareVolumes() error {
<ide> if container.Volumes == nil || len(container.Volumes) == 0 {
<ide> container.Volumes = make(map[string]string)
<ide> container.VolumesRW = make(map[string]bool)
<del> if err := container.applyVolumesFrom(); err != nil {
<del> return err
<del> }
<ide> }
<ide>
<ide> return container.createVolumes()
<ide> func (container *Container) createVolumes() error {
<ide> }
<ide> }
<ide>
<del> return nil
<add> // On every start, this will apply any new `VolumesFrom` entries passed in via HostConfig, which may override volumes set in `create`
<add> return container.applyVolumesFrom()
<ide> }
<ide>
<ide> func (m *Mount) initialize() error {
<ide> // No need to initialize anything since it's already been initialized
<del> if _, exists := m.container.Volumes[m.MountToPath]; exists {
<del> return nil
<add> if hostPath, exists := m.container.Volumes[m.MountToPath]; exists {
<add> // If this is a bind-mount/volumes-from, maybe it was passed in at start instead of create
<add> // We need to make sure bind-mounts/volumes-from passed on start can override existing ones.
<add> if !m.volume.IsBindMount && m.from == nil {
<add> return nil
<add> }
<add> if m.volume.Path == hostPath {
<add> return nil
<add> }
<add>
<add> // Make sure we remove these old volumes we don't actually want now.
<add> // Ignore any errors here since this is just cleanup, maybe someone volumes-from'd this volume
<add> v := m.container.daemon.volumes.Get(hostPath)
<add> v.RemoveContainer(m.container.ID)
<add> m.container.daemon.volumes.Delete(v.Path)
<ide> }
<ide>
<ide> // This is the full path to container fs + mntToPath
<ide> func (container *Container) applyVolumesFrom() error {
<ide>
<ide> for _, mounts := range mountGroups {
<ide> for _, mnt := range mounts {
<add> mnt.from = mnt.container
<ide> mnt.container = container
<ide> if err := mnt.initialize(); err != nil {
<ide> return err
<ide><path>docs/sources/reference/api/docker_remote_api.md
<ide> You can set the new container's MAC address explicitly.
<ide> **New!**
<ide> Volumes are now initialized when the container is created.
<ide>
<del>`POST /containers/(id)/start`
<del>
<del>**New!**
<del>Passing the container's `HostConfig` on start is now deprecated. You should
<del>set this when creating the container.
<del>
<ide> `POST /containers/(id)/copy`
<ide>
<ide> **New!**
<ide><path>integration-cli/docker_api_containers_test.go
<ide> import (
<ide> "bytes"
<ide> "encoding/json"
<ide> "io"
<add> "io/ioutil"
<add> "os"
<ide> "os/exec"
<add> "strings"
<ide> "testing"
<ide>
<ide> "github.com/docker/docker/vendor/src/code.google.com/p/go/src/pkg/archive/tar"
<ide> func TestContainerApiGetChanges(t *testing.T) {
<ide>
<ide> logDone("container REST API - check GET containers/changes")
<ide> }
<add>
<add>func TestContainerApiStartVolumeBinds(t *testing.T) {
<add> defer deleteAllContainers()
<add> name := "testing"
<add> config := map[string]interface{}{
<add> "Image": "busybox",
<add> "Volumes": map[string]struct{}{"/tmp": {}},
<add> }
<add>
<add> if _, err := sockRequest("POST", "/containers/create?name="+name, config); err != nil && !strings.Contains(err.Error(), "201 Created") {
<add> t.Fatal(err)
<add> }
<add>
<add> bindPath, err := ioutil.TempDir(os.TempDir(), "test")
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add>
<add> config = map[string]interface{}{
<add> "Binds": []string{bindPath + ":/tmp"},
<add> }
<add> if _, err := sockRequest("POST", "/containers/"+name+"/start", config); err != nil && !strings.Contains(err.Error(), "204 No Content") {
<add> t.Fatal(err)
<add> }
<add>
<add> pth, err := inspectFieldMap(name, "Volumes", "/tmp")
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add>
<add> if pth != bindPath {
<add> t.Fatalf("expected volume host path to be %s, got %s", bindPath, pth)
<add> }
<add>
<add> logDone("container REST API - check volume binds on start")
<add>}
<add>
<add>func TestContainerApiStartVolumesFrom(t *testing.T) {
<add> defer deleteAllContainers()
<add> volName := "voltst"
<add> volPath := "/tmp"
<add>
<add> if out, _, err := runCommandWithOutput(exec.Command(dockerBinary, "run", "-d", "--name", volName, "-v", volPath, "busybox")); err != nil {
<add> t.Fatal(out, err)
<add> }
<add>
<add> name := "testing"
<add> config := map[string]interface{}{
<add> "Image": "busybox",
<add> "Volumes": map[string]struct{}{volPath: {}},
<add> }
<add>
<add> if _, err := sockRequest("POST", "/containers/create?name="+name, config); err != nil && !strings.Contains(err.Error(), "201 Created") {
<add> t.Fatal(err)
<add> }
<add>
<add> config = map[string]interface{}{
<add> "VolumesFrom": []string{volName},
<add> }
<add> if _, err := sockRequest("POST", "/containers/"+name+"/start", config); err != nil && !strings.Contains(err.Error(), "204 No Content") {
<add> t.Fatal(err)
<add> }
<add>
<add> pth, err := inspectFieldMap(name, "Volumes", volPath)
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add> pth2, err := inspectFieldMap(volName, "Volumes", volPath)
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add>
<add> if pth != pth2 {
<add> t.Fatalf("expected volume host path to be %s, got %s", pth, pth2)
<add> }
<add>
<add> logDone("container REST API - check VolumesFrom on start")
<add>}
<add>
<add>// Ensure that volumes-from has priority over binds/anything else
<add>// This is pretty much the same as TestRunApplyVolumesFromBeforeVolumes, except with passing the VolumesFrom and the bind on start
<add>func TestVolumesFromHasPriority(t *testing.T) {
<add> defer deleteAllContainers()
<add> volName := "voltst"
<add> volPath := "/tmp"
<add>
<add> if out, _, err := runCommandWithOutput(exec.Command(dockerBinary, "run", "-d", "--name", volName, "-v", volPath, "busybox")); err != nil {
<add> t.Fatal(out, err)
<add> }
<add>
<add> name := "testing"
<add> config := map[string]interface{}{
<add> "Image": "busybox",
<add> "Volumes": map[string]struct{}{volPath: {}},
<add> }
<add>
<add> if _, err := sockRequest("POST", "/containers/create?name="+name, config); err != nil && !strings.Contains(err.Error(), "201 Created") {
<add> t.Fatal(err)
<add> }
<add>
<add> bindPath, err := ioutil.TempDir(os.TempDir(), "test")
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add>
<add> config = map[string]interface{}{
<add> "VolumesFrom": []string{volName},
<add> "Binds": []string{bindPath + ":/tmp"},
<add> }
<add> if _, err := sockRequest("POST", "/containers/"+name+"/start", config); err != nil && !strings.Contains(err.Error(), "204 No Content") {
<add> t.Fatal(err)
<add> }
<add>
<add> pth, err := inspectFieldMap(name, "Volumes", volPath)
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add> pth2, err := inspectFieldMap(volName, "Volumes", volPath)
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add>
<add> if pth != pth2 {
<add> t.Fatalf("expected volume host path to be %s, got %s", pth, pth2)
<add> }
<add>
<add> logDone("container REST API - check VolumesFrom has priority")
<add>} | 3 |
Python | Python | add a test for ndindex call | 4c489f6d6edccc4d7fe2310b0e0902e980b5f52b | <ide><path>numpy/lib/tests/test_index_tricks.py
<ide> def test_ndindex():
<ide> x = list(np.ndindex((1, 2, 3)))
<ide> assert_array_equal(x, expected)
<ide>
<add> # Test use of scalars and tuples
<add> x = list(np.ndindex((3,)))
<add> assert_array_equal(x, list(np.ndindex(3)))
<add>
<ide> # Make sure size argument is optional
<ide> x = list(np.ndindex())
<ide> assert_equal(x, [()]) | 1 |
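For reference, the behaviour the new assertion covers can be checked directly; a minimal sketch assuming only numpy:

```python
import numpy as np

# A scalar and the equivalent 1-tuple produce the same index sequence.
assert list(np.ndindex(3)) == list(np.ndindex((3,))) == [(0,), (1,), (2,)]

# With no size argument a single empty index is yielded.
assert list(np.ndindex()) == [()]
```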
Ruby | Ruby | ignore isolation test tests for test-unit 2 also | 98f96a0809de7cc33e683d0d4643ca80e3ac26bf | <ide><path>activesupport/test/isolation_test.rb
<ide> require 'abstract_unit'
<ide>
<add>if defined?(MiniTest) || defined?(Test::Unit::TestResultFailureSupport)
<add> $stderr.puts "Isolation tests can test test-unit 1 only"
<add>
<ide> # Does awesome
<del>if defined?(MiniTest)
<del> $stderr.puts "Umm, MiniTest not supported yet, mmkay?"
<ide> elsif ENV['CHILD']
<ide> class ChildIsolationTest < ActiveSupport::TestCase
<ide> include ActiveSupport::Testing::Isolation | 1 |
Javascript | Javascript | add infrastructure test | b6138c18a10318fb0834663ef081250505ca1485 | <ide><path>test/Compiler.test.js
<ide> describe("Compiler", () => {
<ide> done();
<ide> });
<ide> });
<add> describe("infrastructure logging", () => {
<add> const CONSOLE_METHODS = [
<add> "error",
<add> "warn",
<add> "info",
<add> "log",
<add> "debug",
<add> "trace",
<add> "profile",
<add> "profileEnd",
<add> "group",
<add> "groupEnd",
<add> "groupCollapsed"
<add> ];
<add> const spies = {};
<add> beforeEach(() => {
<add> for (const method of CONSOLE_METHODS) {
<add> if (console[method]) {
<add> spies[method] = jest.spyOn(console, method).mockImplementation();
<add> }
<add> }
<add> });
<add> afterEach(() => {
<add> for (const method in spies) {
<add> spies[method].mockRestore();
<add> delete spies[method];
<add> }
<add> });
<add> it("should log to the console", done => {
<add> class MyPlugin {
<add> apply(compiler) {
<add> const logger = compiler.getInfrastructureLogger("MyPlugin");
<add> logger.group("Group");
<add> logger.error("Error");
<add> logger.warn("Warning");
<add> logger.info("Info");
<add> logger.log("Log");
<add> logger.debug("Debug");
<add>        logger.groupCollapsed("Collapsed group");
<add> logger.log("Log inside collapsed group");
<add> logger.groupEnd();
<add> logger.groupEnd();
<add> }
<add> }
<add> const compiler = webpack({
<add> context: path.join(__dirname, "fixtures"),
<add> entry: "./a",
<add> output: {
<add> path: "/",
<add> filename: "bundle.js"
<add> },
<add> infrastructureLogging: {
<add> level: "verbose"
<add> },
<add> plugins: [new MyPlugin()]
<add> });
<add> compiler.outputFileSystem = new MemoryFs();
<add> compiler.run((err, stats) => {
<add> expect(spies.group).toHaveBeenCalledTimes(1);
<add> expect(spies.group).toHaveBeenCalledWith("[MyPlugin] Group");
<add> expect(spies.groupCollapsed).toHaveBeenCalledTimes(1);
<add> expect(spies.groupCollapsed).toHaveBeenCalledWith(
<add>          "[MyPlugin] Collapsed group"
<add> );
<add> expect(spies.error).toHaveBeenCalledTimes(1);
<add> expect(spies.error).toHaveBeenCalledWith("<e> [MyPlugin] Error");
<add> expect(spies.warn).toHaveBeenCalledTimes(1);
<add> expect(spies.warn).toHaveBeenCalledWith("<w> [MyPlugin] Warning");
<add> expect(spies.info).toHaveBeenCalledTimes(1);
<add> expect(spies.info).toHaveBeenCalledWith("<i> [MyPlugin] Info");
<add> expect(spies.log).toHaveBeenCalledTimes(2);
<add> expect(spies.log).toHaveBeenCalledWith("[MyPlugin] Log");
<add> expect(spies.log).toHaveBeenCalledWith(
<add> "[MyPlugin] Log inside collapsed group"
<add> );
<add> expect(spies.debug).toHaveBeenCalledTimes(0);
<add> expect(spies.groupEnd).toHaveBeenCalledTimes(2);
<add> done();
<add> });
<add> });
<add> });
<ide> }); | 1 |
Python | Python | fix tf ctrl model naming | fc64559c4583db4e38ce50a976c8d935b124cf67 | <ide><path>setup.py
<ide>
<ide> # keras2onnx and onnxconverter-common version is specific through a commit until 1.7.0 lands on pypi
<ide> extras["tf"] = [
<del> "tensorflow<=2.2",
<add> "tensorflow",
<ide> # "onnxconverter-common",
<ide> # "keras2onnx"
<ide> "onnxconverter-common @ git+git://github.com/microsoft/onnxconverter-common.git@f64ca15989b6dc95a1f3507ff6e4c395ba12dff5#egg=onnxconverter-common",
<ide> "keras2onnx @ git+git://github.com/onnx/keras-onnx.git@cbdc75cb950b16db7f0a67be96a278f8d2953b48#egg=keras2onnx"
<ide> ]
<ide> extras["tf-cpu"] = [
<del> "tensorflow-cpu<=2.2",
<add> "tensorflow-cpu",
<ide> # "onnxconverter-common",
<ide> # "keras2onnx"
<ide> "onnxconverter-common @ git+git://github.com/microsoft/onnxconverter-common.git@f64ca15989b6dc95a1f3507ff6e4c395ba12dff5#egg=onnxconverter-common",
<ide> extras["torch"] = ["torch"]
<ide>
<ide> extras["serving"] = ["pydantic", "uvicorn", "fastapi", "starlette"]
<del>extras["all"] = extras["serving"] + ["tensorflow<=2.2", "torch"]
<add>extras["all"] = extras["serving"] + ["tensorflow", "torch"]
<ide>
<ide> extras["testing"] = ["pytest", "pytest-xdist", "timeout-decorator", "psutil"]
<ide> # sphinx-rtd-theme==0.5.0 introduced big changes in the style.
<ide> "isort @ git+git://github.com/timothycrosley/isort.git@e63ae06ec7d70b06df9e528357650281a3d3ec22#egg=isort",
<ide> "flake8",
<ide> ]
<del>extras["dev"] = extras["testing"] + extras["quality"] + ["mecab-python3<1", "scikit-learn", "tensorflow<=2.2", "torch"]
<add>extras["dev"] = extras["testing"] + extras["quality"] + ["mecab-python3<1", "scikit-learn", "tensorflow", "torch"]
<ide>
<ide> setup(
<ide> name="transformers",
<ide><path>src/transformers/modeling_tf_ctrl.py
<ide> def call(self, inputs, training=False):
<ide> return outputs
<ide>
<ide>
<del>def point_wise_feed_forward_network(d_model_size, dff, name=""):
<del> return tf.keras.Sequential(
<del> [tf.keras.layers.Dense(dff, activation="relu", name="0"), tf.keras.layers.Dense(d_model_size, name="2")],
<del> name="ffn",
<del> )
<add>class TFPointWiseFeedForwardLayer(tf.keras.layers.Layer):
<add> def __init__(self, d_model_size, dff, **kwargs):
<add> super().__init__(**kwargs)
<add>
<add> self.dense_0 = tf.keras.layers.Dense(dff, activation="relu", name="0")
<add> self.dense_2 = tf.keras.layers.Dense(d_model_size, name="2")
<add>
<add> def call(self, inputs, trainable=False):
<add> dense_0_output = self.dense_0(inputs)
<add> dense_2_output = self.dense_2(dense_0_output)
<add>
<add> return dense_2_output
<ide>
<ide>
<ide> class TFEncoderLayer(tf.keras.layers.Layer):
<ide> def __init__(self, d_model_size, num_heads, dff, rate=0.1, layer_norm_epsilon=1e-6, **kwargs):
<ide> super().__init__(**kwargs)
<ide>
<ide> self.multi_head_attention = TFMultiHeadAttention(d_model_size, num_heads, name="multi_head_attention")
<del> self.ffn = point_wise_feed_forward_network(d_model_size, dff, name="ffn")
<add> self.ffn = TFPointWiseFeedForwardLayer(d_model_size, dff, name="ffn")
<ide>
<ide> self.layernorm1 = tf.keras.layers.LayerNormalization(epsilon=layer_norm_epsilon, name="layernorm1")
<ide> self.layernorm2 = tf.keras.layers.LayerNormalization(epsilon=layer_norm_epsilon, name="layernorm2") | 2 |
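The refactor above swaps a `tf.keras.Sequential` helper for a named custom layer so the sublayer weight names stay stable. A stripped-down sketch of the same point-wise feed-forward block outside of transformers — it assumes TensorFlow 2 is installed, and the sizes are illustrative:

```python
import tensorflow as tf

class PointWiseFeedForward(tf.keras.layers.Layer):
    """Two stacked Dense layers: expand to dff with ReLU, project back to d_model_size."""

    def __init__(self, d_model_size, dff, **kwargs):
        super().__init__(**kwargs)
        self.dense_0 = tf.keras.layers.Dense(dff, activation="relu", name="0")
        self.dense_2 = tf.keras.layers.Dense(d_model_size, name="2")

    def call(self, inputs):
        return self.dense_2(self.dense_0(inputs))

layer = PointWiseFeedForward(d_model_size=8, dff=32, name="ffn")
out = layer(tf.zeros((2, 4, 8)))
print(out.shape)  # (2, 4, 8)
```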
Text | Text | explain what to do if git push is rejected | d15a5c0fe1380bc33368d08f3cf4564a60146243 | <ide><path>COLLABORATOR_GUIDE.md
<ide> your pull request shows the purple merged status then you should still
<ide> add the "Landed in <commit hash>..<commit hash>" comment if you added
<ide> multiple commits.
<ide>
<add>### Troubleshooting
<add>
<add>Sometimes, when running `git push upstream master`, you may get an error message
<add>like this:
<add>
<add>```console
<add>To https://github.com/nodejs/node
<add> ! [rejected] master -> master (fetch first)
<add>error: failed to push some refs to 'https://github.com/nodejs/node'
<add>hint: Updates were rejected because the remote contains work that you do
<add>hint: not have locally. This is usually caused by another repository pushing
<add>hint: to the same ref. You may want to first integrate the remote changes
<add>hint: (e.g., 'git pull ...') before pushing again.
<add>hint: See the 'Note about fast-forwards' in 'git push --help' for details.
<add>```
<add>
<add>That means a commit has landed since your last rebase against `upstream/master`.
<add>To fix this, fetch, rebase, run the tests again (to make sure no interactions
<add>between your changes and the new changes cause any problems), and push again:
<add>
<add>```sh
<add>git fetch upstream
<add>git rebase upstream/master
<add>make -j4 test
<add>git push upstream master
<add>```
<add>
<ide> ### I Just Made a Mistake
<ide>
<ide> * Ping a CTC member. | 1 |
Python | Python | restore proper import for httperror | 7c6812645afa8ac8ff0c264a8d86f487aa4ed33b | <ide><path>transformers/commands/user.py
<ide> from getpass import getpass
<ide> from typing import List, Union
<ide>
<add>from requests.exceptions import HTTPError
<add>
<ide> from transformers.commands import BaseTransformersCLICommand
<del>from transformers.hf_api import HfApi, HfFolder, HTTPError
<add>from transformers.hf_api import HfApi, HfFolder
<ide>
<ide>
<ide> class UserCommands(BaseTransformersCLICommand):
<ide><path>transformers/tests/hf_api_test.py
<ide>
<ide> import requests
<ide> import six
<add>from requests.exceptions import HTTPError
<ide>
<del>from transformers.hf_api import HfApi, HfFolder, HTTPError, PresignedUrl, S3Obj
<add>from transformers.hf_api import HfApi, HfFolder, PresignedUrl, S3Obj
<ide>
<ide>
<ide> USER = "__DUMMY_TRANSFORMERS_USER__" | 2 |
Python | Python | fix t5 special tokens | b93569457fd758a60f15d94ac7b3ba3a245096c0 | <ide><path>src/transformers/tokenization_t5.py
<ide> def _convert_id_to_token(self, index):
<ide>
<ide> def convert_tokens_to_string(self, tokens):
<ide> """ Converts a sequence of tokens (string) in a single string. """
<del> out_string = self.sp_model.decode_pieces(tokens)
<del> return out_string
<add> current_sub_tokens = []
<add> out_string = ""
<add> for token in tokens:
<add> # make sure that special tokens are not decoded using sentencepiece model
<add> if token in self.all_special_tokens:
<add> out_string += self.sp_model.decode_pieces(current_sub_tokens) + token + " "
<add> current_sub_tokens = []
<add> else:
<add> current_sub_tokens.append(token)
<add> out_string += self.sp_model.decode_pieces(current_sub_tokens)
<add> return out_string.strip()
<ide>
<ide> def save_vocabulary(self, save_directory: str, filename_prefix: Optional[str] = None) -> Tuple[str]:
<ide> if not os.path.isdir(save_directory):
<ide><path>tests/test_tokenization_t5.py
<ide> def test_eos_in_input(self):
<ide>
<ide> self.assertEqual(expected_src_tokens, src_ids)
<ide> self.assertEqual(expected_tgt_tokens, tgt_ids)
<add>
<add> def test_fast_and_slow_same_result(self):
<add> src_text = "<pad> Today is <unk> nice day </s>"
<add> tgt_ids = [0, 1960, 19, 2, 1245, 239, 1]
<add> tgt_text = "<pad> Today is<unk> nice day</s>"
<add>
<add> fast_ids = self.t5_base_tokenizer_fast(src_text, add_special_tokens=False).input_ids
<add> slow_ids = self.t5_base_tokenizer(src_text, add_special_tokens=False).input_ids
<add> self.assertEqual(tgt_ids, fast_ids)
<add> self.assertEqual(tgt_ids, slow_ids)
<add>
<add> fast_text = self.t5_base_tokenizer_fast.decode(fast_ids)
<add> slow_text = self.t5_base_tokenizer.decode(fast_ids)
<add> self.assertEqual(tgt_text, fast_text)
<add> self.assertEqual(tgt_text, slow_text) | 2 |
Python | Python | fix imagepullpolicy missing in tests | 6f246b0d54ccaf733b7c5951a8955adda6719acb | <ide><path>kubernetes_tests/test_kubernetes_pod_operator.py
<ide> def setUp(self):
<ide> 'containers': [
<ide> {
<ide> 'image': 'ubuntu:16.04',
<add> 'imagePullPolicy': 'IfNotPresent',
<ide> 'args': ["echo 10"],
<ide> 'command': ["bash", "-cx"],
<ide> 'env': [],
<ide> def test_pod_template_file(
<ide> 'env': [],
<ide> 'envFrom': [],
<ide> 'image': 'apache/airflow:stress-2020.07.10-1.0.4',
<add> 'imagePullPolicy': 'IfNotPresent',
<ide> 'name': 'base',
<ide> 'ports': [],
<ide> 'resources': {'limits': {'memory': '200Mi'}, 'requests': {'memory': '100Mi'}},
<ide><path>kubernetes_tests/test_kubernetes_pod_operator_backcompat.py
<ide> def setUp(self):
<ide> 'containers': [
<ide> {
<ide> 'image': 'ubuntu:16.04',
<add> 'imagePullPolicy': 'IfNotPresent',
<ide> 'args': ["echo 10"],
<ide> 'command': ["bash", "-cx"],
<ide> 'env': [], | 2 |
PHP | PHP | use absolute class names for lang facade | ef98884b29306bf8bdbb918a4a668587ed676745 | <ide><path>src/Illuminate/View/Compilers/BladeCompiler.php
<ide> protected function compileLanguage($value)
<ide> {
<ide> $pattern = $this->createMatcher('lang');
<ide>
<del> $value = preg_replace($pattern, '$1<?php echo Lang::get$2; ?>', $value);
<add> $value = preg_replace($pattern, '$1<?php echo \Illuminate\Support\Facades\Lang::get$2; ?>', $value);
<ide>
<ide> $pattern = $this->createMatcher('choice');
<ide>
<del> return preg_replace($pattern, '$1<?php echo Lang::choice$2; ?>', $value);
<add> return preg_replace($pattern, '$1<?php echo \Illuminate\Support\Facades\Lang::choice$2; ?>', $value);
<ide> }
<ide>
<ide> /** | 1 |
Javascript | Javascript | add attempt logging for verifypython() | 2c6c14d1977b9f92dbeb702e3b89a652f9fadbe6 | <ide><path>script/lib/verify-machine-requirements.js
<ide> function verifyPython() {
<ide> var stdout;
<ide> var fullVersion;
<ide> var usablePythonWasFound;
<add> var triedLog = '';
<ide>
<ide> function verifyBinary(binary, prependFlag) {
<ide> if (binary && !usablePythonWasFound) {
<ide> function verifyPython() {
<ide> stdout = '';
<ide> }
<ide> }
<add>
<add> // Prepare to log which commands were tried, and the results, in case no usable Python can be found.
<add> if (prependFlag) {
<add> var binaryPlusFlag = binary.concat(' ' + prependFlag);
<add> } else {
<add> var binaryPlusFlag = binary;
<add> }
<add> triedLog = triedLog.concat('log message: tried to check version of "' + binaryPlusFlag + '", got: ' + fullVersion + '\n');
<ide> }
<ide> }
<ide>
<ide> function verifyPython() {
<ide> console.log(`Python:\tv${fullVersion}`);
<ide> } else {
<ide> throw new Error(
<del> 'Python 2.7 or 3.5+ is required to build Atom.\n' +
<del> 'verify-machine-requirements.js was unable to find such a version of Python.\n' +
<del> "Set the PYTHON env var to e.g. 'C:/path/to/Python27/python.exe'\n" +
<del> 'if your Python is installed in a non-default location.\n'
<add> `\n${triedLog}\n` +
<add> 'Python 2.7 or 3.5+ is required to build Atom.\n' +
<add> 'verify-machine-requirements.js was unable to find such a version of Python.\n' +
<add> "Set the PYTHON env var to e.g. 'C:/path/to/Python27/python.exe'\n" +
<add> 'if your Python is installed in a non-default location.\n'
<ide> );
<ide> }
<ide> } | 1 |
Java | Java | fix checkstyle violation | 9a71a8d357caf94ba71236a01ecc786c0a7f308d | <ide><path>spring-tx/src/test/java/org/springframework/transaction/event/TransactionalEventListenerTests.java
<ide> import org.springframework.util.LinkedMultiValueMap;
<ide> import org.springframework.util.MultiValueMap;
<ide>
<del>import static org.assertj.core.api.Assertions.*;
<del>import static org.springframework.transaction.event.TransactionPhase.*;
<add>import static org.assertj.core.api.Assertions.assertThat;
<add>import static org.assertj.core.api.Assertions.assertThatIllegalStateException;
<add>import static org.springframework.transaction.event.TransactionPhase.AFTER_COMMIT;
<add>import static org.springframework.transaction.event.TransactionPhase.AFTER_COMPLETION;
<add>import static org.springframework.transaction.event.TransactionPhase.AFTER_ROLLBACK;
<add>import static org.springframework.transaction.event.TransactionPhase.BEFORE_COMMIT;
<ide>
<ide> /**
<ide> * Integration tests for {@link TransactionalEventListener} support | 1 |
Java | Java | favor scriptexception over sqlexception | 92eb99a5abaaf43f4fddeaf00020dad2f3dd73eb | <ide><path>spring-jdbc/src/main/java/org/springframework/jdbc/datasource/init/CompositeDatabasePopulator.java
<ide> * See the License for the specific language governing permissions and
<ide> * limitations under the License.
<ide> */
<add>
<ide> package org.springframework.jdbc.datasource.init;
<ide>
<ide> import java.sql.Connection;
<ide> import java.sql.SQLException;
<add>
<ide> import java.util.ArrayList;
<ide> import java.util.Arrays;
<ide> import java.util.List;
<ide> public void setPopulators(DatabasePopulator... populators) {
<ide> }
<ide>
<ide> /**
<del> * Add a populator to the list of delegates.
<add> * Add one or more populators to the list of delegates.
<ide> */
<ide> public void addPopulators(DatabasePopulator... populators) {
<ide> this.populators.addAll(Arrays.asList(populators));
<ide><path>spring-jdbc/src/main/java/org/springframework/jdbc/datasource/init/DatabasePopulator.java
<ide> public interface DatabasePopulator {
<ide>
<ide> /**
<del> * Populate the database using the JDBC connection provided.
<add> * Populate the database using the provided JDBC connection.
<add> * <p>Concrete implementations <em>may</em> throw an {@link SQLException} if
<add> * an error is encountered but are <em>strongly encouraged</em> to throw a
<add> * specific {@link ScriptException} instead. For example, Spring's
<add> * {@link ResourceDatabasePopulator} and {@link DatabasePopulatorUtils} wrap
<add> * all {@code SQLExceptions} in {@code ScriptExceptions}.
<ide> * @param connection the JDBC connection to use to populate the db; already
<ide> * configured and ready to use
<ide> * @throws SQLException if an unrecoverable data access exception occurs
<ide><path>spring-jdbc/src/main/java/org/springframework/jdbc/datasource/init/ResourceDatabasePopulator.java
<ide> package org.springframework.jdbc.datasource.init;
<ide>
<ide> import java.sql.Connection;
<del>import java.sql.SQLException;
<ide> import java.util.ArrayList;
<ide> import java.util.Arrays;
<ide> import java.util.List;
<ide> public void setIgnoreFailedDrops(boolean ignoreFailedDrops) {
<ide> * {@inheritDoc}
<ide> */
<ide> @Override
<del> public void populate(Connection connection) throws SQLException, ScriptException {
<add> public void populate(Connection connection) throws ScriptException {
<ide> for (Resource script : this.scripts) {
<ide> ScriptUtils.executeSqlScript(connection, encodeScript(script), this.continueOnError,
<ide> this.ignoreFailedDrops, this.commentPrefix, this.separator, this.blockCommentStartDelimiter,
<ide><path>spring-jdbc/src/main/java/org/springframework/jdbc/datasource/init/ScriptUtils.java
<ide> private ScriptUtils() {
<ide> * @param script the SQL script
<ide> * @param separator character separating each statement — typically a ';'
<ide> * @param statements the list that will contain the individual statements
<add> * @throws ScriptException if an error occurred while splitting the SQL script
<ide> * @see #splitSqlScript(String, String, List)
<ide> * @see #splitSqlScript(EncodedResource, String, String, String, String, String, List)
<ide> */
<ide> public static void splitSqlScript(String script, char separator, List<String> st
<ide> * @param script the SQL script
<ide> * @param separator text separating each statement — typically a ';' or newline character
<ide> * @param statements the list that will contain the individual statements
<add> * @throws ScriptException if an error occurred while splitting the SQL script
<ide> * @see #splitSqlScript(String, char, List)
<ide> * @see #splitSqlScript(EncodedResource, String, String, String, String, String, List)
<ide> */
<ide> public static void splitSqlScript(String script, String separator, List<String>
<ide> * @param blockCommentEndDelimiter the <em>end</em> block comment delimiter;
<ide> * never {@code null} or empty
<ide> * @param statements the list that will contain the individual statements
<add> * @throws ScriptException if an error occurred while splitting the SQL script
<ide> */
<ide> public static void splitSqlScript(EncodedResource resource, String script, String separator, String commentPrefix,
<ide> String blockCommentStartDelimiter, String blockCommentEndDelimiter, List<String> statements)
<ide> static String readScript(EncodedResource resource) throws IOException {
<ide> * typically "--"
<ide> * @param separator the statement separator in the SQL script — typically ";"
<ide> * @return a {@code String} containing the script lines
<add> * @throws IOException in case of I/O errors
<ide> */
<ide> private static String readScript(EncodedResource resource, String commentPrefix, String separator)
<ide> throws IOException {
<ide> private static String readScript(EncodedResource resource, String commentPrefix,
<ide> * typically "--"
<ide> * @param separator the statement separator in the SQL script — typically ";"
<ide> * @return a {@code String} containing the script lines
<add> * @throws IOException in case of I/O errors
<ide> */
<ide> public static String readScript(LineNumberReader lineNumberReader, String commentPrefix, String separator)
<ide> throws IOException {
<ide> public static boolean containsSqlScriptDelimiters(String script, String delim) {
<ide> * configured and ready to use
<ide> * @param resource the resource to load the SQL script from; encoded with the
<ide> * current platform's default encoding
<add> * @throws ScriptException if an error occurred while executing the SQL script
<ide> * @see #executeSqlScript(Connection, EncodedResource, boolean, boolean, String, String, String, String)
<ide> * @see #DEFAULT_COMMENT_PREFIX
<ide> * @see #DEFAULT_STATEMENT_SEPARATOR
<ide> * @see #DEFAULT_BLOCK_COMMENT_START_DELIMITER
<ide> * @see #DEFAULT_BLOCK_COMMENT_END_DELIMITER
<ide> */
<del> public static void executeSqlScript(Connection connection, Resource resource) throws SQLException, ScriptException {
<add> public static void executeSqlScript(Connection connection, Resource resource) throws ScriptException {
<ide> executeSqlScript(connection, new EncodedResource(resource));
<ide> }
<ide>
<ide> public static void executeSqlScript(Connection connection, Resource resource) th
<ide> * configured and ready to use
<ide> * @param resource the resource (potentially associated with a specific encoding)
<ide> * to load the SQL script from
<add> * @throws ScriptException if an error occurred while executing the SQL script
<ide> * @see #executeSqlScript(Connection, EncodedResource, boolean, boolean, String, String, String, String)
<ide> * @see #DEFAULT_COMMENT_PREFIX
<ide> * @see #DEFAULT_STATEMENT_SEPARATOR
<ide> * @see #DEFAULT_BLOCK_COMMENT_START_DELIMITER
<ide> * @see #DEFAULT_BLOCK_COMMENT_END_DELIMITER
<ide> */
<del> public static void executeSqlScript(Connection connection, EncodedResource resource) throws SQLException,
<del> ScriptException {
<add> public static void executeSqlScript(Connection connection, EncodedResource resource) throws ScriptException {
<ide> executeSqlScript(connection, resource, false, false, DEFAULT_COMMENT_PREFIX, DEFAULT_STATEMENT_SEPARATOR,
<ide> DEFAULT_BLOCK_COMMENT_START_DELIMITER, DEFAULT_BLOCK_COMMENT_END_DELIMITER);
<ide> }
<ide> public static void executeSqlScript(Connection connection, EncodedResource resou
<ide> * {@code null} or empty
<ide> * @param blockCommentEndDelimiter the <em>end</em> block comment delimiter; never
<ide> * {@code null} or empty
<add> * @throws ScriptException if an error occurred while executing the SQL script
<ide> */
<ide> public static void executeSqlScript(Connection connection, EncodedResource resource, boolean continueOnError,
<ide> boolean ignoreFailedDrops, String commentPrefix, String separator, String blockCommentStartDelimiter,
<del> String blockCommentEndDelimiter) throws SQLException, ScriptException {
<add> String blockCommentEndDelimiter) throws ScriptException {
<ide>
<del> if (logger.isInfoEnabled()) {
<del> logger.info("Executing SQL script from " + resource);
<del> }
<del> long startTime = System.currentTimeMillis();
<del> List<String> statements = new LinkedList<String>();
<del> String script;
<ide> try {
<del> script = readScript(resource, commentPrefix, separator);
<del> }
<del> catch (IOException ex) {
<del> throw new CannotReadScriptException(resource, ex);
<del> }
<add> if (logger.isInfoEnabled()) {
<add> logger.info("Executing SQL script from " + resource);
<add> }
<ide>
<del> if (separator == null) {
<del> separator = DEFAULT_STATEMENT_SEPARATOR;
<del> }
<del> if (!containsSqlScriptDelimiters(script, separator)) {
<del> separator = FALLBACK_STATEMENT_SEPARATOR;
<del> }
<add> long startTime = System.currentTimeMillis();
<add> List<String> statements = new LinkedList<String>();
<add> String script;
<add> try {
<add> script = readScript(resource, commentPrefix, separator);
<add> }
<add> catch (IOException ex) {
<add> throw new CannotReadScriptException(resource, ex);
<add> }
<ide>
<del> splitSqlScript(resource, script, separator, commentPrefix, blockCommentStartDelimiter,
<del> blockCommentEndDelimiter, statements);
<del> int lineNumber = 0;
<del> Statement stmt = connection.createStatement();
<del> try {
<del> for (String statement : statements) {
<del> lineNumber++;
<del> try {
<del> stmt.execute(statement);
<del> int rowsAffected = stmt.getUpdateCount();
<del> if (logger.isDebugEnabled()) {
<del> logger.debug(rowsAffected + " returned as updateCount for SQL: " + statement);
<del> }
<del> }
<del> catch (SQLException ex) {
<del> boolean dropStatement = StringUtils.startsWithIgnoreCase(statement.trim(), "drop");
<del> if (continueOnError || (dropStatement && ignoreFailedDrops)) {
<add> if (separator == null) {
<add> separator = DEFAULT_STATEMENT_SEPARATOR;
<add> }
<add> if (!containsSqlScriptDelimiters(script, separator)) {
<add> separator = FALLBACK_STATEMENT_SEPARATOR;
<add> }
<add>
<add> splitSqlScript(resource, script, separator, commentPrefix, blockCommentStartDelimiter,
<add> blockCommentEndDelimiter, statements);
<add> int lineNumber = 0;
<add> Statement stmt = connection.createStatement();
<add> try {
<add> for (String statement : statements) {
<add> lineNumber++;
<add> try {
<add> stmt.execute(statement);
<add> int rowsAffected = stmt.getUpdateCount();
<ide> if (logger.isDebugEnabled()) {
<del> logger.debug("Failed to execute SQL script statement at line " + lineNumber
<del> + " of resource " + resource + ": " + statement, ex);
<add> logger.debug(rowsAffected + " returned as updateCount for SQL: " + statement);
<ide> }
<ide> }
<del> else {
<del> throw new ScriptStatementFailedException(statement, lineNumber, resource, ex);
<add> catch (SQLException ex) {
<add> boolean dropStatement = StringUtils.startsWithIgnoreCase(statement.trim(), "drop");
<add> if (continueOnError || (dropStatement && ignoreFailedDrops)) {
<add> if (logger.isDebugEnabled()) {
<add> logger.debug("Failed to execute SQL script statement at line " + lineNumber
<add> + " of resource " + resource + ": " + statement, ex);
<add> }
<add> }
<add> else {
<add> throw new ScriptStatementFailedException(statement, lineNumber, resource, ex);
<add> }
<ide> }
<ide> }
<ide> }
<del> }
<del> finally {
<del> try {
<del> stmt.close();
<add> finally {
<add> try {
<add> stmt.close();
<add> }
<add> catch (Throwable ex) {
<add> logger.debug("Could not close JDBC Statement", ex);
<add> }
<ide> }
<del> catch (Throwable ex) {
<del> logger.debug("Could not close JDBC Statement", ex);
<add>
<add> long elapsedTime = System.currentTimeMillis() - startTime;
<add> if (logger.isInfoEnabled()) {
<add> logger.info("Executed SQL script from " + resource + " in " + elapsedTime + " ms.");
<ide> }
<ide> }
<add> catch (Exception ex) {
<add> if (ex instanceof ScriptException) {
<add> throw (ScriptException) ex;
<add> }
<ide>
<del> long elapsedTime = System.currentTimeMillis() - startTime;
<del> if (logger.isInfoEnabled()) {
<del> logger.info("Executed SQL script from " + resource + " in " + elapsedTime + " ms.");
<add> throw new UncategorizedScriptException(
<add> "Failed to execute database script from resource [" + resource + "]", ex);
<ide> }
<ide> }
<ide> | 4 |
Ruby | Ruby | make current_page? compare binary strings | b3c0858f732da157195ad3e2dec470791c754cfe | <ide><path>actionview/lib/action_view/helpers/url_helper.rb
<ide> def current_page?(options)
<ide>
<ide> return false unless request.get? || request.head?
<ide>
<del> url_string = url_for(options)
<add> url_string = URI.unescape(url_for(options)).force_encoding(Encoding::BINARY)
<ide>
<ide> # We ignore any extra parameters in the request_uri if the
<ide> # submitted url doesn't have any either. This lets the function
<ide> # work with things like ?order=asc
<ide> request_uri = url_string.index("?") ? request.fullpath : request.path
<add> request_uri = URI.unescape(request_uri).force_encoding(Encoding::BINARY)
<ide>
<ide> if url_string =~ /^\w+:\/\//
<del> URI.unescape(url_string) == URI.unescape("#{request.protocol}#{request.host_with_port}#{request_uri}")
<add> url_string == "#{request.protocol}#{request.host_with_port}#{request_uri}"
<ide> else
<del> URI.unescape(url_string) == URI.unescape(request_uri)
<add> url_string == request_uri
<ide> end
<ide> end
<ide>
<ide><path>actionview/test/template/url_helper_test.rb
<ide> def test_current_page_with_escaped_params
<ide> assert current_page?(controller: 'foo', action: 'category', category: 'administração')
<ide> end
<ide>
<add> def test_current_page_with_escaped_params_with_different_encoding
<add> @request = request_for_url("/")
<add> @request.stub(:path, "/category/administra%c3%a7%c3%a3o".force_encoding(Encoding::ASCII_8BIT)) do
<add> assert current_page?(:controller => 'foo', :action => 'category', category: 'administração')
<add> assert current_page?("http://www.example.com/category/administra%c3%a7%c3%a3o")
<add> end
<add> end
<add>
<ide> def test_current_page_with_double_escaped_params
<ide> @request = request_for_url("/category/administra%c3%a7%c3%a3o?callback_url=http%3a%2f%2fexample.com%2ffoo")
<ide> | 2 |
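The new assertions above call the helper exactly as an application view would. A minimal sketch of that usage, assuming a route that maps `controller: 'foo', action: 'category'` to the encoded path shown in the test (the route itself is an assumption, not part of the patch):

```ruby
# Both calls are compared against the unescaped request path as binary
# strings after this change, so encoding differences no longer matter.
current_page?(controller: 'foo', action: 'category', category: 'administração')
current_page?("http://www.example.com/category/administra%c3%a7%c3%a3o")
```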
Mixed | Javascript | throw if common.port used in parallel tests | 800ce94e5cdaa8dba7cf69b4c560a66a51ae0ec8 | <ide><path>test/common/index.js
<ide> const noop = () => {};
<ide> // gets tools to ignore it by default or by simple rules, especially eslint.
<ide> let tmpDirName = '.tmp';
<ide>
<del>exports.PORT = +process.env.NODE_COMMON_PORT || 12346;
<add>Object.defineProperty(exports, 'PORT', {
<add> get: () => {
<add> if (+process.env.TEST_PARALLEL) {
<add> throw new Error('common.PORT cannot be used in a parallelized test');
<add> }
<add> return +process.env.NODE_COMMON_PORT || 12346;
<add> },
<add> enumerable: true
<add>});
<add>
<ide>
<ide> exports.isWindows = process.platform === 'win32';
<ide> exports.isWOW64 = exports.isWindows &&
<ide><path>tools/test.py
<ide> def Run(self):
<ide>
<ide> try:
<ide> result = self.RunCommand(self.GetCommand(), {
<del> "TEST_THREAD_ID": "%d" % self.thread_id
<add> "TEST_THREAD_ID": "%d" % self.thread_id,
<add> "TEST_PARALLEL" : "%d" % self.parallel
<ide> })
<ide> finally:
<ide> # Tests can leave the tty in non-blocking mode. If the test runner | 2 |
Text | Text | remove confusion in the sentence [ci skip] | 8831155f64fcccf76ab33a9f438c295b55547fd3 | <ide><path>guides/source/autoloading_and_reloading_constants.md
<ide> is not entirely equivalent to the one of the body of the definitions using the
<ide> assignment.
<ide>
<ide> Thus, when one informally says "the `String` class", that really means: the
<del>class object stored in the constant called "String" in the class object stored
<del>in the `Object` constant. `String` is otherwise an ordinary Ruby constant and
<del>everything related to constants such as resolution algorithms applies to it.
<add>class object stored in the constant called "String", and this "String" constant
<add>is stored in the `Object` class. `String` is otherwise an ordinary Ruby constant
<add>and everything related to constants such as resolution algorithms applies to it.
<ide>
<ide> Likewise, in the controller
<ide> | 1 |
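The reworded paragraph describes plain Ruby behavior and can be checked in irb. A tiny sketch, standard library only, nothing specific to Rails:

```ruby
# "String" really is an ordinary constant registered on Object.
Object.const_defined?(:String)            # => true
Object.const_get(:String).equal?(String)  # => true
```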
Java | Java | turn flushingdatabuffer to an empty databuffer | 6b3d5f1bc57a2285abce83121f516250422e03b6 | <ide><path>spring-web-reactive/src/main/java/org/springframework/core/io/buffer/FlushingDataBuffer.java
<ide> import java.nio.ByteBuffer;
<ide> import java.util.function.IntPredicate;
<ide>
<del>import org.springframework.util.Assert;
<del>
<ide> /**
<del> * {@link DataBuffer} wrapper that indicates the file or the socket writing this buffer
<del> * should be flushed.
<add> * Empty {@link DataBuffer} that indicates to the file or the socket writing it that
<add> * previously buffered data should be flushed.
<ide> *
<ide> * @author Sebastien Deleuze
<add> * @see FlushingDataBuffer#INSTANCE
<ide> */
<ide> public class FlushingDataBuffer implements DataBuffer {
<ide>
<add> /** Singleton instance of this class */
<add> public static final FlushingDataBuffer INSTANCE = new FlushingDataBuffer();
<add>
<ide> private final DataBuffer buffer;
<ide>
<del> public FlushingDataBuffer() {
<add>
<add> private FlushingDataBuffer() {
<ide> this.buffer = new DefaultDataBufferFactory().allocateBuffer(0);
<ide> }
<ide>
<del> public FlushingDataBuffer(DataBuffer buffer) {
<del> Assert.notNull(buffer);
<del> this.buffer = buffer;
<del> }
<ide>
<ide> @Override
<ide> public DataBufferFactory factory() {
<ide><path>spring-web-reactive/src/main/java/org/springframework/http/codec/SseEventEncoder.java
<ide> public Flux<DataBuffer> encode(Publisher<?> inputStream, DataBufferFactory buffe
<ide> return Flux.concat(
<ide> encodeString(sb.toString(), bufferFactory),
<ide> dataBuffer,
<del> encodeString("\n", bufferFactory).map(b -> new FlushingDataBuffer(b))
<add> encodeString("\n", bufferFactory),
<add> Mono.just(FlushingDataBuffer.INSTANCE)
<ide> );
<ide> });
<ide>
<ide><path>spring-web-reactive/src/test/java/org/springframework/core/codec/support/SseEventEncoderTests.java
<ide>
<ide> import java.util.Arrays;
<ide>
<add>import static org.junit.Assert.*;
<ide> import org.junit.Test;
<ide> import reactor.core.publisher.Flux;
<ide> import reactor.core.publisher.Mono;
<ide> import org.springframework.core.ResolvableType;
<ide> import org.springframework.core.io.buffer.AbstractDataBufferAllocatingTestCase;
<ide> import org.springframework.core.io.buffer.DataBuffer;
<add>import org.springframework.core.io.buffer.FlushingDataBuffer;
<ide> import org.springframework.http.codec.SseEventEncoder;
<ide> import org.springframework.util.MimeType;
<ide> import org.springframework.web.reactive.sse.SseEvent;
<ide>
<del>import static org.junit.Assert.assertFalse;
<del>import static org.junit.Assert.assertTrue;
<del>
<del>
<ide> /**
<ide> * @author Sebastien Deleuze
<ide> */
<ide> public void encodeServerSentEvent() {
<ide> "event:foo\n" +
<ide> "retry:123\n" +
<ide> ":bla\n:bla bla\n:bla bla bla\n"),
<del> stringConsumer("\n")
<add> stringConsumer("\n"),
<add> b -> assertEquals(FlushingDataBuffer.class, b.getClass())
<ide> );
<ide> }
<ide>
<ide> public void encodeString() {
<ide> .assertValuesWith(
<ide> stringConsumer("data:foo\n"),
<ide> stringConsumer("\n"),
<add> b -> assertEquals(FlushingDataBuffer.class, b.getClass()),
<ide> stringConsumer("data:bar\n"),
<del> stringConsumer("\n")
<add> stringConsumer("\n"),
<add> b -> assertEquals(FlushingDataBuffer.class, b.getClass())
<ide> );
<ide> }
<ide>
<ide> public void encodeMultilineString() {
<ide> .assertValuesWith(
<ide> stringConsumer("data:foo\ndata:bar\n"),
<ide> stringConsumer("\n"),
<add> b -> assertEquals(FlushingDataBuffer.class, b.getClass()),
<ide> stringConsumer("data:foo\ndata:baz\n"),
<del> stringConsumer("\n")
<add> stringConsumer("\n"),
<add> b -> assertEquals(FlushingDataBuffer.class, b.getClass())
<ide> );
<ide> }
<ide>
<del>
<ide> @Test
<ide> public void encodePojo() {
<ide> SseEventEncoder encoder = new SseEventEncoder(Arrays.asList(new JacksonJsonEncoder()));
<ide> public void encodePojo() {
<ide> stringConsumer("{\"foo\":\"foofoo\",\"bar\":\"barbar\"}"),
<ide> stringConsumer("\n"),
<ide> stringConsumer("\n"),
<add> b -> assertEquals(FlushingDataBuffer.class, b.getClass()),
<ide> stringConsumer("data:"),
<ide> stringConsumer("{\"foo\":\"foofoofoo\",\"bar\":\"barbarbar\"}"),
<ide> stringConsumer("\n"),
<del> stringConsumer("\n")
<add> stringConsumer("\n"),
<add> b -> assertEquals(FlushingDataBuffer.class, b.getClass())
<ide> );
<ide> }
<ide>
<ide><path>spring-web-reactive/src/test/java/org/springframework/http/server/reactive/FlushingIntegrationTests.java
<ide> private static class FlushingHandler implements HttpHandler {
<ide> public Mono<Void> handle(ServerHttpRequest request, ServerHttpResponse response) {
<ide> Flux<DataBuffer> responseBody = Flux
<ide> .interval(50)
<del> .take(2)
<del> .concatWith(Flux.never())
<ide> .map(l -> {
<ide> byte[] data = ("data" + l).getBytes();
<ide> DataBuffer buffer = response.bufferFactory().allocateBuffer(data.length);
<ide> buffer.write(data);
<del> return new FlushingDataBuffer(buffer);
<del> });
<add> return buffer;
<add> })
<add> .take(2)
<add> .concatWith(Mono.just(FlushingDataBuffer.INSTANCE))
<add> .concatWith(Flux.never());
<ide> return response.writeWith(responseBody);
<ide> }
<ide> } | 4 |
Python | Python | fix typos (resolves #718) | 1bd53bbf89fe134e20ccb3e6000798c02ef03468 | <ide><path>spacy/en/tokenizer_exceptions.py
<ide> ],
<ide>
<ide> "She's": [
<del> {ORTH: "i", LEMMA: PRON_LEMMA, TAG: "PRP"},
<add> {ORTH: "She", LEMMA: PRON_LEMMA, TAG: "PRP"},
<ide> {ORTH: "'s"}
<ide> ],
<ide>
<ide> ],
<ide>
<ide> "Shedve": [
<del> {ORTH: "i", LEMMA: PRON_LEMMA, TAG: "PRP"},
<add> {ORTH: "She", LEMMA: PRON_LEMMA, TAG: "PRP"},
<ide> {ORTH: "d", LEMMA: "would", TAG: "MD"},
<ide> {ORTH: "ve", LEMMA: "have", TAG: "VB"}
<ide> ], | 1 |
Text | Text | translate 02.1-jsx-in-depth.ja-jp.md to japanese | f460a19d8a53cb7721888ef8974e6f2953e4a9d7 | <ide><path>docs/docs/02.1-jsx-in-depth.ja-JP.md
<add>---
<add>id: jsx-in-depth
<add>title: JSXの深層
<add>permalink: jsx-in-depth-ja-JP.html
<add>prev: displaying-data-ja-JP.html
<add>next: jsx-spread-ja_JP.html
<add>---
<add>
<add>[JSX](https://facebook.github.io/jsx/)はXMLに似たJavaScriptのシンタックスの拡張です。Reactでは、単純なJSXのシンタックスの変換を使うことができます。
<add>
<add>## なぜJSXを使うのでしょうか?
<add>
<add>ReactでJSXの使用を強制されるわけではありません。生のJSを使うこともできます。しかし、JSXは簡潔で、木構造とReactの特性を定義しやすいシンタックスであるため、JSXを使うことをお勧めします。
<add>
<add>デザイナーのようなカジュアルな開発者にとってはさらに馴染みやすいでしょう。
<add>
<add>XMLにはバランスの取れた開始タグと終了タグという利益があります。このことで、関数がオブジェクトリテラルを呼んでいるのを読むよりも簡単に大きな木構造を作ることができます。
<add>
<add>これはJavaScriptのセマンティックスを代替するものではありません。
<add>
<add>## HTMLタグ対Reactコンポーネント
<add>
<add>ReactはHTMLタグ(文字列)とReactコンポーネント(クラス)の両方をレンダリングすることができます。
<add>
<add>以下のようにJSXで小文字のタグ名を使用するだけで、HTMLタグをレンダリングできます。
<add>
<add>```javascript
<add>var myDivElement = <div className="foo" />;
<add>React.render(myDivElement, document.getElementById('example'));
<add>```
<add>
<add>以下のように大文字から始まるローカル変数を作成するだけで、Reactのコンポーネントをレンダリングできます。
<add>
<add>```javascript
<add>var MyComponent = React.createClass({/*...*/});
<add>var myElement = <MyComponent someProperty={true} />;
<add>React.render(myElement, document.getElementById('example'));
<add>```
<add>
<add>ReactのJSXは大文字と小文字を使うことで、ローカルのコンポーネントクラスとHTMLタグを識別する習慣があります。
<add>
<add>> 注意:
<add>>
<add>> JSXはJavaScriptなので、 `class` や `for` といった識別子はXMLの属性名としては使用しません。代わりに、 ReactのDOMコンポーネントはDOMのプロパティ名がそれぞれ `className` や `htmlFor` といったものであることを期待します。
<add>
<add>## The Transform
<add>
<add>ReactのJSXはXMLに似たシンタックスをネイティブなJavaScriptに変換します。XML要素や属性や子要素は `React.createElement` で渡される引数に変換されます。
<add>
<add>```javascript
<add>var Nav;
<add>// 入力 (JSX):
<add>var app = <Nav color="blue" />;
<add>// 出力 (JS):
<add>var app = React.createElement(Nav, {color:"blue"});
<add>```
<add>
<add>`<Nav />` を使うためには、 `Nav` 変数がスコープの中にないといけないことに注意してください。
<add>
<add>以下のように、JSXはXMLシンタックスを使うことで、細かな子要素の使用も許可します。
<add>
<add>```javascript
<add>var Nav, Profile;
<add>// 入力 (JSX):
<add>var app = <Nav color="blue"><Profile>click</Profile></Nav>;
<add>// 出力 (JS):
<add>var app = React.createElement(
<add> Nav,
<add> {color:"blue"},
<add> React.createElement(Profile, null, "click")
<add>);
<add>```
<add>
<add>以下のように、displayNameがundefinedの時には、JSXはクラスの[displayName](/react/docs/component-specs.html#displayname)を変数の割り当てから予測します。
<add>
<add>```javascript
<add>// 入力 (JSX):
<add>var Nav = React.createClass({ });
<add>// 出力 (JS):
<add>var Nav = React.createClass({displayName: "Nav", });
<add>```
<add>
<add>JSXを試し、どのようにネイティブなJavaScriptに変換されるか見るには、[JSX Compiler](/react/jsx-compiler.html)を、すでに存在するHTMLをJSXに変換するには[HTMLからJSXへのコンバーター](/react/html-jsx.html)を使ってください。
<add>
<add>JSXを使いたい場合は、[始めてみましょう](/react/docs/getting-started-ja-JP.html)というガイドがどのようにコンパイルを設定するか示してくれます。
<add>
<add>> 注意:
<add>>
<add>> JSXという表現は常にReactElementを評価します。実際に実行する際の詳細はおそらく異なっているでしょう。最適化されたモードでは `React.createElement` のコードのバリデーションを避けるためにReactElementをオブジェクトリテラルとして配置するでしょう。
<add>
<add>
<add>## ネームスペース化されたコンポーネント
<add>
<add>formのように、たくさんの子要素を持つコンポーネントを構築する際には、以下のように多くの変数を宣言しなければいけないでしょう。
<add>
<add>```javascript
<add>// 変数宣言のあまりよくない部分
<add>var Form = MyFormComponent;
<add>var FormRow = Form.Row;
<add>var FormLabel = Form.Label;
<add>var FormInput = Form.Input;
<add>
<add>var App = (
<add> <Form>
<add> <FormRow>
<add> <FormLabel />
<add> <FormInput />
<add> </FormRow>
<add> </Form>
<add>);
<add>```
<add>
<add>これを単純で簡単にするために、 *ネームスペース化されたコンポーネント* では、以下のように他のコンポーネントを付属物として持つ1つのコンポーネントを使うことができます。
<add>
<add>```javascript
<add>var Form = MyFormComponent;
<add>
<add>var App = (
<add> <Form>
<add> <Form.Row>
<add> <Form.Label />
<add> <Form.Input />
<add> </Form.Row>
<add> </Form>
<add>);
<add>```
<add>
<add>これを行うためには、以下のようにメインコンポーネントの付属物として、「サブコンポーネント」を作るだけで大丈夫です。
<add>
<add>```javascript
<add>var MyFormComponent = React.createClass({ ... });
<add>
<add>MyFormComponent.Row = React.createClass({ ... });
<add>MyFormComponent.Label = React.createClass({ ... });
<add>MyFormComponent.Input = React.createClass({ ... });
<add>```
<add>
<add>JSXはコードをコンパイルする際にこのプロパティをハンドルします。
<add>
<add>```javascript
<add>var App = (
<add> React.createElement(Form, null,
<add> React.createElement(Form.Row, null,
<add> React.createElement(Form.Label, null),
<add> React.createElement(Form.Input, null)
<add> )
<add> )
<add>);
<add>```
<add>
<add>> 注意:
<add>> この特徴は [v0.11](/react/blog/2014/07/17/react-v0.11.html#jsx) 以上で使用できます。
<add>
<add>## JavaScriptの表現
<add>
<add>### アトリビュートの表現
<add>
<add>JavaScriptで書いたものをアトリビュートの値として使うためには、その表現を引用(`""`)ではなく波括弧(`{}`)で囲ってください。
<add>
<add>```javascript
<add>// 入力 (JSX):
<add>var person = <Person name={window.isLoggedIn ? window.name : ''} />;
<add>// 出力 (JS):
<add>var person = React.createElement(
<add> Person,
<add> {name: window.isLoggedIn ? window.name : ''}
<add>);
<add>```
<add>
<add>### Booleanのアトリビュート
<add>
<add>アトリビュートの値を記述しないと、JSXはそれを `true` として扱ってしまいます。`false` を渡すためには、アトリビュートが使われる必要があります。これらはHTMLのform要素の `disabled` 、 `required` 、 `checked` 、 `readOnly` といったアトリビュートを使う際によく見かけられます。
<add>
<add>
<add>```javascript
<add>// 以下の2つはボタンを使用不能にするという意味でJSXでは同義です。
<add><input type="button" disabled />;
<add><input type="button" disabled={true} />;
<add>
<add>// 以下の2つはボタンを使用不能にしないという意味でJSXでは同義です。
<add><input type="button" />;
<add><input type="button" disabled={false} />;
<add>```
<add>
<add>### 子要素の表現
<add>
<add>同様に、JavaScriptは子要素を表現するのに使われることもあります。
<add>
<add>```javascript
<add>// 入力 (JSX):
<add>var content = <Container>{window.isLoggedIn ? <Nav /> : <Login />}</Container>;
<add>// 出力 (JS):
<add>var content = React.createElement(
<add> Container,
<add> null,
<add> window.isLoggedIn ? React.createElement(Nav) : React.createElement(Login)
<add>);
<add>```
<add>
<add>### コメント
<add>
<add>JSXにコメントを加えるのは簡単です。ただのJSの書き方です。タグの内側にコメントを書く時には、 `{}` で囲うことに注意してください。
<add>
<add>```javascript
<add>var content = (
<add> <Nav>
<add> {/* 子要素にコメントを書く時には、 {} で囲う */}
<add> <Person
<add> /* 複数
<add> 行
<add> コメント */
<add> name={window.isLoggedIn ? window.name : ''} // 行末コメント
<add> />
<add> </Nav>
<add>);
<add>```
<add>
<add>> 注意:
<add>> JSXはHTMLに似ていますが、全く同じではありません。いくつかのキーの違いについては[JSXの理解](/react/docs/jsx-gotchas.html) をご覧ください。
<ide>\ No newline at end of file | 1 |
Ruby | Ruby | update asset helpers to use `config.assets.prefix` | 2684f17a17e4f97bdb89d20b4cd08585235263a2 | <ide><path>actionpack/lib/sprockets/helpers/rails_helper.rb
<ide> def debug_assets?
<ide>
<ide> def asset_path(source, default_ext = nil, body = false)
<ide> source = source.logical_path if source.respond_to?(:logical_path)
<del> path = asset_paths.compute_public_path(source, 'assets', default_ext, true)
<add> path = asset_paths.compute_public_path(source, Rails.application.config.assets.prefix, default_ext, true)
<ide> body ? "#{path}?body=1" : path
<ide> end
<ide>
<ide> class AssetPaths < ActionView::Helpers::AssetPaths #:nodoc:
<ide> def compute_public_path(source, dir, ext=nil, include_host=true)
<del> super(source, 'assets', ext, include_host)
<add> super(source, Rails.application.config.assets.prefix, ext, include_host)
<ide> end
<ide>
<ide> def asset_for(source, ext) | 1 |
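After this change the helpers read the mount point from `Rails.application.config.assets.prefix` instead of the hard-coded `'assets'` string. A hedged sketch of overriding that prefix in an application; the module name and the `/asset-files` value are assumptions for illustration only:

```ruby
# config/application.rb (illustrative)
module MyApp
  class Application < Rails::Application
    # Compiled assets are then served from /asset-files/... instead of /assets/...
    config.assets.prefix = "/asset-files"
  end
end
```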
Python | Python | fix bug with inactive user accessing oauth | 74fbd5ccc5b2aa2f0aab25ead5ffa36024079fcf | <ide><path>rest_framework/authentication.py
<ide> from rest_framework import exceptions, HTTP_HEADER_ENCODING
<ide> from rest_framework.compat import CsrfViewMiddleware
<ide> from rest_framework.compat import oauth, oauth_provider, oauth_provider_store
<del>from rest_framework.compat import oauth2_provider, oauth2_provider_forms
<add>from rest_framework.compat import oauth2_provider
<ide> from rest_framework.authtoken.models import Token
<ide>
<ide>
<ide> def authenticate_credentials(self, request, access_token):
<ide> except oauth2_provider.models.AccessToken.DoesNotExist:
<ide> raise exceptions.AuthenticationFailed('Invalid token')
<ide>
<del> if not token.user.is_active:
<add> user = token.user
<add>
<add> if not user.is_active:
<ide> msg = 'User inactive or deleted: %s' % user.username
<ide> raise exceptions.AuthenticationFailed(msg)
<ide>
<del> return (token.user, token)
<add> return (user, token)
<ide>
<ide> def authenticate_header(self, request):
<ide> """ | 1 |
Text | Text | update the rails security guide | f27325d7e5cce3088c746ca91393b5fd95ee8552 | <ide><path>guides/source/security.md
<ide> User.find(session[:user_id])
<ide>
<ide> ### Session id
<ide>
<del>NOTE: _The session id is a 32 byte long MD5 hash value._
<add>NOTE: _The session id is a 32-character random hex string._
<ide>
<del>A session id consists of the hash value of a random string. The random string is the current time, a random number between 0 and 1, the process id number of the Ruby interpreter (also basically a random number) and a constant string. Currently it is not feasible to brute-force Rails' session ids. To date MD5 is uncompromised, but there have been collisions, so it is theoretically possible to create another input text with the same hash value. But this has had no security impact to date.
<add>The session id is generated using `SecureRandom.hex` which generates a random hex string using platform specific methods (such as openssl, /dev/urandom or win32) for generating cryptographically secure random numbers. Currently it is not feasible to brute-force Rails' session ids.
<ide>
<ide> ### Session Hijacking
<ide> | 1 |
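A small sketch of the call the updated guide refers to; `SecureRandom.hex` with its default of 16 random bytes produces the 32-character hex string mentioned above:

```ruby
require 'securerandom'

sid = SecureRandom.hex(16)  # e.g. "f1d2a3..." (random each run)
sid.length                  # => 32
```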
Python | Python | improve debug messages for amp | 8b3aaac72d337940b2af1548314c4e252db6881f | <ide><path>glances/amps/glances_amp.py
<ide> class GlancesAmp(object):
<ide>
<ide> def __init__(self, name=None, args=None):
<ide> """Init AMP classe."""
<del> logger.debug("Init {} version {}".format(self.NAME, self.VERSION))
<add> logger.debug("AMP - Init {} version {}".format(self.NAME, self.VERSION))
<ide>
<ide> # AMP name (= module name without glances_)
<ide> if name is None:
<ide> def load_config(self, config):
<ide> amp_section = 'amp_' + self.amp_name
<ide> if (hasattr(config, 'has_section') and
<ide> config.has_section(amp_section)):
<del> logger.debug("{}: Load configuration".format(self.NAME))
<add> logger.debug("AMP - {}: Load configuration".format(self.NAME))
<ide> for param, _ in config.items(amp_section):
<ide> try:
<ide> self.configs[param] = config.get_float_value(amp_section, param)
<ide> except ValueError:
<ide> self.configs[param] = config.get_value(amp_section, param).split(',')
<ide> if len(self.configs[param]) == 1:
<ide> self.configs[param] = self.configs[param][0]
<del> logger.debug("{}: Load parameter: {} = {}".format(self.NAME, param, self.configs[param]))
<add> logger.debug("AMP - {}: Load parameter: {} = {}".format(self.NAME, param, self.configs[param]))
<ide> else:
<del> logger.debug("{}: Can not find section {} in the configuration file".format(self.NAME, self.amp_name))
<add> logger.debug("AMP - {}: Can not find section {} in the configuration file".format(self.NAME, self.amp_name))
<ide> return False
<ide>
<ide> # enable, regex and refresh are mandatories
<ide> # if not configured then AMP is disabled
<ide> if self.enable():
<ide> for k in ['regex', 'refresh']:
<ide> if k not in self.configs:
<del> logger.warning("{}: Can not find configuration key {} in section {}".format(self.NAME, k, self.amp_name))
<add> logger.warning("AMP - {}: Can not find configuration key {} in section {}".format(self.NAME, k, self.amp_name))
<ide> self.configs['enable'] = 'false'
<ide> else:
<del> logger.debug("{} is disabled".format(self.NAME))
<add> logger.debug("AMP - {} is disabled".format(self.NAME))
<ide>
<ide> # Init the count to 0
<ide> self.configs['count'] = 0 | 1 |
PHP | PHP | prototype an idea for nested label inputs | b66577f4f523e9218659facd0329b41cd46c4190 | <ide><path>src/View/Helper/FormHelper.php
<ide> class FormHelper extends Helper {
<ide> 'templates' => [
<ide> 'button' => '<button{{attrs}}>{{text}}</button>',
<ide> 'checkbox' => '<input type="checkbox" name="{{name}}" value="{{value}}"{{attrs}}>',
<del> 'checkboxFormGroup' => '{{input}}{{label}}',
<add> 'checkboxFormGroup' => '{{label}}',
<ide> 'checkboxWrapper' => '<div class="checkbox">{{input}}{{label}}</div>',
<ide> 'dateWidget' => '{{year}}{{month}}{{day}}{{hour}}{{minute}}{{second}}{{meridian}}',
<ide> 'error' => '<div class="error-message">{{content}}</div>',
<ide> class FormHelper extends Helper {
<ide> 'inputContainer' => '<div class="input {{type}}{{required}}">{{content}}</div>',
<ide> 'inputContainerError' => '<div class="input {{type}}{{required}} error">{{content}}{{error}}</div>',
<ide> 'label' => '<label{{attrs}}>{{text}}</label>',
<add> 'nestedLabel' => '<label{{attrs}}>{{input}}{{text}}</label>',
<ide> 'legend' => '<legend>{{text}}</legend>',
<ide> 'option' => '<option value="{{value}}"{{attrs}}>{{text}}</option>',
<ide> 'optgroup' => '<optgroup label="{{label}}"{{attrs}}>{{content}}</optgroup>',
<ide> 'select' => '<select name="{{name}}"{{attrs}}>{{content}}</select>',
<ide> 'selectMultiple' => '<select name="{{name}}[]" multiple="multiple"{{attrs}}>{{content}}</select>',
<ide> 'radio' => '<input type="radio" name="{{name}}" value="{{value}}"{{attrs}}>',
<del> 'radioWrapper' => '{{input}}{{label}}',
<add> 'radioWrapper' => '{{label}}',
<ide> 'textarea' => '<textarea name="{{name}}"{{attrs}}>{{value}}</textarea>',
<ide> 'submitContainer' => '<div class="submit">{{content}}</div>',
<ide> ]
<ide> public function error($field, $text = null, array $options = []) {
<ide> * {{{
<ide> * echo $this->Form->label('published', 'Publish', array(
<ide> * 'for' => 'published',
<del> * 'input' => $this->text('published')
<add> * 'input' => $this->text('published'),
<ide> * ));
<ide> * <label for="post-publish">Publish <input type="text" name="published"></label>
<ide> * }}}
<ide> public function label($fieldName, $text = null, array $options = []) {
<ide> 'for' => $labelFor,
<ide> 'text' => $text,
<ide> ];
<add> if (isset($options['input'])) {
<add> return $this->widget('nestedLabel', $attrs);
<add> }
<ide> return $this->widget('label', $attrs);
<ide> }
<ide>
<ide> public function fieldset($fields = '', array $options = []) {
<ide> * - `error` - Control the error message that is produced. Set to `false` to disable any kind of error reporting (field
<ide> * error and error messages).
<ide> * - `empty` - String or boolean to enable empty select box options.
<add> * - `nestedInput` - Used with checkbox and radio inputs. Set to false to render inputs outside of label
<add> * elements. Can be set to true on any input to force the input inside the label. If you
<add> * enable this option for radio buttons you will also need to modify the default `radioWrapper` template.
<ide> *
<ide> * @param string $fieldName This should be "Modelname.fieldname"
<ide> * @param array $options Each type of input takes different options.
<ide> public function input($fieldName, array $options = []) {
<ide> 'error' => null,
<ide> 'required' => null,
<ide> 'options' => null,
<del> 'templates' => []
<add> 'templates' => [],
<ide> ];
<ide> $options = $this->_parseOptions($fieldName, $options);
<ide> $options += ['id' => $this->_domId($fieldName)];
<ide> public function input($fieldName, array $options = []) {
<ide> }
<ide>
<ide> $label = $options['label'];
<del> if ($options['type'] !== 'radio') {
<add> $nestedInput = false;
<add> if (in_array($options['type'], ['radio', 'checkbox'], true)) {
<add> $nestedInput = true;
<ide> unset($options['label']);
<ide> }
<add> $nestedInput = isset($options['nestedInput']) ? $options['nestedInput'] : $nestedInput;
<ide>
<ide> $input = $this->_getInput($fieldName, $options);
<ide> if ($options['type'] === 'hidden') {
<ide> public function input($fieldName, array $options = []) {
<ide> return $input;
<ide> }
<ide>
<del> $label = $this->_getLabel($fieldName, compact('input', 'label', 'error') + $options);
<add> $label = $this->_getLabel($fieldName, compact('input', 'label', 'error', 'nestedInput') + $options);
<ide> $result = $this->_groupTemplate(compact('input', 'label', 'error', 'options'));
<ide> $result = $this->_inputContainerTemplate([
<ide> 'content' => $result,
<ide> protected function _inputLabel($fieldName, $label, $options) {
<ide> } else {
<ide> $labelText = $label;
<ide> }
<add> $options += ['id' => null, 'input' => null, 'nestedInput' => false];
<ide>
<del> $labelAttributes = [
<del> 'for' => isset($options['id']) ? $options['id'] : null,
<del> 'input' => isset($options['input']) ? $options['input'] : null
<del> ] + $labelAttributes;
<add> $labelAttributes['for'] = $options['id'];
<add> if ($options['nestedInput']) {
<add> $labelAttributes['input'] = $options['input'];
<add> }
<ide> return $this->label($fieldName, $labelText, $labelAttributes);
<ide> }
<ide>
<ide><path>src/View/Widget/Label.php
<ide> class Label implements WidgetInterface {
<ide> */
<ide> protected $_templates;
<ide>
<add>/**
<add> * The template to use.
<add> *
<add> * @var string
<add> */
<add> protected $_labelTemplate = 'label';
<add>
<ide> /**
<ide> * Constructor.
<ide> *
<ide> public function render(array $data, ContextInterface $context) {
<ide> 'escape' => true,
<ide> ];
<ide>
<del> return $this->_templates->format('label', [
<add> return $this->_templates->format($this->_labelTemplate, [
<ide> 'text' => $data['escape'] ? h($data['text']) : $data['text'],
<ide> 'input' => $data['input'],
<ide> 'attrs' => $this->_templates->formatAttributes($data, ['text', 'input']),
<ide><path>src/View/Widget/NestedLabel.php
<add><?php
<add>/**
<add> * CakePHP(tm) : Rapid Development Framework (http://cakephp.org)
<add> * Copyright (c) Cake Software Foundation, Inc. (http://cakefoundation.org)
<add> *
<add> * Licensed under The MIT License
<add> * For full copyright and license information, please see the LICENSE.txt
<add> * Redistributions of files must retain the above copyright notice.
<add> *
<add> * @copyright Copyright (c) Cake Software Foundation, Inc. (http://cakefoundation.org)
<add> * @link http://cakephp.org CakePHP(tm) Project
<add> * @since 3.0.0
<add> * @license http://www.opensource.org/licenses/mit-license.php MIT License
<add> */
<add>namespace Cake\View\Widget;
<add>
<add>use Cake\View\Widget\Label;
<add>
<add>/**
<add> * Form 'widget' for creating labels that contain their input.
<add> *
<add> * Generally this element is used by other widgets,
<add> * and FormHelper itself.
<add> */
<add>class NestedLabel extends Label {
<add>
<add>/**
<add> * The template to use.
<add> *
<add> * @var string
<add> */
<add> protected $_labelTemplate = 'nestedLabel';
<add>
<add>}
<ide><path>src/View/Widget/WidgetRegistry.php
<ide> class WidgetRegistry {
<ide> 'checkbox' => ['Cake\View\Widget\Checkbox'],
<ide> 'file' => ['Cake\View\Widget\File'],
<ide> 'label' => ['Cake\View\Widget\Label'],
<del> 'multicheckbox' => ['Cake\View\Widget\MultiCheckbox', 'label'],
<del> 'radio' => ['Cake\View\Widget\Radio', 'label'],
<add> 'nestedLabel' => ['Cake\View\Widget\NestedLabel'],
<add> 'multicheckbox' => ['Cake\View\Widget\MultiCheckbox', 'nestedLabel'],
<add> 'radio' => ['Cake\View\Widget\Radio', 'nestedLabel'],
<ide> 'select' => ['Cake\View\Widget\SelectBox'],
<ide> 'textarea' => ['Cake\View\Widget\Textarea'],
<ide> 'datetime' => ['Cake\View\Widget\DateTime', 'select'], | 4 |
Text | Text | suggest new hash syntax in testing guide | 487aa51b1414d3f316c7ccdc92af2f212961e0bb | <ide><path>guides/source/testing.md
<ide> All the keyword arguments are optional.
<ide> Example: Calling the `:show` action, passing an `id` of 12 as the `params` and setting a `user_id` of 5 in the session:
<ide>
<ide> ```ruby
<del>get(:show, params: { 'id' => "12" }, session: { 'user_id' => 5 })
<add>get(:show, params: { id: 12 }, session: { user_id: 5 })
<ide> ```
<ide>
<ide> Another example: Calling the `:view` action, passing an `id` of 12 as the `params`, this time with no session, but with a flash message.
<ide>
<ide> ```ruby
<del>get(:view, params: { 'id' => '12' }, flash: { 'message' => 'booya!' })
<add>get(:view, params: { id: 12 }, flash: { message: 'booya!' })
<ide> ```
<ide>
<ide> NOTE: If you try running `test_should_create_article` test from `articles_controller_test.rb` it will fail on account of the newly added model level validation and rightly so. | 1 |
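The change is purely a matter of literal style, though the two forms differ in key type: the old literals use string keys while the suggested shorthand uses symbol keys. A small sketch; in controller tests the difference is normally harmless because request parameters are exposed with indifferent access:

```ruby
{ 'user_id' => 5 }   # old style: string key
{ user_id: 5 }       # suggested style: symbol key, shorthand for { :user_id => 5 }
```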
Text | Text | remove copyrighted content from file | b7de2dff3f2407f8d960ebc4ab0aa0fa9b4e7dfc | <ide><path>guide/english/logic/mobsters-riddle/index.md
<ide> title: Mobsters Riddle
<ide> ---
<ide> ## Mobsters Riddle
<ide>
<del>The gang of four that once ran the biggest protection racket in the east eventually got out of jail. Almost immediately, they began operating again in the same neighborhood. They had previously divided it into four territories: north, south, east, and west. Because each mobster had always kept his operations in the same area, the police nicknamed them Mr. North, Mr. South, Mr. East, and Mr. West.
<add>This is a stub. <a href='https://github.com/freeCodeCamp/freeCodeCamp/blob/master/guide/english/logic/mobsters-riddle/index.md' target='_blank' rel='nofollow'>Help our community expand it</a>.
<ide>
<del>Now the police received a reliable tip that this time each mobster was operating in a new territory. None of the new victims was willing to identify the mobsters from photos the police showed them, but three facts became clear, and that was enough for the FBI to work out which mobster was operating where. These are the facts:
<add><a href='https://github.com/freecodecamp/guides/blob/master/README.md' target='_blank' rel='nofollow'>This quick style guide will help ensure your pull request gets accepted</a>.
<ide>
<del>1. Mr. East is not operating the North.
<del>2. The North territory is not being operated by Mr. South.
<del>3. The South territory is not being operated by Mr. East.
<add><!-- The article goes here, in GitHub-flavored Markdown. Feel free to add YouTube videos, images, and CodePen/JSBin embeds -->
<ide>
<del>Can you tell who is operating in each territory?
<del>
<del><details><summary>Solution</summary><blockquote>
<del>
<del>Mr. North is operating the South, Mr. East is operating the West, Mr. South is operating the East, and Mr. West is operating the North.</blockquote></details>
<add>### More Information:
<add><!-- Please add any articles you think might be helpful to read before writing the article -->
<ide>\ No newline at end of file | 1 |
Javascript | Javascript | remove debug code | 12b0282836d5fb9ee3d72106e7a7897150c05993 | <ide><path>src/api.js
<ide> var WorkerTransport = (function WorkerTransportClosure() {
<ide> var fakeWorker = {
<ide> postMessage: function WorkerTransport_postMessage(obj) {
<ide> fakeWorker.onmessage({data: obj});
<del> try {
<del> testF.contentWindow.postMessage(obj, "*");
<del> } catch(e) {
<del> debugger;
<del> }
<ide> },
<ide> terminate: function WorkerTransport_terminate() {}
<ide> }; | 1 |
Ruby | Ruby | fix the class name | 61c65a0aefc535d20c4ae5f4d51dbb3f624f1836 | <ide><path>railties/lib/rails/generators/app_base.rb
<ide> def set_default_accessors!
<ide>
<ide> def database_gemfile_entry
<ide> return [] if options[:skip_active_record]
<del> gem = GemfileGem.version gem_for_database, nil,
<add> gem = GemfileEntry.version gem_for_database, nil,
<ide> "Use #{options[:database]} as the database for Active Record"
<ide> return [gem]
<ide> end
<ide> def comment_if(value)
<ide> options[value] ? '# ' : ''
<ide> end
<ide>
<del> class GemfileGem < Struct.new(:name, :comment, :version, :options, :commented_out)
<add> class GemfileEntry < Struct.new(:name, :comment, :version, :options, :commented_out)
<ide> def initialize(name, comment, version, options = {}, commented_out = false)
<ide> super
<ide> end
<ide> def padding(max_width)
<ide> def rails_gemfile_entry
<ide> if options.dev?
<ide> [
<del> GemfileGem.path('rails', Rails::Generators::RAILS_DEV_PATH),
<del> GemfileGem.github('arel', 'rails/arel'),
<add> GemfileEntry.path('rails', Rails::Generators::RAILS_DEV_PATH),
<add> GemfileEntry.github('arel', 'rails/arel'),
<ide> ]
<ide> elsif options.edge?
<ide> [
<del> GemfileGem.path('rails', 'rails/rails'),
<del> GemfileGem.path('arel', 'rails/arel'),
<add> GemfileEntry.path('rails', 'rails/rails'),
<add> GemfileEntry.path('arel', 'rails/arel'),
<ide> ]
<ide> else
<ide> [
<del> GemfileGem.new('rails', "Bundle edge Rails instead: gem 'rails', github: 'rails/rails'", Rails::VERSION::STRING)
<add> GemfileEntry.new('rails', "Bundle edge Rails instead: gem 'rails', github: 'rails/rails'", Rails::VERSION::STRING)
<ide> ]
<ide> end
<ide> end
<ide> def assets_gemfile_entry
<ide>
<ide> gems = []
<ide> gemfile = if options.dev? || options.edge?
<del> gems << GemfileGem.github('sprockets-rails', 'rails/sprockets-rails',
<add> gems << GemfileEntry.github('sprockets-rails', 'rails/sprockets-rails',
<ide> 'Use edge version of sprockets-rails')
<del> gems << GemfileGem.github('sass-rails', 'rails/sass-rails',
<add> gems << GemfileEntry.github('sass-rails', 'rails/sass-rails',
<ide> 'Use SCSS for stylesheets')
<ide> else
<del> gems << GemfileGem.version('sass-rails',
<add> gems << GemfileEntry.version('sass-rails',
<ide> '~> 4.0.0.rc1',
<ide> 'Use SCSS for stylesheets')
<ide> end
<ide>
<del> gems << GemfileGem.version('uglifier',
<add> gems << GemfileEntry.version('uglifier',
<ide> '>= 1.3.0',
<ide> 'Use Uglifier as compressor for JavaScript assets')
<ide>
<ide> def assets_gemfile_entry
<ide> def coffee_gemfile_entry
<ide> comment = 'Use CoffeeScript for .js.coffee assets and views'
<ide> if options.dev? || options.edge?
<del> GemfileGem.github 'coffee-rails', 'rails/coffee-rails', comment
<add> GemfileEntry.github 'coffee-rails', 'rails/coffee-rails', comment
<ide> else
<del> GemfileGem.version 'coffee-rails', '~> 4.0.0', comment
<add> GemfileEntry.version 'coffee-rails', '~> 4.0.0', comment
<ide> end
<ide> end
<ide>
<ide> def javascript_gemfile_entry
<ide> []
<ide> else
<ide> gems = [coffee_gemfile_entry, javascript_runtime_gemfile_entry]
<del> gems << GemfileGem.version("#{options[:javascript]}-rails", nil,
<add> gems << GemfileEntry.version("#{options[:javascript]}-rails", nil,
<ide> "Use #{options[:javascript]} as the JavaScript library")
<ide>
<del> gems << GemfileGem.version("turbolinks", nil,
<add> gems << GemfileEntry.version("turbolinks", nil,
<ide> "Turbolinks makes following links in your web application faster. Read more: https://github.com/rails/turbolinks")
<ide> end
<ide> end
<ide>
<ide> def javascript_runtime_gemfile_entry
<ide> comment = 'See https://github.com/sstephenson/execjs#readme for more supported runtimes'
<ide> runtime = if defined?(JRUBY_VERSION)
<del> GemfileGem.version 'therubyrhino', comment, nil
<add> GemfileEntry.version 'therubyrhino', comment, nil
<ide> else
<del> GemfileGem.new 'therubyracer', comment, nil, { :platforms => :ruby }, true
<add> GemfileEntry.new 'therubyracer', comment, nil, { :platforms => :ruby }, true
<ide> end
<ide> end
<ide> | 1 |
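The rename only touches this internal generator helper, and its intended API is visible from the call sites in the same patch. A condensed sketch of the three constructors used above, with the same arguments that already appear in the diff (meaningful only inside the generator where `GemfileEntry` and `Rails::Generators::RAILS_DEV_PATH` are defined):

```ruby
GemfileEntry.version 'uglifier', '>= 1.3.0', 'Use Uglifier as compressor for JavaScript assets'
GemfileEntry.github  'coffee-rails', 'rails/coffee-rails', 'Use CoffeeScript for .js.coffee assets and views'
GemfileEntry.path    'rails', Rails::Generators::RAILS_DEV_PATH
```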
Text | Text | fix dotcloud to docker, inc | 36ab1836f973c62ff543456f1613dec8813d00df | <ide><path>CONTRIBUTING.md
<ide> c. The contribution was provided directly to me by some other person who represe
<ide>
<ide> d. I understand and agree that this Project and the contribution are publicly known and that a record of the contribution (including all personal information I submit with it, including my sign-off record) is maintained indefinitely and may be redistributed consistent with this Project or the open source license(s) involved.
<ide>
<del>e. I hereby grant to the Project, dotCloud, Inc and its successors; and recipients of software distributed by the Project a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, modify, prepare derivative works of, publicly display, publicly perform, sublicense, and distribute this contribution and such modifications and derivative works consistent with this Project, the open source license indicated in the previous work or other appropriate open source license specified by the Project and approved by the Open Source Initiative(OSI) at http://www.opensource.org.
<add>e. I hereby grant to the Project, Docker, Inc and its successors; and recipients of software distributed by the Project a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, modify, prepare derivative works of, publicly display, publicly perform, sublicense, and distribute this contribution and such modifications and derivative works consistent with this Project, the open source license indicated in the previous work or other appropriate open source license specified by the Project and approved by the Open Source Initiative(OSI) at http://www.opensource.org.
<ide> ```
<ide>
<ide> then you just add a line saying | 1 |
Ruby | Ruby | remove more uses of deprecated logger methods | bb106e9aa278a44cd970a3b820af1664fd85bba1 | <ide><path>activerecord/test/cases/base_test.rb
<ide> def test_becomes_includes_errors
<ide> def test_silence_sets_log_level_to_error_in_block
<ide> original_logger = ActiveRecord::Base.logger
<ide> log = StringIO.new
<del> ActiveRecord::Base.logger = Logger.new(log)
<add> ActiveRecord::Base.logger = ActiveSupport::Logger.new(log)
<ide> ActiveRecord::Base.logger.level = Logger::DEBUG
<ide> ActiveRecord::Base.silence do
<ide> ActiveRecord::Base.logger.warn "warn"
<ide> def test_silence_sets_log_level_to_error_in_block
<ide> def test_silence_sets_log_level_back_to_level_before_yield
<ide> original_logger = ActiveRecord::Base.logger
<ide> log = StringIO.new
<del> ActiveRecord::Base.logger = Logger.new(log)
<add> ActiveRecord::Base.logger = ActiveSupport::Logger.new(log)
<ide> ActiveRecord::Base.logger.level = Logger::WARN
<ide> ActiveRecord::Base.silence do
<ide> end
<ide> def test_benchmark_with_use_silence
<ide> original_logger = ActiveRecord::Base.logger
<ide> log = StringIO.new
<ide> ActiveRecord::Base.logger = Logger.new(log)
<del> ActiveRecord::Base.benchmark("Logging", :level => :debug, :silence => true) { ActiveRecord::Base.logger.debug "Loud" }
<ide> ActiveRecord::Base.benchmark("Logging", :level => :debug, :silence => false) { ActiveRecord::Base.logger.debug "Quiet" }
<del> assert_no_match(/Loud/, log.string)
<ide> assert_match(/Quiet/, log.string)
<ide> ensure
<ide> ActiveRecord::Base.logger = original_logger
<ide><path>activerecord/test/cases/helper.rb
<ide>
<ide> require 'active_record'
<ide> require 'active_support/dependencies'
<add>require 'active_support/logger'
<ide>
<ide> require 'support/config'
<ide> require 'support/connection'
<ide> def call(name, start, finish, message_id, values)
<ide> unless ENV['FIXTURE_DEBUG']
<ide> module ActiveRecord::TestFixtures::ClassMethods
<ide> def try_to_load_dependency_with_silence(*args)
<del> ActiveRecord::Base.logger.silence { try_to_load_dependency_without_silence(*args)}
<add> old = ActiveRecord::Base.logger.level
<add> ActiveRecord::Base.logger.level = ActiveSupport::Logger::ERROR
<add> try_to_load_dependency_without_silence(*args)
<add> ActiveRecord::Base.logger.level = old
<ide> end
<ide>
<ide> alias_method_chain :try_to_load_dependency, :silence
<ide><path>activerecord/test/cases/log_subscriber_test.rb
<ide>
<ide> class LogSubscriberTest < ActiveRecord::TestCase
<ide> include ActiveSupport::LogSubscriber::TestHelper
<del> include ActiveSupport::BufferedLogger::Severity
<add> include ActiveSupport::Logger::Severity
<ide>
<ide> fixtures :posts
<ide> | 3 |
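`ActiveSupport::Logger` is a drop-in replacement for the stdlib `Logger` in these tests. A minimal sketch of constructing it the way the updated tests do, assuming an Active Record test environment is already loaded:

```ruby
require 'active_support/logger'
require 'stringio'

log = StringIO.new
ActiveRecord::Base.logger = ActiveSupport::Logger.new(log)
ActiveRecord::Base.logger.level = ActiveSupport::Logger::ERROR
```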
Javascript | Javascript | fix component helper test | 16ccac91d07e2e975a54efbf004ecf82d585d23e | <ide><path>test/ng/compileSpec.js
<ide> describe('$compile', function() {
<ide> controllerAs: 'myComponent',
<ide> template: '',
<ide> templateUrl: undefined,
<del> transclude: false,
<add> transclude: undefined,
<ide> scope: {},
<ide> bindToController: {},
<ide> restrict: 'E' | 1 |
PHP | PHP | simplify join clause | a06e156348dfd932a36758ca1df5062aa4c953d5 | <ide><path>src/Illuminate/Database/Query/Builder.php
<ide> public function from($table)
<ide> */
<ide> public function join($table, $one, $operator = null, $two = null, $type = 'inner', $where = false)
<ide> {
<add> $join = new JoinClause($type, $table, $this);
<add>
<ide> // If the first "column" of the join is really a Closure instance the developer
<ide> // is trying to build a join with a complex "on" clause containing more than
<ide> // one condition, so we'll add the join and call a Closure with the query.
<ide> if ($one instanceof Closure) {
<del> $join = new JoinClause($type, $table);
<del>
<ide> call_user_func($one, $join);
<ide>
<ide> $this->joins[] = $join;
<ide>
<del> $this->addBinding($join->bindings, 'join');
<add> $this->addBinding($join->getBindings(), 'join');
<ide> }
<ide>
<ide> // If the column is simply a string, we can assume the join simply has a basic
<ide> // "on" clause with a single condition. So we will just build the join with
<ide> // this simple join clauses attached to it. There is not a join callback.
<ide> else {
<del> $join = new JoinClause($type, $table);
<add> $method = $where ? 'where' : 'on';
<ide>
<del> $this->joins[] = $join->on(
<add> $this->joins[] = $join->$method(
<ide> $one, $operator, $two, 'and', $where
<ide> );
<ide>
<del> $this->addBinding($join->bindings, 'join');
<add> $this->addBinding($join->getBindings(), 'join');
<ide> }
<ide>
<ide> return $this;
<ide> public function crossJoin($table, $first = null, $operator = null, $second = nul
<ide> return $this->join($table, $first, $operator, $second, 'cross');
<ide> }
<ide>
<del> $this->joins[] = new JoinClause('cross', $table);
<add> $this->joins[] = new JoinClause('cross', $table, $this);
<ide>
<ide> return $this;
<ide> }
<ide><path>src/Illuminate/Database/Query/Grammars/Grammar.php
<ide> namespace Illuminate\Database\Query\Grammars;
<ide>
<ide> use Illuminate\Database\Query\Builder;
<add>use Illuminate\Database\Query\JoinClause;
<ide> use Illuminate\Database\Grammar as BaseGrammar;
<ide>
<ide> class Grammar extends BaseGrammar
<ide> protected function compileJoins(Builder $query, $joins)
<ide> $sql = [];
<ide>
<ide> foreach ($joins as $join) {
<del> $table = $this->wrapTable($join->table);
<del>
<del> $type = $join->type;
<del>
<del> // Cross joins generate a cartesian product between this first table and a joined
<del> // table. In case the user didn't specify any "on" clauses on the join we will
<del> // append this SQL and jump right back into the next iteration of this loop.
<del> if ($type === 'cross' && ! $join->clauses) {
<del> $sql[] = "cross join $table";
<del>
<del> continue;
<del> }
<add> $conditions = $this->compileWheres($join);
<ide>
<del> // First we need to build all of the "on" clauses for the join. There may be many
<del> // of these clauses so we will need to iterate through each one and build them
<del> // separately, then we'll join them up into a single string when we're done.
<del> $clauses = [];
<del>
<del> foreach ($join->clauses as $clause) {
<del> $clauses[] = $this->compileJoinConstraint($clause);
<del> }
<del>
<del> // Once we have constructed the clauses, we'll need to take the boolean connector
<del> // off of the first clause as it obviously will not be required on that clause
<del> // because it leads the rest of the clauses, thus not requiring any boolean.
<del> $clauses[0] = $this->removeLeadingBoolean($clauses[0]);
<del>
<del> $clauses = implode(' ', $clauses);
<add> $table = $this->wrapTable($join->table);
<ide>
<del> // Once we have everything ready to go, we will just concatenate all the parts to
<del> // build the final join statement SQL for the query and we can then return the
<del> // final clause back to the callers as a single, stringified join statement.
<del> $sql[] = "$type join $table on $clauses";
<add> $sql[] = trim("{$join->type} join {$table} {$conditions}");
<ide> }
<ide>
<ide> return implode(' ', $sql);
<ide> }
<ide>
<del> /**
<del> * Create a join clause constraint segment.
<del> *
<del> * @param array $clause
<del> * @return string
<del> */
<del> protected function compileJoinConstraint(array $clause)
<del> {
<del> if ($clause['nested']) {
<del> return $this->compileNestedJoinConstraint($clause);
<del> }
<del>
<del> $first = $this->wrap($clause['first']);
<del>
<del> if ($clause['where']) {
<del> if ($clause['operator'] === 'in' || $clause['operator'] === 'not in') {
<del> $second = '('.implode(', ', array_fill(0, $clause['second'], '?')).')';
<del> } else {
<del> $second = '?';
<del> }
<del> } else {
<del> $second = $this->wrap($clause['second']);
<del> }
<del>
<del> return "{$clause['boolean']} $first {$clause['operator']} $second";
<del> }
<del>
<del> /**
<del> * Create a nested join clause constraint segment.
<del> *
<del> * @param array $clause
<del> * @return string
<del> */
<del> protected function compileNestedJoinConstraint(array $clause)
<del> {
<del> $clauses = [];
<del>
<del> foreach ($clause['join']->clauses as $nestedClause) {
<del> $clauses[] = $this->compileJoinConstraint($nestedClause);
<del> }
<del>
<del> $clauses[0] = $this->removeLeadingBoolean($clauses[0]);
<del>
<del> $clauses = implode(' ', $clauses);
<del>
<del> return "{$clause['boolean']} ({$clauses})";
<del> }
<del>
<ide> /**
<ide> * Compile the "where" portions of the query.
<ide> *
<ide> protected function compileWheres(Builder $query)
<ide> if (count($sql) > 0) {
<ide> $sql = implode(' ', $sql);
<ide>
<del> return 'where '.$this->removeLeadingBoolean($sql);
<add> $conjunction = $query instanceof JoinClause ? 'on' : 'where';
<add>
<add> return $conjunction.' '.$this->removeLeadingBoolean($sql);
<ide> }
<ide>
<ide> return '';
<ide> protected function whereNested(Builder $query, $where)
<ide> {
<ide> $nested = $where['query'];
<ide>
<del> return '('.substr($this->compileWheres($nested), 6).')';
<add> $offset = $query instanceof JoinClause ? 3 : 6;
<add>
<add> return '('.substr($this->compileWheres($nested), $offset).')';
<ide> }
<ide>
<ide> /**
<ide><path>src/Illuminate/Database/Query/Grammars/PostgresGrammar.php
<ide> protected function compileUpdateJoinWheres(Builder $query)
<ide> // all out then implode them. This should give us "where" like syntax after
<ide> // everything has been built and then we will join it to the real wheres.
<ide> foreach ($query->joins as $join) {
<del> foreach ($join->clauses as $clause) {
<del> $joinWheres[] = $this->compileJoinConstraint($clause);
<add> foreach ($join->wheres as $where) {
<add> $method = "where{$where['type']}";
<add>
<add> $joinWheres[] = $where['boolean'].' '.$this->$method($query, $where);
<ide> }
<ide> }
<ide>
<ide><path>src/Illuminate/Database/Query/JoinClause.php
<ide> use Closure;
<ide> use InvalidArgumentException;
<ide>
<del>class JoinClause
<add>class JoinClause extends Builder
<ide> {
<ide> /**
<ide> * The type of join being performed.
<ide> class JoinClause
<ide> public $table;
<ide>
<ide> /**
<del> * The "on" clauses for the join.
<add> * The parent query builder instance.
<ide> *
<del> * @var array
<add> * @var \Illuminate\Database\Query\Builder
<ide> */
<del> public $clauses = [];
<del>
<del> /**
<del> * The "on" bindings for the join.
<del> *
<del> * @var array
<del> */
<del> public $bindings = [];
<add> private $parentQuery;
<ide>
<ide> /**
<ide> * Create a new join clause instance.
<ide> *
<ide> * @param string $type
<ide> * @param string $table
<add> * @param \Illuminate\Database\Query\Builder $parentQuery
<ide> * @return void
<ide> */
<del> public function __construct($type, $table)
<add> public function __construct($type, $table, Builder $parentQuery)
<ide> {
<ide> $this->type = $type;
<ide> $this->table = $table;
<add> $this->parentQuery = $parentQuery;
<add>
<add> parent::__construct(
<add> $parentQuery->connection, $parentQuery->grammar, $parentQuery->processor
<add> );
<ide> }
<ide>
<ide> /**
<ide> public function __construct($type, $table)
<ide> * @param string|null $operator
<ide> * @param string|null $second
<ide> * @param string $boolean
<del> * @param bool $where
<ide> * @return $this
<ide> *
<ide> * @throws \InvalidArgumentException
<ide> */
<del> public function on($first, $operator = null, $second = null, $boolean = 'and', $where = false)
<add> public function on($first, $operator = null, $second = null, $boolean = 'and')
<ide> {
<ide> if ($first instanceof Closure) {
<del> return $this->nest($first, $boolean);
<del> }
<del>
<del> if (func_num_args() < 3) {
<del> throw new InvalidArgumentException('Not enough arguments for the on clause.');
<add> return $this->whereNested($first, $boolean);
<ide> }
<ide>
<del> if ($where) {
<del> $this->bindings[] = $second;
<del> }
<del>
<del> if ($where && ($operator === 'in' || $operator === 'not in') && is_array($second)) {
<del> $second = count($second);
<del> }
<del>
<del> $nested = false;
<del>
<del> $this->clauses[] = compact('first', 'operator', 'second', 'boolean', 'where', 'nested');
<del>
<del> return $this;
<add> return $this->whereColumn($first, $operator, $second, $boolean);
<ide> }
<ide>
<ide> /**
<ide> public function orOn($first, $operator = null, $second = null)
<ide> }
<ide>
<ide> /**
<del> * Add an "on where" clause to the join.
<add> * Get a new instance of the join clause builder.
<ide> *
<del> * @param \Closure|string $first
<del> * @param string|null $operator
<del> * @param string|null $second
<del> * @param string $boolean
<ide> * @return \Illuminate\Database\Query\JoinClause
<ide> */
<del> public function where($first, $operator = null, $second = null, $boolean = 'and')
<add> public function newQuery()
<ide> {
<del> return $this->on($first, $operator, $second, $boolean, true);
<del> }
<del>
<del> /**
<del> * Add an "or on where" clause to the join.
<del> *
<del> * @param \Closure|string $first
<del> * @param string|null $operator
<del> * @param string|null $second
<del> * @return \Illuminate\Database\Query\JoinClause
<del> */
<del> public function orWhere($first, $operator = null, $second = null)
<del> {
<del> return $this->on($first, $operator, $second, 'or', true);
<del> }
<del>
<del> /**
<del> * Add an "on where is null" clause to the join.
<del> *
<del> * @param string $column
<del> * @param string $boolean
<del> * @return \Illuminate\Database\Query\JoinClause
<del> */
<del> public function whereNull($column, $boolean = 'and')
<del> {
<del> return $this->on($column, 'is', new Expression('null'), $boolean, false);
<del> }
<del>
<del> /**
<del> * Add an "or on where is null" clause to the join.
<del> *
<del> * @param string $column
<del> * @return \Illuminate\Database\Query\JoinClause
<del> */
<del> public function orWhereNull($column)
<del> {
<del> return $this->whereNull($column, 'or');
<del> }
<del>
<del> /**
<del> * Add an "on where is not null" clause to the join.
<del> *
<del> * @param string $column
<del> * @param string $boolean
<del> * @return \Illuminate\Database\Query\JoinClause
<del> */
<del> public function whereNotNull($column, $boolean = 'and')
<del> {
<del> return $this->on($column, 'is', new Expression('not null'), $boolean, false);
<del> }
<del>
<del> /**
<del> * Add an "or on where is not null" clause to the join.
<del> *
<del> * @param string $column
<del> * @return \Illuminate\Database\Query\JoinClause
<del> */
<del> public function orWhereNotNull($column)
<del> {
<del> return $this->whereNotNull($column, 'or');
<del> }
<del>
<del> /**
<del> * Add an "on where in (...)" clause to the join.
<del> *
<del> * @param string $column
<del> * @param array $values
<del> * @return \Illuminate\Database\Query\JoinClause
<del> */
<del> public function whereIn($column, array $values)
<del> {
<del> return $this->on($column, 'in', $values, 'and', true);
<del> }
<del>
<del> /**
<del> * Add an "on where not in (...)" clause to the join.
<del> *
<del> * @param string $column
<del> * @param array $values
<del> * @return \Illuminate\Database\Query\JoinClause
<del> */
<del> public function whereNotIn($column, array $values)
<del> {
<del> return $this->on($column, 'not in', $values, 'and', true);
<del> }
<del>
<del> /**
<del> * Add an "or on where in (...)" clause to the join.
<del> *
<del> * @param string $column
<del> * @param array $values
<del> * @return \Illuminate\Database\Query\JoinClause
<del> */
<del> public function orWhereIn($column, array $values)
<del> {
<del> return $this->on($column, 'in', $values, 'or', true);
<del> }
<del>
<del> /**
<del> * Add an "or on where not in (...)" clause to the join.
<del> *
<del> * @param string $column
<del> * @param array $values
<del> * @return \Illuminate\Database\Query\JoinClause
<del> */
<del> public function orWhereNotIn($column, array $values)
<del> {
<del> return $this->on($column, 'not in', $values, 'or', true);
<del> }
<del>
<del> /**
<del> * Add a nested where statement to the query.
<del> *
<del> * @param \Closure $callback
<del> * @param string $boolean
<del> * @return \Illuminate\Database\Query\JoinClause
<del> */
<del> public function nest(Closure $callback, $boolean = 'and')
<del> {
<del> $join = new static($this->type, $this->table);
<del>
<del> $callback($join);
<del>
<del> if (count($join->clauses)) {
<del> $nested = true;
<del>
<del> $this->clauses[] = compact('nested', 'join', 'boolean');
<del> $this->bindings = array_merge($this->bindings, $join->bindings);
<del> }
<del>
<del> return $this;
<add> return new static($this->type, $this->table, $this->parentQuery);
<ide> }
<ide> }
<ide><path>tests/Database/DatabaseEloquentBuilderTest.php
<ide> public function testHasWithContraintsAndJoinAndHavingInSubquery()
<ide> $builder = $model->where('bar', 'baz');
<ide> $builder->whereHas('foo', function ($q) {
<ide> $q->join('quuuux', function ($j) {
<del> $j->on('quuuuux', '=', 'quuuuuux', 'and', true);
<add> $j->where('quuuuux', '=', 'quuuuuux');
<ide> });
<ide> $q->having('bam', '>', 'qux');
<ide> })->where('quux', 'quuux'); | 5 |
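With `JoinClause` now extending the query `Builder`, join closures keep `on()` for column comparisons (closures are routed through `whereNested()`, plain arguments through `whereColumn()`) and inherit the full `where*` family instead of the removed hand-rolled helpers. A minimal sketch of how a caller might exercise the clause after this change — the table and column names are illustrative only, not taken from the patch:

```php
<?php

use Illuminate\Support\Facades\DB;

// on() compares two columns; where() is the inherited query-builder
// method, so its value argument is bound as a parameter.
$users = DB::table('users')
    ->join('contacts', function ($join) {
        $join->on('users.id', '=', 'contacts.user_id')
             ->where('contacts.active', '=', 1);
    })
    ->get();

// A closure passed to on() is grouped via whereNested(), producing a
// parenthesised set of join conditions.
$orders = DB::table('orders')
    ->join('users', function ($join) {
        $join->on(function ($join) {
            $join->on('orders.user_id', '=', 'users.id')
                 ->orOn('orders.buyer_id', '=', 'users.id');
        });
    })
    ->get();
```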
PHP | PHP | fix singular form of messages | 114914e523f504b3f16e2f6bd77bacabdcf23ebc | <ide><path>tests/TestCase/I18n/I18nTest.php
<ide> public function testPluralContextFunction()
<ide> 'letter' => [
<ide> '_context' => [
<ide> 'character' => 'The letter {0}',
<del> 'communication' => 'The letters {0} and {1}',
<add> 'communication' => 'She wrote a letter to {0}',
<ide> ]
<ide> ],
<ide> 'letters' => [
<ide> public function testDomainPluralContextFunction()
<ide> 'letter' => [
<ide> '_context' => [
<ide> 'character' => 'The letter {0}',
<del> 'communication' => 'The letters {0} and {1}',
<add> 'communication' => 'She wrote a letter to {0}',
<ide> ]
<ide> ],
<ide> 'letters' => [ | 1 |
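The corrected fixture keeps the `communication` entry genuinely singular. For reference, a context-aware lookup through CakePHP's `__x()` helper might look like the following — the message catalogue is assumed to be loaded as in the test, and the argument values are invented for illustration:

```php
<?php

// The same key resolves to different messages depending on the context.
echo __x('character', 'letter', ['A']);
// "The letter A"

echo __x('communication', 'letter', ['Thomas']);
// "She wrote a letter to Thomas"
```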
Text | Text | fix typo in readme | 81c7e3ec9f26e774902276769d32140bc699c631 | <ide><path>README.md
<ide> with torch.no_grad():
<ide> # get the predicted last token
<ide> predicted_index = torch.argmax(predictions_2[0, -1, :]).item()
<ide> predicted_token = tokenizer.convert_ids_to_tokens([predicted_index])[0]
<del>assert predicted_token == '.</w>'
<add>assert predicted_token == 'who'
<ide> ```
<ide>
<ide> ## Doc | 1 |
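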
Go | Go | add warning for deprecated flags | bb5ed452241c37ee9f2f3ebd02a2a5e1764334ad
<ide> func (f *FlagSet) parseOne() (bool, string, error) {
<ide> f.actual = make(map[string]*Flag)
<ide> }
<ide> f.actual[name] = flag
<add> for _, n := range flag.Names {
<add> if n == fmt.Sprintf("#%s", name) {
<add> fmt.Fprintf(f.out(), "Warning: '-%s' is deprecated, it will be removed soon. See usage.\n", name)
<add> }
<add> }
<ide> return true, "", nil
<ide> }
<ide> | 1 |
PHP | PHP | add test for cli | 27dc4bf7c68aa117d36ea9525c99c4e837001a07 | <ide><path>tests/TestCase/Shell/Task/LoadTaskTest.php
<ide> public function setUp()
<ide> ->getMock();
<ide>
<ide> $this->bootstrap = ROOT . DS . 'config' . DS . 'bootstrap.php';
<add> $this->bootstrapCli = ROOT . DS . 'config' . DS . 'bootstrap_cli.php';
<add> copy($this->bootstrap, $this->bootstrapCli);
<ide>
<ide> $bootstrap = new File($this->bootstrap, false);
<ide> $this->originalBootstrapContent = $bootstrap->read();
<ide> public function tearDown()
<ide>
<ide> $bootstrap = new File($this->bootstrap, false);
<ide> $bootstrap->write($this->originalBootstrapContent);
<add> unlink($this->bootstrapCli);
<ide> }
<ide>
<ide> /**
<ide> public function testLoadWithBootstrap()
<ide> $this->assertContains($expected, $bootstrap->read());
<ide> }
<ide>
<add> /**
<add> * Tests that loading with bootstrap_cli works.
<add> *
<add> * @return void
<add> */
<add> public function testLoadBootstrapCli()
<add> {
<add> $this->Task->params = [
<add> 'bootstrap' => false,
<add> 'routes' => false,
<add> 'autoload' => false,
<add> 'cli' => true
<add> ];
<add>
<add> $action = $this->Task->main('CliPlugin');
<add>
<add> $this->assertTrue($action);
<add>
<add> $expected = "Plugin::load('CliPlugin');";
<add> $bootstrap = new File($this->bootstrapCli, false);
<add> $this->assertContains($expected, $bootstrap->read());
<add> }
<add>
<ide> /**
<ide> * testLoadWithRoutes
<ide> * | 1 |
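The new test drives the task with `'cli' => true`, so the plugin registration lands in `config/bootstrap_cli.php` instead of `config/bootstrap.php`. A sketch of the expected result — the console invocation shown in the comment is inferred from the task parameters and is not part of the patch:

```php
<?php
// Roughly: bin/cake plugin load CliPlugin --cli
//
// config/bootstrap_cli.php afterwards contains (only the relevant lines shown):
use Cake\Core\Plugin;

Plugin::load('CliPlugin');
```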
Text | Text | update res.json definition | 192d42bcacf20e8b501b9c3e7d325c852f6796ee | <ide><path>docs/api-routes/response-helpers.md
<ide> export default function handler(req, res) {
<ide> The included helpers are:
<ide>
<ide> - `res.status(code)` - A function to set the status code. `code` must be a valid [HTTP status code](https://en.wikipedia.org/wiki/List_of_HTTP_status_codes)
<del>- `res.json(json)` - Sends a JSON response. `json` must be a valid JSON object
<add>- `res.json(body)` - Sends a JSON response. `body` must be a [serializable object](https://developer.mozilla.org/en-US/docs/Glossary/Serialization)
<ide> - `res.send(body)` - Sends the HTTP response. `body` can be a `string`, an `object` or a `Buffer`
<ide> - `res.redirect([status,] path)` - Redirects to a specified path or URL. `status` must be a valid [HTTP status code](https://en.wikipedia.org/wiki/List_of_HTTP_status_codes). If not specified, `status` defaults to "307" "Temporary redirect".
<ide> | 1 |
Text | Text | update install instructions | c2e36510ead564e796ef75a7fc590c2470663573 | <ide><path>guide/english/apache/index.md
<ide> Apache runs on 67% of all webservers in the world. It is fast, reliable, and sec
<ide>
<ide> ### Installation
<ide>
<del>#### On Ubuntu
<add>#### On Ubuntu/Debian
<ide> ```
<del>sudo apt install apache2
<add>sudo aptitude install apache2
<ide> ```
<del>#### On Centos
<add>#### On CentOS
<ide> ```
<ide> sudo yum install httpd
<ide> ```` | 1 |
Python | Python | use 201 status code for post requests. | 94a09149b62496b5434a690de84b5972a5d5b554 | <ide><path>rest_framework/schemas/openapi.py
<ide> def _get_responses(self, path, method):
<ide> response_schema = paginator.get_paginated_response_schema(response_schema)
<ide> else:
<ide> response_schema = item_schema
<del>
<add> status_code = '201' if method == 'POST' else '200'
<ide> return {
<del> '200': {
<add> status_code: {
<ide> 'content': {
<ide> ct: {'schema': response_schema}
<ide> for ct in self.response_media_types
<ide><path>tests/schemas/test_openapi.py
<ide> class View(generics.GenericAPIView):
<ide> inspector.view = view
<ide>
<ide> responses = inspector._get_responses(path, method)
<del> assert responses['200']['content']['application/json']['schema']['required'] == ['text']
<del> assert list(responses['200']['content']['application/json']['schema']['properties'].keys()) == ['text']
<del> assert 'description' in responses['200']
<add> assert '201' in responses
<add> assert responses['201']['content']['application/json']['schema']['required'] == ['text']
<add> assert list(responses['201']['content']['application/json']['schema']['properties'].keys()) == ['text']
<add> assert 'description' in responses['201']
<ide>
<ide> def test_response_body_nested_serializer(self):
<ide> path = '/'
<ide> class View(generics.GenericAPIView):
<ide> inspector.view = view
<ide>
<ide> responses = inspector._get_responses(path, method)
<del> schema = responses['200']['content']['application/json']['schema']
<add> schema = responses['201']['content']['application/json']['schema']
<ide> assert sorted(schema['required']) == ['nested', 'text']
<ide> assert sorted(list(schema['properties'].keys())) == ['nested', 'text']
<ide> assert schema['properties']['nested']['type'] == 'object' | 2 |
Javascript | Javascript | add ended getter middleware | c74c27d99b8d9ab4f267edb8160bd935696ecb83 | <ide><path>src/js/tech/middleware.js
<ide> export const allowedGetters = {
<ide> played: 1,
<ide> paused: 1,
<ide> seekable: 1,
<del> volume: 1
<add> volume: 1,
<add> ended: 1
<ide> };
<ide>
<ide> /** | 1 |
Ruby | Ruby | fix new rubocop failures | ad1acdc1adfe5cc58590069fa604f884e729d89e | <ide><path>Library/Homebrew/requirement.rb
<ide> class Requirement
<ide> attr_reader :tags, :name, :cask, :download
<ide>
<ide> def initialize(tags = [])
<del> @cask ||= self.class.cask
<del> @download ||= self.class.download
<add> @cask = self.class.cask
<add> @download = self.class.download
<ide> tags.each do |tag|
<ide> next unless tag.is_a? Hash
<ide> | 1 |
Javascript | Javascript | simplify .wrapall() and fix style. close gh-1116 | 9434060722b7b935f61f8fb6c97a2a424255dc5d | <ide><path>src/manipulation.js
<ide> jQuery.fn.extend({
<ide> return jQuery.access( this, function( value ) {
<ide> return value === undefined ?
<ide> jQuery.text( this ) :
<del> this.empty().append( ( this[0] && this[0].ownerDocument || document ).createTextNode( value ) );
<add> this.empty().append( ( this[ 0 ] && this[ 0 ].ownerDocument || document ).createTextNode( value ) );
<ide> }, null, value, arguments.length );
<ide> },
<ide>
<ide> wrapAll: function( html ) {
<add> var wrap;
<add>
<ide> if ( jQuery.isFunction( html ) ) {
<del> return this.each(function(i) {
<del> jQuery(this).wrapAll( html.call(this, i) );
<add> return this.each(function( i ) {
<add> jQuery( this ).wrapAll( html.call(this, i) );
<ide> });
<ide> }
<ide>
<del> if ( this[0] ) {
<add> if ( this[ 0 ] ) {
<add>
<ide> // The elements to wrap the target around
<del> var wrap = jQuery( html, this[0].ownerDocument ).eq(0).clone(true);
<add> wrap = jQuery( html, this[ 0 ].ownerDocument ).eq( 0 ).clone( true );
<ide>
<del> if ( this[0].parentNode ) {
<del> wrap.insertBefore( this[0] );
<add> if ( this[ 0 ].parentNode ) {
<add> wrap.insertBefore( this[ 0 ] );
<ide> }
<ide>
<ide> wrap.map(function() {
<ide> var elem = this;
<ide>
<del> while ( elem.firstChild && elem.firstChild.nodeType === 1 ) {
<del> elem = elem.firstChild;
<add> while ( elem.firstElementChild ) {
<add> elem = elem.firstElementChild;
<ide> }
<ide>
<ide> return elem;
<ide> jQuery.fn.extend({
<ide>
<ide> html: function( value ) {
<ide> return jQuery.access( this, function( value ) {
<del> var elem = this[0] || {},
<add> var elem = this[ 0 ] || {},
<ide> i = 0,
<ide> l = this.length;
<ide> | 1 |
Go | Go | update the ui for docker build | 90ffcda05547332020ec6f2b98179380f7d0e56f | <ide><path>commands.go
<ide> func (cli *DockerCli) CmdInsert(args ...string) error {
<ide> }
<ide>
<ide> func (cli *DockerCli) CmdBuild(args ...string) error {
<del> cmd := Subcmd("build", "[OPTIONS]", "Build an image from a Dockerfile")
<del> fileName := cmd.String("f", "Dockerfile", "Use file as Dockerfile. Can be '-' for stdin")
<del> contextPath := cmd.String("c", "", "Use the specified directory as context for the build")
<add> cmd := Subcmd("build", "[OPTIONS] [CONTEXT]", "Build an image from a Dockerfile")
<add> fileName := cmd.String("f", "Dockerfile", "Use `file` as Dockerfile. Can be '-' for stdin")
<ide> if err := cmd.Parse(args); err != nil {
<ide> return nil
<ide> }
<ide> func (cli *DockerCli) CmdBuild(args ...string) error {
<ide> }
<ide> multipartBody = io.MultiReader(multipartBody, file)
<ide>
<add> compression := Bzip2
<add>
<ide> // Create a FormFile multipart for the context if needed
<del> if *contextPath != "" {
<add> if cmd.Arg(0) != "" {
<ide> // FIXME: Use NewTempArchive in order to have the size and avoid too much memory usage?
<del> context, err := Tar(*contextPath, Bzip2)
<add> context, err := Tar(cmd.Arg(0), compression)
<ide> if err != nil {
<ide> return err
<ide> }
<del> if _, err := w.CreateFormFile("Context", *contextPath+".tar.bz2"); err != nil {
<add> if _, err := w.CreateFormFile("Context", cmd.Arg(0)+"."+compression.Extension()); err != nil {
<ide> return err
<ide> }
<ide> multipartBody = io.MultiReader(multipartBody, utils.ProgressReader(ioutil.NopCloser(context), -1, os.Stdout, "Uploading Context %v/%v (%v)"))
<ide> func (cli *DockerCli) CmdBuild(args ...string) error {
<ide> return err
<ide> }
<ide> req.Header.Set("Content-Type", w.FormDataContentType())
<add> if cmd.Arg(0) != "" {
<add> req.Header.Set("X-Docker-Context-Compression", compression.Flag())
<add> }
<ide>
<ide> resp, err := http.DefaultClient.Do(req)
<ide> if err != nil { | 1 |
Javascript | Javascript | add plyexporter to the pages list | 71919368f27580a386f2322d8856432bbbf12fe6 | <ide><path>docs/list.js
<ide> var list = {
<ide> },
<ide>
<ide> "Exporters": {
<del> "GLTFExporter": "examples/exporters/GLTFExporter"
<add> "GLTFExporter": "examples/exporters/GLTFExporter",
<add> "PLYExporter": "examples/exporters/PLYExporter"
<ide> },
<ide>
<ide> "Plugins": { | 1 |