content_type (stringclasses, 8 values) | main_lang (stringclasses, 7 values) | message (stringlengths, 1-50) | sha (stringlengths, 40) | patch (stringlengths, 52-962k) | file_count (int64, 1-300)
---|---|---|---|---|---|
Ruby | Ruby | fix host for activestorage diskservice | b07ce56a383e0ab1514e149b7c9ad2177ea59514 | <ide><path>activestorage/lib/active_storage/service/disk_service.rb
<ide> def url(key, expires_in:, filename:, disposition:, content_type:)
<ide> purpose: :blob_key }
<ide> )
<ide>
<add> current_uri = URI.parse(current_host)
<add>
<ide> generated_url = url_helpers.rails_disk_service_url(verified_key_with_expiration,
<del> host: current_host,
<add> protocol: current_uri.scheme,
<add> host: current_uri.host,
<add> port: current_uri.port,
<ide> disposition: content_disposition,
<ide> content_type: content_type,
<ide> filename: filename
<ide><path>activestorage/test/service/disk_service_test.rb
<ide> class ActiveStorage::Service::DiskServiceTest < ActiveSupport::TestCase
<ide> include ActiveStorage::Service::SharedServiceTests
<ide>
<ide> test "URL generation" do
<del> assert_match(/^https:\/\/example.com\/rails\/active_storage\/disk\/.*\/avatar\.png\?content_type=image%2Fpng&disposition=inline/,
<del> @service.url(@key, expires_in: 5.minutes, disposition: :inline, filename: ActiveStorage::Filename.new("avatar.png"), content_type: "image/png"))
<add> original_url_options = Rails.application.routes.default_url_options.dup
<add> Rails.application.routes.default_url_options.merge!(protocol: "http", host: "test.example.com", port: 3001)
<add> begin
<add> assert_match(/^https:\/\/example.com\/rails\/active_storage\/disk\/.*\/avatar\.png\?content_type=image%2Fpng&disposition=inline/,
<add> @service.url(@key, expires_in: 5.minutes, disposition: :inline, filename: ActiveStorage::Filename.new("avatar.png"), content_type: "image/png"))
<add> ensure
<add> Rails.application.routes.default_url_options = original_url_options
<add> end
<ide> end
<ide>
<ide> test "headers_for_direct_upload generation" do | 2 |
Javascript | Javascript | expand coverage for crypto | 3d645338a005c4318c72a050b2066d4bca313d53 | <ide><path>test/parallel/test-crypto-hash.js
<ide> common.expectsError(
<ide> message: 'The "algorithm" argument must be of type string'
<ide> }
<ide> );
<add>
<add>{
<add> const Hash = crypto.Hash;
<add> const instance = crypto.Hash('sha256');
<add> assert(instance instanceof Hash, 'Hash is expected to return a new instance' +
<add> ' when called without `new`');
<add>}
<ide><path>test/parallel/test-crypto-hmac.js
<ide> if (!common.hasCrypto)
<ide> const assert = require('assert');
<ide> const crypto = require('crypto');
<ide>
<add>{
<add> const Hmac = crypto.Hmac;
<add> const instance = crypto.Hmac('sha256', 'Node');
<add> assert(instance instanceof Hmac, 'Hmac is expected to return a new instance' +
<add> ' when called without `new`');
<add>}
<add>
<add>common.expectsError(
<add> () => crypto.createHmac(null),
<add> {
<add> code: 'ERR_INVALID_ARG_TYPE',
<add> type: TypeError,
<add> message: 'The "hmac" argument must be of type string'
<add> });
<add>
<add>common.expectsError(
<add> () => crypto.createHmac('sha1', null),
<add> {
<add> code: 'ERR_INVALID_ARG_TYPE',
<add> type: TypeError,
<add> message: 'The "key" argument must be one of type string, TypedArray, or ' +
<add> 'DataView'
<add> });
<add>
<ide> {
<ide> // Test HMAC
<ide> const actual = crypto.createHmac('sha1', 'Node') | 2 |
Python | Python | replace lambdalr scheduler wrappers by function | 022525b0031bcdbbb62d1223f75919983f2ac426 | <ide><path>transformers/__init__.py
<ide> from .modeling_encoder_decoder import PreTrainedEncoderDecoder, Model2Model
<ide>
<ide> # Optimization
<del> from .optimization import (AdamW, ConstantLRSchedule, WarmupConstantSchedule, WarmupCosineSchedule,
<del> WarmupCosineWithHardRestartsSchedule, WarmupLinearSchedule)
<add> from .optimization import (AdamW, get_constant_schedule, get_constant_schedule_with_warmup, get_cosine_schedule_with_warmup,
<add> get_cosine_with_hard_restarts_schedule_with_warmup, get_linear_schedule_with_warmup)
<ide>
<ide>
<ide> # TensorFlow
<ide><path>transformers/optimization.py
<ide>
<ide> logger = logging.getLogger(__name__)
<ide>
<del>class ConstantLRSchedule(LambdaLR):
<del> """ Constant learning rate schedule.
<add>
<add>def get_constant_schedule(optimizer, last_epoch=-1):
<add> """ Create a schedule with a constant learning rate.
<ide> """
<del> def __init__(self, optimizer, last_epoch=-1):
<del> super(ConstantLRSchedule, self).__init__(optimizer, lambda _: 1.0, last_epoch=last_epoch)
<add> return LambdaLR(optimizer, lambda _: 1, last_epoch=last_epoch)
<ide>
<ide>
<del>class WarmupConstantSchedule(LambdaLR):
<del> """ Linear warmup and then constant.
<del> Multiplies the learning rate defined in the optimizer by a dynamic variable determined by the current step.
<del> Linearly increases the multiplicative variable from 0. to 1. over `warmup_steps` training steps.
<del> Keeps multiplicative variable equal to 1. after warmup_steps.
<add>def get_constant_schedule_with_warmup(optimizer, num_warmup_steps, last_epoch=-1):
<add> """ Create a schedule with a constant learning rate preceded by a warmup
<add> period during which the learning rate increases linearly between 0 and 1.
<ide> """
<del> def __init__(self, optimizer, warmup_steps, last_epoch=-1):
<del> self.warmup_steps = warmup_steps
<del> super(WarmupConstantSchedule, self).__init__(optimizer, self.lr_lambda, last_epoch=last_epoch)
<del>
<del> def lr_lambda(self, step):
<del> if step < self.warmup_steps:
<del> return float(step) / float(max(1.0, self.warmup_steps))
<add> def lr_lambda(current_step):
<add> if current_step < num_warmup_steps:
<add> return float(current_step) / float(max(1.0, num_warmup_steps))
<ide> return 1.
<ide>
<add> return LambdaLR(optimizer, lr_lambda, last_epoch=last_epoch)
<ide>
<del>class WarmupLinearSchedule(LambdaLR):
<del> """ Linear warmup and then linear decay.
<del> Multiplies the learning rate defined in the optimizer by a dynamic variable determined by the current step.
<del> Linearly increases the multiplicative variable from 0. to 1. over `warmup_steps` training steps.
<del> Linearly decreases the multiplicative variable from 1. to 0. over remaining `t_total - warmup_steps` steps.
<del> """
<del> def __init__(self, optimizer, warmup_steps, t_total, last_epoch=-1):
<del> self.warmup_steps = warmup_steps
<del> self.t_total = t_total
<del> super(WarmupLinearSchedule, self).__init__(optimizer, self.lr_lambda, last_epoch=last_epoch)
<del>
<del> def lr_lambda(self, step):
<del> if step < self.warmup_steps:
<del> return float(step) / float(max(1, self.warmup_steps))
<del> return max(0.0, float(self.t_total - step) / float(max(1.0, self.t_total - self.warmup_steps)))
<del>
<del>
<del>class WarmupCosineSchedule(LambdaLR):
<del> """ Linear warmup and then cosine decay.
<del> Multiplies the learning rate defined in the optimizer by a dynamic variable determined by the current step.
<del> Linearly increases the multiplicative variable from 0. to 1. over `warmup_steps` training steps.
<del> Decreases the multiplicative variable from 1. to 0. over remaining `t_total - warmup_steps` steps following a cosine curve.
<del> If `cycles` (default=0.5) is different from default, then the multiplicative variable follows cosine function after warmup.
<add>
<add>def get_linear_schedule_with_warmup(optimizer, num_warmup_steps, num_training_steps, last_epoch=-1):
<add> """ Create a schedule with a learning rate that decreases linearly after
<add> linearly increasing during a warmup period.
<ide> """
<del> def __init__(self, optimizer, warmup_steps, t_total, cycles=.5, last_epoch=-1):
<del> self.warmup_steps = warmup_steps
<del> self.t_total = t_total
<del> self.cycles = cycles
<del> super(WarmupCosineSchedule, self).__init__(optimizer, self.lr_lambda, last_epoch=last_epoch)
<del>
<del> def lr_lambda(self, step):
<del> if step < self.warmup_steps:
<del> return float(step) / float(max(1.0, self.warmup_steps))
<del> # progress after warmup
<del> progress = float(step - self.warmup_steps) / float(max(1, self.t_total - self.warmup_steps))
<del> return max(0.0, 0.5 * (1. + math.cos(math.pi * float(self.cycles) * 2.0 * progress)))
<del>
<del>
<del>class WarmupCosineWithHardRestartsSchedule(LambdaLR):
<del> """ Linear warmup and then cosine cycles with hard restarts.
<del> Multiplies the learning rate defined in the optimizer by a dynamic variable determined by the current step.
<del> Linearly increases the multiplicative variable from 0. to 1. over `warmup_steps` training steps.
<del> If `cycles` (default=1.) is different from default, learning rate follows `cycles` times a cosine decaying
<del> learning rate (with hard restarts).
<add> def lr_lambda(current_step):
<add> if current_step < num_warmup_steps:
<add> return float(current_step) / float(max(1, num_warmup_steps))
<add> return max(0.0, float(num_training_steps - current_step) / float(max(1, num_training_steps - num_warmup_steps)))
<add>
<add> return LambdaLR(optimizer, lr_lambda, last_epoch)
<add>
<add>
<add>def get_cosine_schedule_with_warmup(optimizer, num_warmup_steps, num_training_steps, num_cycles=.5, last_epoch=-1):
<add> """ Create a schedule with a learning rate that decreases following the
<add> values of the cosine function between 0 and `pi * cycles` after a warmup
<add> period during which it increases linearly between 0 and 1.
<ide> """
<del> def __init__(self, optimizer, warmup_steps, t_total, cycles=1., last_epoch=-1):
<del> self.warmup_steps = warmup_steps
<del> self.t_total = t_total
<del> self.cycles = cycles
<del> super(WarmupCosineWithHardRestartsSchedule, self).__init__(optimizer, self.lr_lambda, last_epoch=last_epoch)
<del>
<del> def lr_lambda(self, step):
<del> if step < self.warmup_steps:
<del> return float(step) / float(max(1, self.warmup_steps))
<del> # progress after warmup
<del> progress = float(step - self.warmup_steps) / float(max(1, self.t_total - self.warmup_steps))
<del> if progress >= 1.0:
<del> return 0.0
<del> return max(0.0, 0.5 * (1. + math.cos(math.pi * ((float(self.cycles) * progress) % 1.0))))
<add> def lr_lambda(current_step):
<add> if current_step < num_warmup_steps:
<add> return float(current_step) / float(max(1, num_warmup_steps))
<add> progress = float(current_step - num_warmup_steps) / float(max(1, num_training_steps - num_warmup_steps))
<add> return max(0., 0.5 * (1. + math.cos(math.pi * float(num_cycles) * 2. * progress)))
<add>
<add> return LambdaLR(optimizer, lr_lambda, last_epoch)
<ide>
<ide>
<add>def get_cosine_with_hard_restarts_schedule_with_warmup(optimizer, num_warmup_steps, num_training_steps, num_cycles=1., last_epoch=-1):
<add> """ Create a schedule with a learning rate that decreases following the
<add> values of the cosine function with several hard restarts, after a warmup
<add> period during which it increases linearly between 0 and 1.
<add> """
<add> def lr_lambda(current_step):
<add> if current_step < num_warmup_steps:
<add> return float(current_step) / float(max(1, num_warmup_steps))
<add> progress = float(current_step - num_warmup_steps) / float(max(1, num_training_steps - num_warmup_steps))
<add> if progress >= 1.:
<add> return 0.
<add> return max(0., 0.5 * (1. + math.cos(math.pi * ((float(num_cycles) * progress) % 1.))))
<add>
<add> return LambdaLR(optimizer, lr_lambda, last_epoch)
<add>
<ide>
<ide> class AdamW(Optimizer):
<ide> """ Implements Adam algorithm with weight decay fix.
<ide><path>transformers/tests/optimization_test.py
<ide> if is_torch_available():
<ide> import torch
<ide>
<del> from transformers import (AdamW, ConstantLRSchedule, WarmupConstantSchedule,
<del> WarmupCosineSchedule, WarmupCosineWithHardRestartsSchedule, WarmupLinearSchedule)
<add> from transformers import (AdamW,
<add> get_constant_schedule,
<add> get_constant_schedule_with_warmup,
<add> get_cosine_schedule_with_warmup,
<add> get_cosine_with_hard_restarts_schedule_with_warmup,
<add> get_linear_schedule_with_warmup)
<ide> else:
<ide> pytestmark = pytest.mark.skip("Require Torch")
<ide>
<ide> def assertListAlmostEqual(self, list1, list2, tol):
<ide> self.assertAlmostEqual(a, b, delta=tol)
<ide>
<ide> def test_constant_scheduler(self):
<del> scheduler = ConstantLRSchedule(self.optimizer)
<add> scheduler = get_constant_schedule(self.optimizer)
<ide> lrs = unwrap_schedule(scheduler, self.num_steps)
<ide> expected_learning_rates = [10.] * self.num_steps
<ide> self.assertEqual(len(lrs[0]), 1)
<ide> self.assertListEqual([l[0] for l in lrs], expected_learning_rates)
<ide>
<del> scheduler = ConstantLRSchedule(self.optimizer)
<add> scheduler = get_constant_schedule(self.optimizer)
<ide> lrs_2 = unwrap_and_save_reload_schedule(scheduler, self.num_steps)
<ide> self.assertListEqual([l[0] for l in lrs], [l[0] for l in lrs_2])
<ide>
<ide> def test_warmup_constant_scheduler(self):
<del> scheduler = WarmupConstantSchedule(self.optimizer, warmup_steps=4)
<add> scheduler = get_constant_schedule_with_warmup(self.optimizer, num_warmup_steps=4)
<ide> lrs = unwrap_schedule(scheduler, self.num_steps)
<ide> expected_learning_rates = [2.5, 5.0, 7.5, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0]
<ide> self.assertEqual(len(lrs[0]), 1)
<ide> self.assertListEqual([l[0] for l in lrs], expected_learning_rates)
<ide>
<del> scheduler = WarmupConstantSchedule(self.optimizer, warmup_steps=4)
<add> scheduler = get_constant_schedule_with_warmup(self.optimizer, num_warmup_steps=4)
<ide> lrs_2 = unwrap_and_save_reload_schedule(scheduler, self.num_steps)
<ide> self.assertListEqual([l[0] for l in lrs], [l[0] for l in lrs_2])
<ide>
<ide> def test_warmup_linear_scheduler(self):
<del> scheduler = WarmupLinearSchedule(self.optimizer, warmup_steps=2, t_total=10)
<add> scheduler = get_linear_schedule_with_warmup(self.optimizer, num_warmup_steps=2, num_training_steps=10)
<ide> lrs = unwrap_schedule(scheduler, self.num_steps)
<ide> expected_learning_rates = [5.0, 10.0, 8.75, 7.5, 6.25, 5.0, 3.75, 2.5, 1.25, 0.0]
<ide> self.assertEqual(len(lrs[0]), 1)
<ide> self.assertListEqual([l[0] for l in lrs], expected_learning_rates)
<ide>
<del> scheduler = WarmupLinearSchedule(self.optimizer, warmup_steps=2, t_total=10)
<add> scheduler = get_linear_schedule_with_warmup(self.optimizer, num_warmup_steps=2, num_training_steps=10)
<ide> lrs_2 = unwrap_and_save_reload_schedule(scheduler, self.num_steps)
<ide> self.assertListEqual([l[0] for l in lrs], [l[0] for l in lrs_2])
<ide>
<ide> def test_warmup_cosine_scheduler(self):
<del> scheduler = WarmupCosineSchedule(self.optimizer, warmup_steps=2, t_total=10)
<add> scheduler = get_cosine_schedule_with_warmup(self.optimizer, num_warmup_steps=2, num_training_steps=10)
<ide> lrs = unwrap_schedule(scheduler, self.num_steps)
<ide> expected_learning_rates = [5.0, 10.0, 9.61, 8.53, 6.91, 5.0, 3.08, 1.46, 0.38, 0.0]
<ide> self.assertEqual(len(lrs[0]), 1)
<ide> self.assertListAlmostEqual([l[0] for l in lrs], expected_learning_rates, tol=1e-2)
<ide>
<del> scheduler = WarmupCosineSchedule(self.optimizer, warmup_steps=2, t_total=10)
<add> scheduler = get_cosine_schedule_with_warmup(self.optimizer, num_warmup_steps=2, num_training_steps=10)
<ide> lrs_2 = unwrap_and_save_reload_schedule(scheduler, self.num_steps)
<ide> self.assertListEqual([l[0] for l in lrs], [l[0] for l in lrs_2])
<ide>
<ide> def test_warmup_cosine_hard_restart_scheduler(self):
<del> scheduler = WarmupCosineWithHardRestartsSchedule(self.optimizer, warmup_steps=2, cycles=2, t_total=10)
<add> scheduler = get_cosine_with_hard_restarts_schedule_with_warmup(self.optimizer, num_warmup_steps=2, num_cycles=2, num_training_steps=10)
<ide> lrs = unwrap_schedule(scheduler, self.num_steps)
<ide> expected_learning_rates = [5.0, 10.0, 8.53, 5.0, 1.46, 10.0, 8.53, 5.0, 1.46, 0.0]
<ide> self.assertEqual(len(lrs[0]), 1)
<ide> self.assertListAlmostEqual([l[0] for l in lrs], expected_learning_rates, tol=1e-2)
<ide>
<del> scheduler = WarmupCosineWithHardRestartsSchedule(self.optimizer, warmup_steps=2, cycles=2, t_total=10)
<add> scheduler = get_cosine_with_hard_restarts_schedule_with_warmup(self.optimizer, num_warmup_steps=2, num_cycles=2, num_training_steps=10)
<ide> lrs_2 = unwrap_and_save_reload_schedule(scheduler, self.num_steps)
<ide> self.assertListEqual([l[0] for l in lrs], [l[0] for l in lrs_2])
<ide>
<add>
<ide> if __name__ == "__main__":
<ide> unittest.main() | 3 |
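The commit above turns the `LambdaLR` subclasses into factory functions that return a plain `LambdaLR`, which is what lets the tests save and reload schedulers transparently. A minimal usage sketch of the new API (assuming a `transformers` version that exports these functions; the model and step counts below are placeholders, not taken from the patch):

```python
import torch
from transformers import AdamW, get_linear_schedule_with_warmup

model = torch.nn.Linear(10, 2)  # stand-in for a real transformer model
optimizer = AdamW(model.parameters(), lr=5e-5)

# Hypothetical schedule: 100 warmup steps out of 1000 total training steps.
scheduler = get_linear_schedule_with_warmup(
    optimizer, num_warmup_steps=100, num_training_steps=1000
)

for _ in range(1000):
    optimizer.step()
    scheduler.step()  # advances the LambdaLR returned by the factory
```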
Text | Text | fix links to ner examples | 3f42eb979f7bd20448ff6b15ab316d63f5489a6f | <ide><path>docs/source/examples.md
<ide> pip install -r ./examples/requirements.txt
<ide> | [GLUE](#glue) | Examples running BERT/XLM/XLNet/RoBERTa on the 9 GLUE tasks. Examples feature distributed training as well as half-precision. |
<ide> | [SQuAD](#squad) | Using BERT/RoBERTa/XLNet/XLM for question answering, examples with distributed training. |
<ide> | [Multiple Choice](#multiple-choice) | Examples running BERT/XLNet/RoBERTa on the SWAG/RACE/ARC tasks. |
<del>| [Named Entity Recognition](https://github.com/huggingface/transformers/tree/master/examples/ner) | Using BERT for Named Entity Recognition (NER) on the CoNLL 2003 dataset, examples with distributed training. |
<add>| [Named Entity Recognition](https://github.com/huggingface/transformers/tree/master/examples/token-classification) | Using BERT for Named Entity Recognition (NER) on the CoNLL 2003 dataset, examples with distributed training. |
<ide> | [XNLI](#xnli) | Examples running BERT/XLM on the XNLI benchmark. |
<ide> | [Adversarial evaluation of model performances](#adversarial-evaluation-of-model-performances) | Testing a model with adversarial evaluation of natural language inference on the Heuristic Analysis for NLI Systems (HANS) dataset (McCoy et al., 2019.) |
<ide>
<ide><path>examples/token-classification/README.md
<ide> ## Named Entity Recognition
<ide>
<del>Based on the scripts [`run_ner.py`](https://github.com/huggingface/transformers/blob/master/examples/ner/run_ner.py) for Pytorch and
<del>[`run_tf_ner.py`](https://github.com/huggingface/transformers/blob/master/examples/ner/run_tf_ner.py) for Tensorflow 2.
<add>Based on the scripts [`run_ner.py`](https://github.com/huggingface/transformers/blob/master/examples/token-classification/run_ner.py) for Pytorch and
<add>[`run_tf_ner.py`](https://github.com/huggingface/transformers/blob/master/examples/token-classification/run_tf_ner.py) for Tensorflow 2.
<ide> This example fine-tune Bert Multilingual on GermEval 2014 (German NER).
<ide> Details and results for the fine-tuning provided by @stefan-it.
<ide> | 2 |
Python | Python | fix template for inputs docstrings | 790f1c9545f4a83b97bf75640be82b2112c3efe7 | <ide><path>src/transformers/models/big_bird/modeling_big_bird.py
<ide> def _init_weights(self, module):
<ide>
<ide> BIG_BIRD_INPUTS_DOCSTRING = r"""
<ide> Args:
<del> input_ids (:obj:`torch.LongTensor` of shape :obj:`{0}`):
<add> input_ids (:obj:`torch.LongTensor` of shape :obj:`({0})`):
<ide> Indices of input sequence tokens in the vocabulary.
<ide>
<ide> Indices can be obtained using :class:`transformers.BigBirdTokenizer`. See
<ide> :func:`transformers.PreTrainedTokenizer.encode` and :func:`transformers.PreTrainedTokenizer.__call__` for
<ide> details.
<ide>
<ide> `What are input IDs? <../glossary.html#input-ids>`__
<del> attention_mask (:obj:`torch.FloatTensor` of shape :obj:`{0}`, `optional`):
<add> attention_mask (:obj:`torch.FloatTensor` of shape :obj:`({0})`, `optional`):
<ide> Mask to avoid performing attention on padding token indices. Mask values selected in ``[0, 1]``:
<ide>
<ide> - 1 for tokens that are **not masked**,
<ide> - 0 for tokens that are **masked**.
<ide>
<ide> `What are attention masks? <../glossary.html#attention-mask>`__
<del> token_type_ids (:obj:`torch.LongTensor` of shape :obj:`{0}`, `optional`):
<add> token_type_ids (:obj:`torch.LongTensor` of shape :obj:`({0})`, `optional`):
<ide> Segment token indices to indicate first and second portions of the inputs. Indices are selected in ``[0,
<ide> 1]``:
<ide>
<ide> - 0 corresponds to a `sentence A` token,
<ide> - 1 corresponds to a `sentence B` token.
<ide>
<ide> `What are token type IDs? <../glossary.html#token-type-ids>`_
<del> position_ids (:obj:`torch.LongTensor` of shape :obj:`{0}`, `optional`):
<add> position_ids (:obj:`torch.LongTensor` of shape :obj:`({0})`, `optional`):
<ide> Indices of positions of each input sequence tokens in the position embeddings. Selected in the range ``[0,
<ide> config.max_position_embeddings - 1]``.
<ide>
<ide> def _init_weights(self, module):
<ide> - 1 indicates the head is **not masked**,
<ide> - 0 indicates the head is **masked**.
<ide>
<del> inputs_embeds (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`):
<add> inputs_embeds (:obj:`torch.FloatTensor` of shape :obj:`({0}, hidden_size)`, `optional`):
<ide> Optionally, instead of passing :obj:`input_ids` you can choose to directly pass an embedded representation.
<ide> This is useful if you want more control over how to convert `input_ids` indices into associated vectors
<ide> than the model's internal embedding lookup matrix.
<ide> def set_attention_type(self, value: str):
<ide> self.attention_type = value
<ide> self.encoder.set_attention_type(value)
<ide>
<del> @add_start_docstrings_to_model_forward(BIG_BIRD_INPUTS_DOCSTRING.format("(batch_size, sequence_length)"))
<add> @add_start_docstrings_to_model_forward(BIG_BIRD_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
<ide> @add_code_sample_docstrings(
<ide> tokenizer_class=_TOKENIZER_FOR_DOC,
<ide> checkpoint=_CHECKPOINT_FOR_DOC,
<ide> def get_output_embeddings(self):
<ide> def set_output_embeddings(self, new_embeddings):
<ide> self.cls.predictions.decoder = new_embeddings
<ide>
<del> @add_start_docstrings_to_model_forward(BIG_BIRD_INPUTS_DOCSTRING.format("(batch_size, sequence_length)"))
<add> @add_start_docstrings_to_model_forward(BIG_BIRD_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
<ide> @add_code_sample_docstrings(
<ide> tokenizer_class=_TOKENIZER_FOR_DOC,
<ide> checkpoint=_CHECKPOINT_FOR_DOC,
<ide> def __init__(self, config):
<ide>
<ide> self.init_weights()
<ide>
<del> @add_start_docstrings_to_model_forward(BIG_BIRD_INPUTS_DOCSTRING.format("(batch_size, sequence_length)"))
<add> @add_start_docstrings_to_model_forward(BIG_BIRD_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
<ide> @add_code_sample_docstrings(
<ide> tokenizer_class=_TOKENIZER_FOR_DOC,
<ide> checkpoint=_CHECKPOINT_FOR_DOC,
<ide> def __init__(self, config, add_pooling_layer=False):
<ide>
<ide> self.init_weights()
<ide>
<del> @add_start_docstrings_to_model_forward(BIG_BIRD_INPUTS_DOCSTRING.format("(batch_size, sequence_length)"))
<add> @add_start_docstrings_to_model_forward(BIG_BIRD_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
<ide> @add_code_sample_docstrings(
<ide> tokenizer_class=_TOKENIZER_FOR_DOC,
<ide> checkpoint="google/bigbird-base-trivia-itc",
<ide><path>src/transformers/models/canine/modeling_canine.py
<ide> def _init_weights(self, module):
<ide>
<ide> CANINE_INPUTS_DOCSTRING = r"""
<ide> Args:
<del> input_ids (:obj:`torch.LongTensor` of shape :obj:`{0}`):
<add> input_ids (:obj:`torch.LongTensor` of shape :obj:`({0})`):
<ide> Indices of input sequence tokens in the vocabulary.
<ide>
<ide> Indices can be obtained using :class:`transformers.CanineTokenizer`. See
<ide> :func:`transformers.PreTrainedTokenizer.encode` and :func:`transformers.PreTrainedTokenizer.__call__` for
<ide> details.
<ide>
<ide> `What are input IDs? <../glossary.html#input-ids>`__
<del> attention_mask (:obj:`torch.FloatTensor` of shape :obj:`{0}`, `optional`):
<add> attention_mask (:obj:`torch.FloatTensor` of shape :obj:`({0})`, `optional`):
<ide> Mask to avoid performing attention on padding token indices. Mask values selected in ``[0, 1]``:
<ide>
<ide> - 1 for tokens that are **not masked**,
<ide> - 0 for tokens that are **masked**.
<ide>
<ide> `What are attention masks? <../glossary.html#attention-mask>`__
<del> token_type_ids (:obj:`torch.LongTensor` of shape :obj:`{0}`, `optional`):
<add> token_type_ids (:obj:`torch.LongTensor` of shape :obj:`({0})`, `optional`):
<ide> Segment token indices to indicate first and second portions of the inputs. Indices are selected in ``[0,
<ide> 1]``:
<ide>
<ide> - 0 corresponds to a `sentence A` token,
<ide> - 1 corresponds to a `sentence B` token.
<ide>
<ide> `What are token type IDs? <../glossary.html#token-type-ids>`_
<del> position_ids (:obj:`torch.LongTensor` of shape :obj:`{0}`, `optional`):
<add> position_ids (:obj:`torch.LongTensor` of shape :obj:`({0})`, `optional`):
<ide> Indices of positions of each input sequence tokens in the position embeddings. Selected in the range ``[0,
<ide> config.max_position_embeddings - 1]``.
<ide>
<ide> def _init_weights(self, module):
<ide> - 1 indicates the head is **not masked**,
<ide> - 0 indicates the head is **masked**.
<ide>
<del> inputs_embeds (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`):
<add> inputs_embeds (:obj:`torch.FloatTensor` of shape :obj:`({0}, hidden_size)`, `optional`):
<ide> Optionally, instead of passing :obj:`input_ids` you can choose to directly pass an embedded representation.
<ide> This is useful if you want more control over how to convert `input_ids` indices into associated vectors
<ide> than the model's internal embedding lookup matrix.
<ide> def _repeat_molecules(self, molecules: torch.Tensor, char_seq_length: torch.Tens
<ide> # `repeated`: [batch_size, char_seq_len, molecule_hidden_size]
<ide> return torch.cat([repeated, remainder_repeated], dim=-2)
<ide>
<del> @add_start_docstrings_to_model_forward(CANINE_INPUTS_DOCSTRING.format("(batch_size, sequence_length)"))
<add> @add_start_docstrings_to_model_forward(CANINE_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
<ide> @add_code_sample_docstrings(
<ide> tokenizer_class=_TOKENIZER_FOR_DOC,
<ide> checkpoint=_CHECKPOINT_FOR_DOC,
<ide> def __init__(self, config):
<ide>
<ide> self.init_weights()
<ide>
<del> @add_start_docstrings_to_model_forward(CANINE_INPUTS_DOCSTRING.format("(batch_size, sequence_length)"))
<add> @add_start_docstrings_to_model_forward(CANINE_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
<ide> @add_code_sample_docstrings(
<ide> tokenizer_class=_TOKENIZER_FOR_DOC,
<ide> checkpoint=_CHECKPOINT_FOR_DOC,
<ide> def __init__(self, config):
<ide>
<ide> self.init_weights()
<ide>
<del> @add_start_docstrings_to_model_forward(CANINE_INPUTS_DOCSTRING.format("(batch_size, sequence_length)"))
<add> @add_start_docstrings_to_model_forward(CANINE_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
<ide> @add_code_sample_docstrings(
<ide> tokenizer_class=_TOKENIZER_FOR_DOC,
<ide> checkpoint=_CHECKPOINT_FOR_DOC,
<ide><path>src/transformers/models/convbert/modeling_convbert.py
<ide> def forward(self, hidden_states):
<ide>
<ide> CONVBERT_INPUTS_DOCSTRING = r"""
<ide> Args:
<del> input_ids (:obj:`torch.LongTensor` of shape :obj:`{0}`):
<add> input_ids (:obj:`torch.LongTensor` of shape :obj:`({0})`):
<ide> Indices of input sequence tokens in the vocabulary.
<ide>
<ide> Indices can be obtained using :class:`transformers.ConvBertTokenizer`. See
<ide> :func:`transformers.PreTrainedTokenizer.encode` and :func:`transformers.PreTrainedTokenizer.__call__` for
<ide> details.
<ide>
<ide> `What are input IDs? <../glossary.html#input-ids>`__
<del> attention_mask (:obj:`torch.FloatTensor` of shape :obj:`{0}`, `optional`):
<add> attention_mask (:obj:`torch.FloatTensor` of shape :obj:`({0})`, `optional`):
<ide> Mask to avoid performing attention on padding token indices. Mask values selected in ``[0, 1]``:
<ide>
<ide>
<ide> - 1 for tokens that are **not masked**,
<ide> - 0 for tokens that are **masked**.
<ide>
<ide> `What are attention masks? <../glossary.html#attention-mask>`__
<del> token_type_ids (:obj:`torch.LongTensor` of shape :obj:`{0}`, `optional`):
<add> token_type_ids (:obj:`torch.LongTensor` of shape :obj:`({0})`, `optional`):
<ide> Segment token indices to indicate first and second portions of the inputs. Indices are selected in ``[0,
<ide> 1]``:
<ide>
<ide> def forward(self, hidden_states):
<ide> - 1 corresponds to a `sentence B` token.
<ide>
<ide> `What are token type IDs? <../glossary.html#token-type-ids>`_
<del> position_ids (:obj:`torch.LongTensor` of shape :obj:`{0}`, `optional`):
<add> position_ids (:obj:`torch.LongTensor` of shape :obj:`({0})`, `optional`):
<ide> Indices of positions of each input sequence tokens in the position embeddings. Selected in the range ``[0,
<ide> config.max_position_embeddings - 1]``.
<ide>
<ide> def forward(self, hidden_states):
<ide> - 1 indicates the head is **not masked**,
<ide> - 0 indicates the head is **masked**.
<ide>
<del> inputs_embeds (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`):
<add> inputs_embeds (:obj:`torch.FloatTensor` of shape :obj:`({0}, hidden_size)`, `optional`):
<ide> Optionally, instead of passing :obj:`input_ids` you can choose to directly pass an embedded representation.
<ide> This is useful if you want more control over how to convert `input_ids` indices into associated vectors
<ide> than the model's internal embedding lookup matrix.
<ide> def __init__(self, config):
<ide>
<ide> self.init_weights()
<ide>
<del> @add_start_docstrings_to_model_forward(CONVBERT_INPUTS_DOCSTRING.format("(batch_size, sequence_length)"))
<add> @add_start_docstrings_to_model_forward(CONVBERT_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
<ide> @add_code_sample_docstrings(
<ide> tokenizer_class=_TOKENIZER_FOR_DOC,
<ide> checkpoint=_CHECKPOINT_FOR_DOC,
<ide> def __init__(self, config):
<ide>
<ide> self.init_weights()
<ide>
<del> @add_start_docstrings_to_model_forward(CONVBERT_INPUTS_DOCSTRING.format("(batch_size, sequence_length)"))
<add> @add_start_docstrings_to_model_forward(CONVBERT_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
<ide> @add_code_sample_docstrings(
<ide> tokenizer_class=_TOKENIZER_FOR_DOC,
<ide> checkpoint=_CHECKPOINT_FOR_DOC,
<ide><path>src/transformers/models/deberta/modeling_deberta.py
<ide> def _init_weights(self, module):
<ide>
<ide> DEBERTA_INPUTS_DOCSTRING = r"""
<ide> Args:
<del> input_ids (:obj:`torch.LongTensor` of shape :obj:`{0}`):
<add> input_ids (:obj:`torch.LongTensor` of shape :obj:`({0})`):
<ide> Indices of input sequence tokens in the vocabulary.
<ide>
<ide> Indices can be obtained using :class:`transformers.DebertaTokenizer`. See
<ide> :func:`transformers.PreTrainedTokenizer.encode` and :func:`transformers.PreTrainedTokenizer.__call__` for
<ide> details.
<ide>
<ide> `What are input IDs? <../glossary.html#input-ids>`__
<del> attention_mask (:obj:`torch.FloatTensor` of shape :obj:`{0}`, `optional`):
<add> attention_mask (:obj:`torch.FloatTensor` of shape :obj:`({0})`, `optional`):
<ide> Mask to avoid performing attention on padding token indices. Mask values selected in ``[0, 1]``:
<ide>
<ide> - 1 for tokens that are **not masked**,
<ide> - 0 for tokens that are **masked**.
<ide>
<ide> `What are attention masks? <../glossary.html#attention-mask>`__
<del> token_type_ids (:obj:`torch.LongTensor` of shape :obj:`{0}`, `optional`):
<add> token_type_ids (:obj:`torch.LongTensor` of shape :obj:`({0})`, `optional`):
<ide> Segment token indices to indicate first and second portions of the inputs. Indices are selected in ``[0,
<ide> 1]``:
<ide>
<ide> - 0 corresponds to a `sentence A` token,
<ide> - 1 corresponds to a `sentence B` token.
<ide>
<ide> `What are token type IDs? <../glossary.html#token-type-ids>`_
<del> position_ids (:obj:`torch.LongTensor` of shape :obj:`{0}`, `optional`):
<add> position_ids (:obj:`torch.LongTensor` of shape :obj:`({0})`, `optional`):
<ide> Indices of positions of each input sequence tokens in the position embeddings. Selected in the range ``[0,
<ide> config.max_position_embeddings - 1]``.
<ide>
<ide> `What are position IDs? <../glossary.html#position-ids>`_
<del> inputs_embeds (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`):
<add> inputs_embeds (:obj:`torch.FloatTensor` of shape :obj:`({0}, hidden_size)`, `optional`):
<ide> Optionally, instead of passing :obj:`input_ids` you can choose to directly pass an embedded representation.
<ide> This is useful if you want more control over how to convert `input_ids` indices into associated vectors
<ide> than the model's internal embedding lookup matrix.
<ide><path>src/transformers/models/deberta_v2/modeling_deberta_v2.py
<ide> def _init_weights(self, module):
<ide>
<ide> DEBERTA_INPUTS_DOCSTRING = r"""
<ide> Args:
<del> input_ids (:obj:`torch.LongTensor` of shape :obj:`{0}`):
<add> input_ids (:obj:`torch.LongTensor` of shape :obj:`({0})`):
<ide> Indices of input sequence tokens in the vocabulary.
<ide>
<ide> Indices can be obtained using :class:`transformers.DebertaV2Tokenizer`. See
<ide> :func:`transformers.PreTrainedTokenizer.encode` and :func:`transformers.PreTrainedTokenizer.__call__` for
<ide> details.
<ide>
<ide> `What are input IDs? <../glossary.html#input-ids>`__
<del> attention_mask (:obj:`torch.FloatTensor` of shape :obj:`{0}`, `optional`):
<add> attention_mask (:obj:`torch.FloatTensor` of shape :obj:`({0})`, `optional`):
<ide> Mask to avoid performing attention on padding token indices. Mask values selected in ``[0, 1]``:
<ide>
<ide> - 1 for tokens that are **not masked**,
<ide> - 0 for tokens that are **masked**.
<ide>
<ide> `What are attention masks? <../glossary.html#attention-mask>`__
<del> token_type_ids (:obj:`torch.LongTensor` of shape :obj:`{0}`, `optional`):
<add> token_type_ids (:obj:`torch.LongTensor` of shape :obj:`({0})`, `optional`):
<ide> Segment token indices to indicate first and second portions of the inputs. Indices are selected in ``[0,
<ide> 1]``:
<ide>
<ide> - 0 corresponds to a `sentence A` token,
<ide> - 1 corresponds to a `sentence B` token.
<ide>
<ide> `What are token type IDs? <../glossary.html#token-type-ids>`_
<del> position_ids (:obj:`torch.LongTensor` of shape :obj:`{0}`, `optional`):
<add> position_ids (:obj:`torch.LongTensor` of shape :obj:`({0})`, `optional`):
<ide> Indices of positions of each input sequence tokens in the position embeddings. Selected in the range ``[0,
<ide> config.max_position_embeddings - 1]``.
<ide>
<ide> `What are position IDs? <../glossary.html#position-ids>`_
<del> inputs_embeds (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`):
<add> inputs_embeds (:obj:`torch.FloatTensor` of shape :obj:`({0}, hidden_size)`, `optional`):
<ide> Optionally, instead of passing :obj:`input_ids` you can choose to directly pass an embedded representation.
<ide> This is useful if you want more control over how to convert `input_ids` indices into associated vectors
<ide> than the model's internal embedding lookup matrix.
<ide><path>src/transformers/models/deit/modeling_deit.py
<ide> class PreTrainedModel
<ide> for layer, heads in heads_to_prune.items():
<ide> self.encoder.layer[layer].attention.prune_heads(heads)
<ide>
<del> @add_start_docstrings_to_model_forward(DEIT_INPUTS_DOCSTRING.format("(batch_size, sequence_length)"))
<add> @add_start_docstrings_to_model_forward(DEIT_INPUTS_DOCSTRING)
<ide> @replace_return_docstrings(output_type=BaseModelOutputWithPooling, config_class=_CONFIG_FOR_DOC)
<ide> def forward(
<ide> self,
<ide> def __init__(self, config):
<ide>
<ide> self.init_weights()
<ide>
<del> @add_start_docstrings_to_model_forward(DEIT_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
<add> @add_start_docstrings_to_model_forward(DEIT_INPUTS_DOCSTRING)
<ide> @replace_return_docstrings(output_type=SequenceClassifierOutput, config_class=_CONFIG_FOR_DOC)
<ide> def forward(
<ide> self,
<ide> def __init__(self, config):
<ide>
<ide> self.init_weights()
<ide>
<del> @add_start_docstrings_to_model_forward(DEIT_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
<add> @add_start_docstrings_to_model_forward(DEIT_INPUTS_DOCSTRING)
<ide> @replace_return_docstrings(output_type=DeiTForImageClassificationWithTeacherOutput, config_class=_CONFIG_FOR_DOC)
<ide> def forward(
<ide> self,
<ide><path>src/transformers/models/ibert/modeling_ibert.py
<ide> class PreTrainedModel
<ide> for layer, heads in heads_to_prune.items():
<ide> self.encoder.layer[layer].attention.prune_heads(heads)
<ide>
<del> @add_start_docstrings_to_model_forward(IBERT_INPUTS_DOCSTRING.format("(batch_size, sequence_length)"))
<add> @add_start_docstrings_to_model_forward(IBERT_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
<ide> @add_code_sample_docstrings(
<ide> tokenizer_class=_TOKENIZER_FOR_DOC,
<ide> checkpoint=_CHECKPOINT_FOR_DOC,
<ide><path>src/transformers/models/mpnet/modeling_mpnet.py
<ide> class PreTrainedModel
<ide> for layer, heads in heads_to_prune.items():
<ide> self.encoder.layer[layer].attention.prune_heads(heads)
<ide>
<del> @add_start_docstrings_to_model_forward(MPNET_INPUTS_DOCSTRING.format("(batch_size, sequence_length)"))
<add> @add_start_docstrings_to_model_forward(MPNET_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
<ide> @add_code_sample_docstrings(
<ide> tokenizer_class=_TOKENIZER_FOR_DOC,
<ide> checkpoint=_CHECKPOINT_FOR_DOC,
<ide> def __init__(self, config):
<ide>
<ide> self.init_weights()
<ide>
<del> @add_start_docstrings_to_model_forward(MPNET_INPUTS_DOCSTRING.format("(batch_size, sequence_length)"))
<add> @add_start_docstrings_to_model_forward(MPNET_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
<ide> @add_code_sample_docstrings(
<ide> tokenizer_class=_TOKENIZER_FOR_DOC,
<ide> checkpoint=_CHECKPOINT_FOR_DOC,
<ide><path>src/transformers/models/rembert/modeling_rembert.py
<ide> def _init_weights(self, module):
<ide>
<ide> REMBERT_INPUTS_DOCSTRING = r"""
<ide> Args:
<del> input_ids (:obj:`torch.LongTensor` of shape :obj:`{0}`):
<add> input_ids (:obj:`torch.LongTensor` of shape :obj:`({0})`):
<ide> Indices of input sequence tokens in the vocabulary.
<ide>
<ide> Indices can be obtained using :class:`transformers.RemBertTokenizer`. See
<ide> :func:`transformers.PreTrainedTokenizer.encode` and :func:`transformers.PreTrainedTokenizer.__call__` for
<ide> details.
<ide>
<ide> `What are input IDs? <../glossary.html#input-ids>`__
<del> attention_mask (:obj:`torch.FloatTensor` of shape :obj:`{0}`, `optional`):
<add> attention_mask (:obj:`torch.FloatTensor` of shape :obj:`({0})`, `optional`):
<ide> Mask to avoid performing attention on padding token indices. Mask values selected in ``[0, 1]``:
<ide>
<ide> - 1 for tokens that are **not masked**,
<ide> - 0 for tokens that are **masked**.
<ide>
<ide> `What are attention masks? <../glossary.html#attention-mask>`__
<del> token_type_ids (:obj:`torch.LongTensor` of shape :obj:`{0}`, `optional`):
<add> token_type_ids (:obj:`torch.LongTensor` of shape :obj:`({0})`, `optional`):
<ide> Segment token indices to indicate first and second portions of the inputs. Indices are selected in ``[0,
<ide> 1]``:
<ide>
<ide> - 0 corresponds to a `sentence A` token,
<ide> - 1 corresponds to a `sentence B` token.
<ide>
<ide> `What are token type IDs? <../glossary.html#token-type-ids>`_
<del> position_ids (:obj:`torch.LongTensor` of shape :obj:`{0}`, `optional`):
<add> position_ids (:obj:`torch.LongTensor` of shape :obj:`({0})`, `optional`):
<ide> Indices of positions of each input sequence tokens in the position embeddings. Selected in the range ``[0,
<ide> config.max_position_embeddings - 1]``.
<ide>
<ide> def _init_weights(self, module):
<ide> - 1 indicates the head is **not masked**,
<ide> - 0 indicates the head is **masked**.
<ide>
<del> inputs_embeds (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`):
<add> inputs_embeds (:obj:`torch.FloatTensor` of shape :obj:`({0}, hidden_size)`, `optional`):
<ide> Optionally, instead of passing :obj:`input_ids` you can choose to directly pass an embedded representation.
<ide> This is useful if you want more control over how to convert `input_ids` indices into associated vectors
<ide> than the model's internal embedding lookup matrix.
<ide> class PreTrainedModel
<ide> for layer, heads in heads_to_prune.items():
<ide> self.encoder.layer[layer].attention.prune_heads(heads)
<ide>
<del> @add_start_docstrings_to_model_forward(REMBERT_INPUTS_DOCSTRING.format("(batch_size, sequence_length)"))
<add> @add_start_docstrings_to_model_forward(REMBERT_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
<ide> @add_code_sample_docstrings(
<ide> tokenizer_class=_TOKENIZER_FOR_DOC,
<ide> checkpoint="rembert",
<ide> def get_output_embeddings(self):
<ide> def set_output_embeddings(self, new_embeddings):
<ide> self.cls.predictions.decoder = new_embeddings
<ide>
<del> @add_start_docstrings_to_model_forward(REMBERT_INPUTS_DOCSTRING.format("(batch_size, sequence_length)"))
<add> @add_start_docstrings_to_model_forward(REMBERT_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
<ide> @add_code_sample_docstrings(
<ide> tokenizer_class=_TOKENIZER_FOR_DOC,
<ide> checkpoint="rembert",
<ide> def __init__(self, config):
<ide>
<ide> self.init_weights()
<ide>
<del> @add_start_docstrings_to_model_forward(REMBERT_INPUTS_DOCSTRING.format("(batch_size, sequence_length)"))
<add> @add_start_docstrings_to_model_forward(REMBERT_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
<ide> @add_code_sample_docstrings(
<ide> tokenizer_class=_TOKENIZER_FOR_DOC,
<ide> checkpoint="rembert",
<ide> def __init__(self, config):
<ide>
<ide> self.init_weights()
<ide>
<del> @add_start_docstrings_to_model_forward(REMBERT_INPUTS_DOCSTRING.format("(batch_size, sequence_length)"))
<add> @add_start_docstrings_to_model_forward(REMBERT_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
<ide> @add_code_sample_docstrings(
<ide> tokenizer_class=_TOKENIZER_FOR_DOC,
<ide> checkpoint="rembert",
<ide><path>src/transformers/models/roberta/modeling_roberta.py
<ide> class PreTrainedModel
<ide> for layer, heads in heads_to_prune.items():
<ide> self.encoder.layer[layer].attention.prune_heads(heads)
<ide>
<del> @add_start_docstrings_to_model_forward(ROBERTA_INPUTS_DOCSTRING.format("(batch_size, sequence_length)"))
<add> @add_start_docstrings_to_model_forward(ROBERTA_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
<ide> @add_code_sample_docstrings(
<ide> tokenizer_class=_TOKENIZER_FOR_DOC,
<ide> checkpoint=_CHECKPOINT_FOR_DOC,
<ide><path>src/transformers/models/roformer/modeling_roformer.py
<ide> def _init_weights(self, module):
<ide>
<ide> ROFORMER_INPUTS_DOCSTRING = r"""
<ide> Args:
<del> input_ids (:obj:`torch.LongTensor` of shape :obj:`{0}`):
<add> input_ids (:obj:`torch.LongTensor` of shape :obj:`({0})`):
<ide> Indices of input sequence tokens in the vocabulary.
<ide>
<ide> Indices can be obtained using :class:`transformers.RoFormerTokenizer`. See
<ide> :func:`transformers.PreTrainedTokenizer.encode` and :func:`transformers.PreTrainedTokenizer.__call__` for
<ide> details.
<ide>
<ide> `What are input IDs? <../glossary.html#input-ids>`__
<del> attention_mask (:obj:`torch.FloatTensor` of shape :obj:`{0}`, `optional`):
<add> attention_mask (:obj:`torch.FloatTensor` of shape :obj:`({0})`, `optional`):
<ide> Mask to avoid performing attention on padding token indices. Mask values selected in ``[0, 1]``:
<ide>
<ide> - 1 for tokens that are **not masked**,
<ide> - 0 for tokens that are **masked**.
<ide>
<ide> `What are attention masks? <../glossary.html#attention-mask>`__
<del> token_type_ids (:obj:`torch.LongTensor` of shape :obj:`{0}`, `optional`):
<add> token_type_ids (:obj:`torch.LongTensor` of shape :obj:`({0})`, `optional`):
<ide> Segment token indices to indicate first and second portions of the inputs. Indices are selected in ``[0,
<ide> 1]``:
<ide>
<ide> def _init_weights(self, module):
<ide> - 1 indicates the head is **not masked**,
<ide> - 0 indicates the head is **masked**.
<ide>
<del> inputs_embeds (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`):
<add> inputs_embeds (:obj:`torch.FloatTensor` of shape :obj:`({0}, hidden_size)`, `optional`):
<ide> Optionally, instead of passing :obj:`input_ids` you can choose to directly pass an embedded representation.
<ide> This is useful if you want more control over how to convert `input_ids` indices into associated vectors
<ide> than the model's internal embedding lookup matrix.
<ide> class PreTrainedModel
<ide> for layer, heads in heads_to_prune.items():
<ide> self.encoder.layer[layer].attention.prune_heads(heads)
<ide>
<del> @add_start_docstrings_to_model_forward(ROFORMER_INPUTS_DOCSTRING.format("(batch_size, sequence_length)"))
<add> @add_start_docstrings_to_model_forward(ROFORMER_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
<ide> @add_code_sample_docstrings(
<ide> tokenizer_class=_TOKENIZER_FOR_DOC,
<ide> checkpoint=_CHECKPOINT_FOR_DOC,
<ide> def get_output_embeddings(self):
<ide> def set_output_embeddings(self, new_embeddings):
<ide> self.cls.predictions.decoder = new_embeddings
<ide>
<del> @add_start_docstrings_to_model_forward(ROFORMER_INPUTS_DOCSTRING.format("(batch_size, sequence_length)"))
<add> @add_start_docstrings_to_model_forward(ROFORMER_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
<ide> @add_code_sample_docstrings(
<ide> tokenizer_class=_TOKENIZER_FOR_DOC,
<ide> checkpoint=_CHECKPOINT_FOR_DOC,
<ide> def __init__(self, config):
<ide>
<ide> self.init_weights()
<ide>
<del> @add_start_docstrings_to_model_forward(ROFORMER_INPUTS_DOCSTRING.format("(batch_size, sequence_length)"))
<add> @add_start_docstrings_to_model_forward(ROFORMER_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
<ide> @add_code_sample_docstrings(
<ide> tokenizer_class=_TOKENIZER_FOR_DOC,
<ide> checkpoint=_CHECKPOINT_FOR_DOC,
<ide> def __init__(self, config):
<ide>
<ide> self.init_weights()
<ide>
<del> @add_start_docstrings_to_model_forward(ROFORMER_INPUTS_DOCSTRING.format("(batch_size, sequence_length)"))
<add> @add_start_docstrings_to_model_forward(ROFORMER_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
<ide> @add_code_sample_docstrings(
<ide> tokenizer_class=_TOKENIZER_FOR_DOC,
<ide> checkpoint=_CHECKPOINT_FOR_DOC,
<ide><path>src/transformers/models/squeezebert/modeling_squeezebert.py
<ide> class PreTrainedModel
<ide> for layer, heads in heads_to_prune.items():
<ide> self.encoder.layer[layer].attention.prune_heads(heads)
<ide>
<del> @add_start_docstrings_to_model_forward(SQUEEZEBERT_INPUTS_DOCSTRING.format("(batch_size, sequence_length)"))
<add> @add_start_docstrings_to_model_forward(SQUEEZEBERT_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
<ide> @add_code_sample_docstrings(
<ide> tokenizer_class=_TOKENIZER_FOR_DOC,
<ide> checkpoint=_CHECKPOINT_FOR_DOC,
<ide> def get_output_embeddings(self):
<ide> def set_output_embeddings(self, new_embeddings):
<ide> self.cls.predictions.decoder = new_embeddings
<ide>
<del> @add_start_docstrings_to_model_forward(SQUEEZEBERT_INPUTS_DOCSTRING.format("(batch_size, sequence_length)"))
<add> @add_start_docstrings_to_model_forward(SQUEEZEBERT_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
<ide> @add_code_sample_docstrings(
<ide> tokenizer_class=_TOKENIZER_FOR_DOC,
<ide> checkpoint=_CHECKPOINT_FOR_DOC,
<ide> def __init__(self, config):
<ide>
<ide> self.init_weights()
<ide>
<del> @add_start_docstrings_to_model_forward(SQUEEZEBERT_INPUTS_DOCSTRING.format("(batch_size, sequence_length)"))
<add> @add_start_docstrings_to_model_forward(SQUEEZEBERT_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
<ide> @add_code_sample_docstrings(
<ide> tokenizer_class=_TOKENIZER_FOR_DOC,
<ide> checkpoint=_CHECKPOINT_FOR_DOC,
<ide> def __init__(self, config):
<ide> self.init_weights()
<ide>
<ide> @add_start_docstrings_to_model_forward(
<del> SQUEEZEBERT_INPUTS_DOCSTRING.format("(batch_size, num_choices, sequence_length)")
<add> SQUEEZEBERT_INPUTS_DOCSTRING.format("batch_size, num_choices, sequence_length")
<ide> )
<ide> @add_code_sample_docstrings(
<ide> tokenizer_class=_TOKENIZER_FOR_DOC,
<ide> def __init__(self, config):
<ide>
<ide> self.init_weights()
<ide>
<del> @add_start_docstrings_to_model_forward(SQUEEZEBERT_INPUTS_DOCSTRING.format("(batch_size, sequence_length)"))
<add> @add_start_docstrings_to_model_forward(SQUEEZEBERT_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
<ide> @add_code_sample_docstrings(
<ide> tokenizer_class=_TOKENIZER_FOR_DOC,
<ide> checkpoint=_CHECKPOINT_FOR_DOC,
<ide> def __init__(self, config):
<ide>
<ide> self.init_weights()
<ide>
<del> @add_start_docstrings_to_model_forward(SQUEEZEBERT_INPUTS_DOCSTRING.format("(batch_size, sequence_length)"))
<add> @add_start_docstrings_to_model_forward(SQUEEZEBERT_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
<ide> @add_code_sample_docstrings(
<ide> tokenizer_class=_TOKENIZER_FOR_DOC,
<ide> checkpoint=_CHECKPOINT_FOR_DOC,
<ide><path>src/transformers/models/vit/modeling_vit.py
<ide> class PreTrainedModel
<ide> for layer, heads in heads_to_prune.items():
<ide> self.encoder.layer[layer].attention.prune_heads(heads)
<ide>
<del> @add_start_docstrings_to_model_forward(VIT_INPUTS_DOCSTRING.format("(batch_size, sequence_length)"))
<add> @add_start_docstrings_to_model_forward(VIT_INPUTS_DOCSTRING)
<ide> @replace_return_docstrings(output_type=BaseModelOutputWithPooling, config_class=_CONFIG_FOR_DOC)
<ide> def forward(
<ide> self,
<ide> def __init__(self, config):
<ide>
<ide> self.init_weights()
<ide>
<del> @add_start_docstrings_to_model_forward(VIT_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
<add> @add_start_docstrings_to_model_forward(VIT_INPUTS_DOCSTRING)
<ide> @replace_return_docstrings(output_type=SequenceClassifierOutput, config_class=_CONFIG_FOR_DOC)
<ide> def forward(
<ide> self,
<ide><path>src/transformers/models/xlnet/modeling_xlnet.py
<ide> class XLNetForQuestionAnsweringOutput(ModelOutput):
<ide>
<ide> XLNET_INPUTS_DOCSTRING = r"""
<ide> Args:
<del> input_ids (:obj:`torch.LongTensor` of shape :obj:`{0}`):
<add> input_ids (:obj:`torch.LongTensor` of shape :obj:`({0})`):
<ide> Indices of input sequence tokens in the vocabulary.
<ide>
<ide> Indices can be obtained using :class:`transformers.XLNetTokenizer`. See
<ide><path>templates/adding_a_new_model/cookiecutter-template-{{cookiecutter.modelname}}/modeling_{{cookiecutter.lowercase_modelname}}.py
<ide> def _init_weights(self, module):
<ide>
<ide> {{cookiecutter.uppercase_modelname}}_INPUTS_DOCSTRING = r"""
<ide> Args:
<del> input_ids (:obj:`torch.LongTensor` of shape :obj:`{0}`):
<add> input_ids (:obj:`torch.LongTensor` of shape :obj:`({0})`):
<ide> Indices of input sequence tokens in the vocabulary.
<ide>
<ide> Indices can be obtained using :class:`transformers.{{cookiecutter.camelcase_modelname}}Tokenizer`.
<ide> See :func:`transformers.PreTrainedTokenizer.encode` and
<ide> :func:`transformers.PreTrainedTokenizer.__call__` for details.
<ide>
<ide> `What are input IDs? <../glossary.html#input-ids>`__
<del> attention_mask (:obj:`torch.FloatTensor` of shape :obj:`{0}`, `optional`):
<add> attention_mask (:obj:`torch.FloatTensor` of shape :obj:`({0})`, `optional`):
<ide> Mask to avoid performing attention on padding token indices. Mask values selected in ``[0, 1]``:
<ide>
<ide> - 1 for tokens that are **not masked**,
<ide> - 0 for tokens that are **masked**.
<ide>
<ide> `What are attention masks? <../glossary.html#attention-mask>`__
<del> token_type_ids (:obj:`torch.LongTensor` of shape :obj:`{0}`, `optional`):
<add> token_type_ids (:obj:`torch.LongTensor` of shape :obj:`({0})`, `optional`):
<ide> Segment token indices to indicate first and second portions of the inputs. Indices are selected in ``[0,
<ide> 1]``:
<ide>
<ide> - 0 corresponds to a `sentence A` token,
<ide> - 1 corresponds to a `sentence B` token.
<ide>
<ide> `What are token type IDs? <../glossary.html#token-type-ids>`_
<del> position_ids (:obj:`torch.LongTensor` of shape :obj:`{0}`, `optional`):
<add> position_ids (:obj:`torch.LongTensor` of shape :obj:`({0})`, `optional`):
<ide> Indices of positions of each input sequence tokens in the position embeddings.
<ide> Selected in the range ``[0, config.max_position_embeddings - 1]``.
<ide>
<ide> def _init_weights(self, module):
<ide> - 1 indicates the head is **not masked**,
<ide> - 0 indicates the head is **masked**.
<ide>
<del> inputs_embeds (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`):
<add> inputs_embeds (:obj:`torch.FloatTensor` of shape :obj:`({0}, hidden_size)`, `optional`):
<ide> Optionally, instead of passing :obj:`input_ids` you can choose to directly pass an embedded representation.
<ide> This is useful if you want more control over how to convert `input_ids` indices into associated vectors
<ide> than the model's internal embedding lookup matrix.
<ide> def _prune_heads(self, heads_to_prune):
<ide> for layer, heads in heads_to_prune.items():
<ide> self.encoder.layer[layer].attention.prune_heads(heads)
<ide>
<del> @add_start_docstrings_to_model_forward({{cookiecutter.uppercase_modelname}}_INPUTS_DOCSTRING.format("(batch_size, sequence_length)"))
<add> @add_start_docstrings_to_model_forward({{cookiecutter.uppercase_modelname}}_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
<ide> @add_code_sample_docstrings(
<ide> tokenizer_class=_TOKENIZER_FOR_DOC,
<ide> checkpoint=_CHECKPOINT_FOR_DOC,
<ide> def get_output_embeddings(self):
<ide> def set_output_embeddings(self, new_embeddings):
<ide> self.cls.predictions.decoder = new_embeddings
<ide>
<del> @add_start_docstrings_to_model_forward({{cookiecutter.uppercase_modelname}}_INPUTS_DOCSTRING.format("(batch_size, sequence_length)"))
<add> @add_start_docstrings_to_model_forward({{cookiecutter.uppercase_modelname}}_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
<ide> @add_code_sample_docstrings(
<ide> tokenizer_class=_TOKENIZER_FOR_DOC,
<ide> checkpoint=_CHECKPOINT_FOR_DOC,
<ide> def __init__(self, config):
<ide>
<ide> self.init_weights()
<ide>
<del> @add_start_docstrings_to_model_forward({{cookiecutter.uppercase_modelname}}_INPUTS_DOCSTRING.format("(batch_size, sequence_length)"))
<add> @add_start_docstrings_to_model_forward({{cookiecutter.uppercase_modelname}}_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
<ide> @add_code_sample_docstrings(
<ide> tokenizer_class=_TOKENIZER_FOR_DOC,
<ide> checkpoint=_CHECKPOINT_FOR_DOC,
<ide> def __init__(self, config):
<ide>
<ide> self.init_weights()
<ide>
<del> @add_start_docstrings_to_model_forward({{cookiecutter.uppercase_modelname}}_INPUTS_DOCSTRING.format("(batch_size, sequence_length)"))
<add> @add_start_docstrings_to_model_forward({{cookiecutter.uppercase_modelname}}_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
<ide> @add_code_sample_docstrings(
<ide> tokenizer_class=_TOKENIZER_FOR_DOC,
<ide> checkpoint=_CHECKPOINT_FOR_DOC, | 15 |
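The docstring fix above works because the `*_INPUTS_DOCSTRING` templates are filled in with `str.format`: once the parentheses live inside the template as `({0})`, call sites must pass the bare shape string, otherwise the rendered shape would read `((batch_size, sequence_length))`. A simplified sketch of the mechanism (not the actual transformers decorator):

```python
INPUTS_DOCSTRING = """
    input_ids of shape ({0}):
        Indices of input sequence tokens in the vocabulary.
    inputs_embeds of shape ({0}, hidden_size):
        Optionally, an embedded representation instead of input_ids.
"""

# After the fix, call sites pass the shape names without parentheses:
print(INPUTS_DOCSTRING.format("batch_size, sequence_length"))
```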
Ruby | Ruby | add macos sierra. (#353) | 1c46db9a73e6d0fa56d0a83db2ed04a1d62837b4 | <ide><path>Library/Homebrew/os/mac/version.rb
<ide> module OS
<ide> module Mac
<ide> class Version < ::Version
<ide> SYMBOLS = {
<add> :sierra => "10.12",
<ide> :el_capitan => "10.11",
<ide> :yosemite => "10.10",
<ide> :mavericks => "10.9", | 1 |
Text | Text | rearrange reference section and code | 162574f0b6d9cfa4a54bf44a46afe8871c1f4108 | <ide><path>guide/english/r/data-types/index.md
<ide> title: Data Types in R
<ide> Scalar refers to an atomic quantity that can hold only one value at a time. Scalars are the most basic data types. Some common types of scalars :
<ide>
<ide> 1. Number
<del>
<add>```r
<ide> > x <- 5
<ide> > y <- 5.5
<ide> > class(x)
<ide> title: Data Types in R
<ide> [1] "numeric"
<ide> > class(x+y)
<ide> [1] "numeric"
<add>```
<ide>
<ide> 2. Logical value
<del>
<add>```r
<ide> > m <- x > y # Used to check, Is x larger than y?
<ide> > n <- x < y # Used to check, Is x smaller than y?
<ide> > m
<ide> title: Data Types in R
<ide> [1] "logical"
<ide> > class(NA) # NA is another logical value: 'Not Available'/Missing Values
<ide> [1] "logical"
<add>```
<ide>
<ide> 3. Character(string)
<del>
<add>```r
<ide> > a <- "1"; b <- "2.5"
<ide> > a;b
<ide> [1] "1"
<ide> title: Data Types in R
<ide> [1] "numeric"
<ide> > class(as.character(x))
<ide> [1] "character"
<del>
<add>```
<ide>
<ide> ## Vector
<ide> It is a sequence of data elements of the same basic type. For example:
<ide> title: Data Types in R
<ide>
<ide>
<ide> ## Reference:
<del><a href='https://cran.r-project.org/manuals.html' target='_blank' rel='nofollow'>Official Docs</a>
<del><a href='https://www.r-bloggers.com/classes-and-objects-in-r/' target='_blank' rel='nofollow'>Data Types in R by r-bloggers</a>
<ide>\ No newline at end of file
<add> * [Official Docs](https://cran.r-project.org/manuals.html)
<add> * [Data Types in R by r-bloggers](https://www.r-bloggers.com/classes-and-objects-in-r/) | 1 |
Text | Text | provide more detail to static folder placement | dc208054337127fa2e740d7630976e8d57fa9472 | <ide><path>docs/advanced-features/output-file-tracing.md
<ide> module.exports = {
<ide>
<ide> This will create a folder at `.next/standalone` which can then be deployed on it's own without installing `node_modules`.
<ide>
<del>Additionally, a minimal `server.js` file is also output which can be used instead of `next start`. This minimal server does not copy the `public` or `.next/static` folders by default as these should ideally be handled by a CDN instead, although these folders can be copied to the `standalone` folder manually and the `server.js` file will serve it automatically.
<add>Additionally, a minimal `server.js` file is also output which can be used instead of `next start`. This minimal server does not copy the `public` or `.next/static` folders by default as these should ideally be handled by a CDN instead, although these folders can be copied to the `standalone/public` and `standalone/.next/static` folders manually, after which `server.js` file will serve these automatically.
<ide>
<ide> ## Caveats
<ide> | 1 |
Python | Python | add benchmarks for 32-bit data type sorting | 61dec07ba52c0aafe1149e43bb22ffd0cb4b06c9 | <ide><path>benchmarks/benchmarks/bench_function_base.py
<ide> class Sort(Benchmark):
<ide> # In NumPy 1.17 and newer, 'merge' can be one of several
<ide> # stable sorts, it isn't necessarily merge sort.
<ide> ['quick', 'merge', 'heap'],
<del> ['float64', 'int64', 'int16'],
<add> ['float64', 'int64', 'float32', 'uint32', 'int32', 'int16'],
<ide> [
<ide> ('random',),
<ide> ('ordered',), | 1 |
Python | Python | test multi_dot with 2 arrays | c1ee3d86a23aced31e8f3b02ad93dd6509c28e59 | <ide><path>numpy/linalg/tests/test_linalg.py
<ide> def test_basic_function_with_three_arguments(self):
<ide> assert_almost_equal(multi_dot([A, B, C]), A.dot(B).dot(C))
<ide> assert_almost_equal(multi_dot([A, B, C]), np.dot(A, np.dot(B, C)))
<ide>
<add> def test_basic_function_with_two_arguments(self):
<add> # separate code path with two arguments
<add> A = np.random.random((6, 2))
<add> B = np.random.random((2, 6))
<add>
<add> assert_almost_equal(multi_dot([A, B]), A.dot(B))
<add> assert_almost_equal(multi_dot([A, B]), np.dot(A, B))
<add>
<ide> def test_basic_function_with_dynamic_programing_optimization(self):
<ide> # multi_dot with four or more arguments uses the dynamic programing
<ide> # optimization and therefore deserve a separate | 1 |
Ruby | Ruby | squash backtraces from bundler.setup | b73177a0954aed11543f2bb829cd07cc12d4f590 | <ide><path>railties/lib/rails/generators/rails/app/templates/config/boot.rb
<ide>
<ide> # Set up gems listed in the Gemfile.
<ide> gemfile = File.expand_path('../../Gemfile', __FILE__)
<del>if File.exist?(gemfile)
<add>begin
<ide> ENV['BUNDLE_GEMFILE'] = gemfile
<ide> require 'bundler'
<ide> Bundler.setup
<del>end
<ide>\ No newline at end of file
<add>rescue Bundler::GemNotFound => e
<add> STDERR.puts e.message
<add> STDERR.puts "Try running `bundle install`."
<add> exit!
<add>end if File.exist?(gemfile) | 1 |
PHP | PHP | unskip more tests and fix issues in filenames | c26df7838b8ae4c2d1755fa618eb4839542de823 | <ide><path>src/Console/Command/Task/ModelTask.php
<ide> public function bakeEntity($model, $data = []) {
<ide> if (!empty($this->params['no-entity'])) {
<ide> return;
<ide> }
<del> $name = $model->alias();
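<add> // Entity class names use the singular form, e.g. "BakeArticles" -> "BakeArticle".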
<add> $name = Inflector::singularize($model->alias());
<ide>
<ide> $ns = Configure::read('App.namespace');
<ide> $pluginPath = '';
<ide> public function bakeEntity($model, $data = []) {
<ide> }
<ide>
<ide> $data += [
<del> 'name' => Inflector::singularize($name),
<add> 'name' => $name,
<ide> 'namespace' => $ns,
<ide> 'plugin' => $this->plugin,
<ide> 'pluginPath' => $pluginPath,
<ide> public function bakeTable($model, $data = []) {
<ide> $out = $this->Template->generate('classes', 'table');
<ide>
<ide> $path = $this->getPath();
<del> $filename = $path . 'Table/' . $name . '.php';
<add> $filename = $path . 'Table/' . $name . 'Table.php';
<ide> $this->out("\n" . __d('cake_console', 'Baking table class for %s...', $name), 1, Shell::QUIET);
<ide> $this->createFile($filename, $out);
<ide> TableRegistry::clear();
<ide><path>tests/TestCase/Console/Command/Task/ModelTaskTest.php
<ide> public function testBakeEntityFields() {
<ide> $this->assertContains("protected \$_accessible = ['title', 'body', 'published']", $result);
<ide> }
<ide>
<add>/**
<add> * test bakeTable() with a -plugin param
<add> *
<add> * @return void
<add> */
<add> public function testBakeTableWithPlugin() {
<add> $this->Task->plugin = 'ControllerTest';
<add>
<add> // fake plugin path
<add> Plugin::load('ControllerTest', array('path' => APP . 'Plugin/ControllerTest/'));
<add> $path = APP . 'Plugin/ControllerTest/Model/Table/BakeArticlesTable.php';
<add> $this->Task->expects($this->once())->method('createFile')
<add> ->with($path, $this->logicalAnd(
<add> $this->stringContains('namespace ControllerTest\\Model\\Table;'),
<add> $this->stringContains('use Cake\\ORM\\Table;'),
<add> $this->stringContains('class BakeArticlesTable extends Table {')
<add> ));
<ide>
<add> $model = TableRegistry::get('BakeArticles');
<add> $this->Task->bakeTable($model);
<add> }
<ide>
<ide> /**
<del> * test bake() with a -plugin param
<add> * test bakeEntity() with a -plugin param
<ide> *
<ide> * @return void
<ide> */
<del> public function testBakeWithPlugin() {
<del> $this->markTestIncomplete('Not done here yet');
<add> public function testBakeEntityWithPlugin() {
<ide> $this->Task->plugin = 'ControllerTest';
<ide>
<del> //fake plugin path
<add> // fake plugin path
<ide> Plugin::load('ControllerTest', array('path' => APP . 'Plugin/ControllerTest/'));
<del> $path = APP . 'Plugin/ControllerTest/Model/BakeArticle.php';
<add> $path = APP . 'Plugin/ControllerTest/Model/Entity/BakeArticle.php';
<ide> $this->Task->expects($this->once())->method('createFile')
<del> ->with($path, $this->stringContains('BakeArticle extends ControllerTestAppModel'));
<del>
<del> $result = $this->Task->bake('BakeArticle', array(), array());
<del> $this->assertContains("App::uses('ControllerTestAppModel', 'ControllerTest.Model');", $result);
<add> ->with($path, $this->logicalAnd(
<add> $this->stringContains('namespace ControllerTest\\Model\\Entity;'),
<add> $this->stringContains('use Cake\\ORM\\Entity;'),
<add> $this->stringContains('class BakeArticle extends Entity {')
<add> ));
<ide>
<del> $this->assertEquals(count(ClassRegistry::keys()), 0);
<del> $this->assertEquals(count(ClassRegistry::mapKeys()), 0);
<add> $model = TableRegistry::get('BakeArticles');
<add> $this->Task->bakeEntity($model);
<ide> }
<ide>
<ide> /** | 2 |
Python | Python | update the dataset name for synthetic data. | d93d121167e986d5cc2b71939b2c9e64a9400996 | <ide><path>official/resnet/resnet_run_loop.py
<ide> def resnet_main(
<ide> 'synthetic_data': flags_obj.use_synthetic_data,
<ide> 'train_epochs': flags_obj.train_epochs,
<ide> }
<add> if flags_obj.use_synthetic_data:
<add> dataset_name = dataset_name + "-synthetic"
<ide>
<ide> benchmark_logger = logger.get_benchmark_logger()
<ide> benchmark_logger.log_run_info('resnet', dataset_name, run_params, | 1 |
Text | Text | fix return value for require.resolve.paths() | 553d4ee57db12ef9f1c039985024c2e4f79fd815 | <ide><path>doc/api/modules.md
<ide> added: v8.9.0
<ide> -->
<ide>
<ide> * `request` {string} The module path whose lookup paths are being retrieved.
<del>* Returns: {Array}
<add>* Returns: {Array|null}
<ide>
<del>Returns an array containing the paths searched during resolution of `request`.
<add>Returns an array containing the paths searched during resolution of `request` or
<add>null if the `request` string references a core module, for example `http` or
<add>`fs`.
<ide>
<ide> ## The `module` Object
<ide> <!-- YAML | 1 |
Javascript | Javascript | update stats-config for new polyfills location | 059741ea00feafc5b9da3e9fb3d9a2f8856507b0 | <ide><path>test/.stats-app/stats-config.js
<ide> const clientGlobs = [
<ide> name: 'Client Bundles (main, webpack, commons)',
<ide> globs: [
<ide> '.next/static/runtime/+(main|webpack)-!(*.module.js)',
<del> '.next/static/chunks/!(*.module.js)',
<add> '.next/static/chunks/!(polyfills-*|*.module.js)',
<ide> ],
<ide> },
<ide> {
<ide> name: 'Client Bundles (main, webpack, commons) Modern',
<ide> globs: [
<ide> '.next/static/runtime/+(main|webpack)-*.module.js',
<del> '.next/static/chunks/*.module.js',
<add> '.next/static/chunks/!(polyfills-*)*.module.js',
<ide> ],
<ide> },
<ide> {
<ide> name: 'Legacy Client Bundles (polyfills)',
<del> globs: ['.next/static/runtime/+(polyfills)-!(*.module.js)'],
<add> globs: ['.next/static/chunks/+(polyfills)-!(*.module.js)'],
<ide> },
<ide> {
<ide> name: 'Client Pages', | 1 |
Python | Python | add shift_tokens_right in flaxmt5 | 934e21cd4b41fb8d4025c58c1f1a03a10ae4208b | <ide><path>src/transformers/models/mt5/modeling_flax_mt5.py
<ide> # limitations under the License.
<ide> """ Flax mT5 model."""
<ide>
<add>import numpy as np
<add>
<ide> from ...utils import logging
<ide> from ..t5.modeling_flax_t5 import FlaxT5ForConditionalGeneration, FlaxT5Model
<ide> from .configuration_mt5 import MT5Config
<ide> _TOKENIZER_FOR_DOC = "T5Tokenizer"
<ide>
<ide>
<add># Copied from transformers.models.bart.modeling_flax_bart.shift_tokens_right
<add>def shift_tokens_right(input_ids: np.array, pad_token_id: int, decoder_start_token_id: int) -> np.ndarray:
<add> """
<add> Shift input ids one token to the right.
<add> """
<add> shifted_input_ids = np.zeros_like(input_ids)
<add> shifted_input_ids[:, 1:] = input_ids[:, :-1]
<add> shifted_input_ids[:, 0] = decoder_start_token_id
<add>
<add> shifted_input_ids = np.where(shifted_input_ids == -100, pad_token_id, shifted_input_ids)
<add> return shifted_input_ids
<add>
<add>
<ide> class FlaxMT5Model(FlaxT5Model):
<ide> r"""
<ide> This class overrides [`FlaxT5Model`]. Please check the superclass for the appropriate documentation alongside usage | 1 |
Ruby | Ruby | convert `checksum_verification` test to spec | ba34cbdc6256b76cd5e3bb31dc9c3fc0ec511333 | <ide><path>Library/Homebrew/test/checksum_verification_spec.rb
<add>require "formula"
<add>
<add>describe Formula do
<add> def formula(&block)
<add> super do
<add> url "file://#{TEST_FIXTURE_DIR}/tarballs/testball-0.1.tbz"
<add> instance_eval(&block)
<add> end
<add> end
<add>
<add> describe "#brew" do
<add> it "does not raise an error when the checksum matches" do
<add> expect {
<add> shutup do
<add> f = formula do
<add> sha256 TESTBALL_SHA256
<add> end
<add>
<add> f.brew {}
<add> end
<add> }.not_to raise_error
<add> end
<add>
<add> it "raises an error when the checksum doesn't match" do
<add> expect {
<add> shutup do
<add> f = formula do
<add> sha256 "dcbf5f44743b74add648c7e35e414076632fa3b24463d68d1f6afc5be77024f8"
<add> end
<add>
<add> f.brew {}
<add> end
<add> }.to raise_error(ChecksumMismatchError)
<add> end
<add> end
<add>end
<ide><path>Library/Homebrew/test/checksum_verification_test.rb
<del>require "testing_env"
<del>require "formula"
<del>
<del>class ChecksumVerificationTests < Homebrew::TestCase
<del> def assert_checksum_good
<del> assert_nothing_raised { shutup { @_f.brew {} } }
<del> end
<del>
<del> def assert_checksum_bad
<del> assert_raises(ChecksumMismatchError) { shutup { @_f.brew {} } }
<del> end
<del>
<del> def formula(&block)
<del> super do
<del> url "file://#{TEST_FIXTURE_DIR}/tarballs/testball-0.1.tbz"
<del> instance_eval(&block)
<del> end
<del> end
<del>
<del> def test_good_sha256
<del> formula do
<del> sha256 TESTBALL_SHA256
<del> end
<del>
<del> assert_checksum_good
<del> end
<del>
<del> def test_bad_sha256
<del> formula do
<del> sha256 "dcbf5f44743b74add648c7e35e414076632fa3b24463d68d1f6afc5be77024f8"
<del> end
<del>
<del> assert_checksum_bad
<del> end
<del>end | 2 |
Mixed | Javascript | attach request as res.req | fc3f1c3c7106a586b6ec90c97ce1b0196791b89f | <ide><path>doc/api/http.md
<ide> Removes a header that's queued for implicit sending.
<ide> response.removeHeader('Content-Encoding');
<ide> ```
<ide>
<add>### `response.req`
<add><!-- YAML
<add>added: REPLACEME
<add>-->
<add>
<add>* {http.IncomingMessage}
<add>
<add>A reference to the original HTTP `request` object.
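<add>
<add>For example (illustrative), it allows the request to be inspected in code that
<add>only receives the response object:
<add>
<add>```js
<add>const http = require('http');
<add>
<add>http.createServer((req, res) => {
<add>  // `res.req` refers to the same object as `req`.
<add>  res.end(res.req.url);
<add>}).listen(8000);
<add>```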
<add>
<ide> ### `response.sendDate`
<ide> <!-- YAML
<ide> added: v0.7.5
<ide><path>doc/api/http2.md
<ide> Removes a header that has been queued for implicit sending.
<ide> response.removeHeader('Content-Encoding');
<ide> ```
<ide>
<add>### `response.req`
<add><!-- YAML
<add>added: REPLACEME
<add>-->
<add>
<add>* {http2.Http2ServerRequest}
<add>
<add>A reference to the original HTTP2 `request` object.
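<add>
<add>For example (illustrative), assuming an `Http2Server` instance named `server`:
<add>
<add>```js
<add>server.on('request', (request, response) => {
<add>  // `response.req` refers to the same object as `request`.
<add>  console.log(response.req === request); // true
<add>  response.end();
<add>});
<add>```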
<add>
<ide> #### `response.sendDate`
<ide> <!-- YAML
<ide> added: v8.4.0
<ide><path>lib/_http_server.js
<ide> function ServerResponse(req) {
<ide>
<ide> if (req.method === 'HEAD') this._hasBody = false;
<ide>
<add> this.req = req;
<ide> this.sendDate = true;
<ide> this._sent100 = false;
<ide> this._expect_continue = false;
<ide><path>lib/internal/http2/compat.js
<ide> class Http2ServerResponse extends Stream {
<ide> return this[kStream].headersSent;
<ide> }
<ide>
<add> get req() {
<add> return this[kStream][kRequest];
<add> }
<add>
<ide> get sendDate() {
<ide> return this[kState].sendDate;
<ide> }
<ide><path>test/parallel/test-http-server.js
<ide> const server = http.createServer(function(req, res) {
<ide> res.id = request_number;
<ide> req.id = request_number++;
<ide>
<add> assert.strictEqual(res.req, req);
<add>
<ide> if (req.id === 0) {
<ide> assert.strictEqual(req.method, 'GET');
<ide> assert.strictEqual(url.parse(req.url).pathname, '/hello');
<ide><path>test/parallel/test-http2-compat-serverresponse.js
<add>'use strict';
<add>
<add>const common = require('../common');
<add>if (!common.hasCrypto)
<add> common.skip('missing crypto');
<add>const assert = require('assert');
<add>const h2 = require('http2');
<add>
<add>// Http2ServerResponse should expose convenience properties
<add>
<add>const server = h2.createServer();
<add>server.listen(0, common.mustCall(function() {
<add> const port = server.address().port;
<add> server.once('request', common.mustCall(function(request, response) {
<add> assert.strictEqual(response.req, request);
<add>
<add> response.on('finish', common.mustCall(function() {
<add> process.nextTick(() => {
<add> server.close();
<add> });
<add> }));
<add> response.end();
<add> }));
<add>
<add> const url = `http://localhost:${port}`;
<add> const client = h2.connect(url, common.mustCall(function() {
<add> const headers = {
<add> ':path': '/foobar',
<add> ':method': 'GET',
<add> ':scheme': 'http',
<add> ':authority': `localhost:${port}`
<add> };
<add> const request = client.request(headers);
<add> request.on('end', common.mustCall(function() {
<add> client.close();
<add> }));
<add> request.end();
<add> request.resume();
<add> }));
<add>})); | 6 |
Ruby | Ruby | debrittlize tests somewhat - fix for ci build | 7d76474e5da6c1c06eb831fe436f12591cca4339 | <ide><path>railties/test/generators/app_generator_test.rb
<ide> def test_application_names_are_not_singularized
<ide> def test_config_database_is_added_by_default
<ide> run_generator
<ide> assert_file "config/database.yml", /sqlite3/
<del> assert_file "Gemfile", /^gem "sqlite3-ruby", :require => "sqlite3"$/
<add> assert_file "Gemfile", /^gem\s+["']sqlite3-ruby["'],\s+:require\s+=>\s+["']sqlite3["']$/
<ide> end
<ide>
<ide> def test_config_another_database
<ide> run_generator([destination_root, "-d", "mysql"])
<ide> assert_file "config/database.yml", /mysql/
<del> assert_file "Gemfile", /^gem "mysql"$/
<add> assert_file "Gemfile", /^gem\s+["']mysql["']$/
<ide> end
<ide>
<ide> def test_config_database_is_not_added_if_skip_activerecord_is_given
<ide> def test_config_database_is_not_added_if_skip_activerecord_is_given
<ide>
<ide> def test_activerecord_is_removed_from_frameworks_if_skip_activerecord_is_given
<ide> run_generator [destination_root, "--skip-activerecord"]
<del> assert_file "config/application.rb", /# require "active_record\/railtie"/
<add> assert_file "config/application.rb", /#\s+require\s+["']active_record\/railtie["']/
<ide> end
<ide>
<ide> def test_prototype_and_test_unit_are_added_by_default
<ide> def test_dev_option
<ide> silence(:stdout){ generator.invoke }
<ide> rails_path = File.expand_path('../../..', Rails.root)
<del> dev_gem = %(gem "rails", :path => #{rails_path.inspect})
<del> assert_file 'Gemfile', /^#{Regexp.escape(dev_gem)}$/
<add> assert_file 'Gemfile', /^gem\s+["']rails["'],\s+:path\s+=>\s+["']#{Regexp.escape(rails_path)}["']$/
<ide> end
<ide>
<ide> def test_edge_option
<ide> generator([destination_root], :edge => true).expects(:run).with("bundle install")
<ide> silence(:stdout){ generator.invoke }
<del> edge_gem = %(gem "rails", :git => "git://github.com/rails/rails.git")
<del> assert_file 'Gemfile', /^#{Regexp.escape(edge_gem)}$/
<add> assert_file 'Gemfile', /^gem\s+["']rails["'],\s+:git\s+=>\s+["']git:\/\/github\.com\/rails\/rails\.git["']$/
<ide> end
<ide>
<ide> protected | 1 |
Go | Go | experimental it for net vlan drivers | afeea8c9ca71780cd02abc86ad61e587f26be620 | <ide><path>integration-cli/docker_experimental_network_test.go
<add>// +build experimental
<add>
<add>package main
<add>
<add>import (
<add> "os/exec"
<add> "strings"
<add> "time"
<add>
<add> "github.com/docker/docker/pkg/integration/checker"
<add> "github.com/docker/docker/pkg/parsers/kernel"
<add> "github.com/go-check/check"
<add>)
<add>
<add>var (
<add> MacvlanKernelSupport = testRequirement{
<add> func() bool {
<add> const macvlanKernelVer = 3 // minimum macvlan kernel support
<add> const macvlanMajorVer = 9 // minimum macvlan major kernel support
<add> kv, err := kernel.GetKernelVersion()
<add> if err != nil {
<add> return false
<add> }
<add> // ensure Kernel version is >= v3.9 for macvlan support
<add> if kv.Kernel < macvlanKernelVer || (kv.Kernel == macvlanKernelVer && kv.Major < macvlanMajorVer) {
<add> return false
<add> }
<add> return true
<add> },
<add> "kernel version failed to meet the minimum macvlan kernel requirement of 3.9",
<add> }
<add> IpvlanKernelSupport = testRequirement{
<add> func() bool {
<add> const ipvlanKernelVer = 4 // minimum ipvlan kernel support
<add> const ipvlanMajorVer = 2 // minimum ipvlan major kernel support
<add> kv, err := kernel.GetKernelVersion()
<add> if err != nil {
<add> return false
<add> }
<add> // ensure Kernel version is >= v4.2 for ipvlan support
<add> if kv.Kernel < ipvlanKernelVer || (kv.Kernel == ipvlanKernelVer && kv.Major < ipvlanMajorVer) {
<add> return false
<add> }
<add> return true
<add> },
<add> "kernel version failed to meet the minimum ipvlan kernel requirement of 4.0.0",
<add> }
<add>)
<add>
<add>func (s *DockerNetworkSuite) TestDockerNetworkMacvlanPersistance(c *check.C) {
<add> // verify the driver automatically provisions the 802.1q link (dm-dummy0.60)
<add> testRequires(c, DaemonIsLinux, MacvlanKernelSupport, NotUserNamespace, NotArm)
<add> // master dummy interface 'dm' abbreviation represents 'docker macvlan'
<add> master := "dm-dummy0"
<add> // simulate the master link the vlan tagged subinterface parent link will use
<add> out, err := createMasterDummy(c, master)
<add> c.Assert(err, check.IsNil, check.Commentf(out))
<add> // create a network specifying the desired sub-interface name
<add> dockerCmd(c, "network", "create", "--driver=macvlan", "-o", "parent=dm-dummy0.60", "dm-persist")
<add> assertNwIsAvailable(c, "dm-persist")
<add> // Restart docker daemon to test the config has persisted to disk
<add> s.d.Restart()
<add> // verify network is recreated from persistence
<add> assertNwIsAvailable(c, "dm-persist")
<add> // cleanup the master interface that also collects the slave dev
<add> deleteInterface(c, "dm-dummy0")
<add>}
<add>
<add>func (s *DockerNetworkSuite) TestDockerNetworkIpvlanPersistance(c *check.C) {
<add> // verify the driver automatically provisions the 802.1q link (di-dummy0.70)
<add> testRequires(c, DaemonIsLinux, IpvlanKernelSupport, NotUserNamespace, NotArm)
<add> // master dummy interface 'di' notation represent 'docker ipvlan'
<add> master := "di-dummy0"
<add> // simulate the master link the vlan tagged subinterface parent link will use
<add> out, err := createMasterDummy(c, master)
<add> c.Assert(err, check.IsNil, check.Commentf(out))
<add> // create a network specifying the desired sub-interface name
<add> dockerCmd(c, "network", "create", "--driver=ipvlan", "-o", "parent=di-dummy0.70", "di-persist")
<add> assertNwIsAvailable(c, "di-persist")
<add> // Restart docker daemon to test the config has persisted to disk
<add> s.d.Restart()
<add> // verify network is recreated from persistence
<add> assertNwIsAvailable(c, "di-persist")
<add> // cleanup the master interface that also collects the slave dev
<add> deleteInterface(c, "di-dummy0")
<add>}
<add>
<add>func (s *DockerNetworkSuite) TestDockerNetworkMacvlanSubIntCreate(c *check.C) {
<add> // verify the driver automatically provisions the 802.1q link (dm-dummy0.50)
<add> testRequires(c, DaemonIsLinux, MacvlanKernelSupport, NotUserNamespace, NotArm)
<add> // master dummy interface 'dm' abbreviation represents 'docker macvlan'
<add> master := "dm-dummy0"
<add> // simulate the master link the vlan tagged subinterface parent link will use
<add> out, err := createMasterDummy(c, master)
<add> c.Assert(err, check.IsNil, check.Commentf(out))
<add> // create a network specifying the desired sub-interface name
<add> dockerCmd(c, "network", "create", "--driver=macvlan", "-o", "parent=dm-dummy0.50", "dm-subinterface")
<add> assertNwIsAvailable(c, "dm-subinterface")
<add> // cleanup the master interface which also collects the slave dev
<add> deleteInterface(c, "dm-dummy0")
<add>}
<add>
<add>func (s *DockerNetworkSuite) TestDockerNetworkIpvlanSubIntCreate(c *check.C) {
<add> // verify the driver automatically provisions the 802.1q link (di-dummy0.50)
<add> testRequires(c, DaemonIsLinux, IpvlanKernelSupport, NotUserNamespace, NotArm)
<add> // master dummy interface 'dm' abbreviation represents 'docker ipvlan'
<add> master := "di-dummy0"
<add> // simulate the master link the vlan tagged subinterface parent link will use
<add> out, err := createMasterDummy(c, master)
<add> c.Assert(err, check.IsNil, check.Commentf(out))
<add> // create a network specifying the desired sub-interface name
<add> dockerCmd(c, "network", "create", "--driver=ipvlan", "-o", "parent=di-dummy0.60", "di-subinterface")
<add> assertNwIsAvailable(c, "di-subinterface")
<add> // cleanup the master interface which also collects the slave dev
<add> deleteInterface(c, "di-dummy0")
<add>}
<add>
<add>func (s *DockerNetworkSuite) TestDockerNetworkMacvlanOverlapParent(c *check.C) {
<add> // verify the same parent interface cannot be used if already in use by an existing network
<add> testRequires(c, DaemonIsLinux, MacvlanKernelSupport, NotUserNamespace, NotArm)
<add> // master dummy interface 'dm' abbreviation represents 'docker macvlan'
<add> master := "dm-dummy0"
<add> out, err := createMasterDummy(c, master)
<add> c.Assert(err, check.IsNil, check.Commentf(out))
<add> out, err = createVlanInterface(c, master, "dm-dummy0.40", "40")
<add> c.Assert(err, check.IsNil, check.Commentf(out))
<add> // create a network using an existing parent interface
<add> dockerCmd(c, "network", "create", "--driver=macvlan", "-o", "parent=dm-dummy0.40", "dm-subinterface")
<add> assertNwIsAvailable(c, "dm-subinterface")
<add> // attempt to create another network using the same parent iface that should fail
<add> out, _, err = dockerCmdWithError("network", "create", "--driver=macvlan", "-o", "parent=dm-dummy0.40", "dm-parent-net-overlap")
<add> // verify that the overlap returns an error
<add> c.Assert(err, check.NotNil)
<add> // cleanup the master interface which also collects the slave dev
<add> deleteInterface(c, "dm-dummy0")
<add>}
<add>
<add>func (s *DockerNetworkSuite) TestDockerNetworkIpvlanOverlapParent(c *check.C) {
<add> // verify the same parent interface cannot be used if already in use by an existing network
<add> testRequires(c, DaemonIsLinux, IpvlanKernelSupport, NotUserNamespace, NotArm)
<add> // master dummy interface 'dm' abbreviation represents 'docker ipvlan'
<add> master := "di-dummy0"
<add> out, err := createMasterDummy(c, master)
<add> c.Assert(err, check.IsNil, check.Commentf(out))
<add> out, err = createVlanInterface(c, master, "di-dummy0.30", "30")
<add> c.Assert(err, check.IsNil, check.Commentf(out))
<add> // create a network using an existing parent interface
<add> dockerCmd(c, "network", "create", "--driver=ipvlan", "-o", "parent=di-dummy0.30", "di-subinterface")
<add> assertNwIsAvailable(c, "di-subinterface")
<add> // attempt to create another network using the same parent iface that should fail
<add> out, _, err = dockerCmdWithError("network", "create", "--driver=ipvlan", "-o", "parent=di-dummy0.30", "di-parent-net-overlap")
<add> // verify that the overlap returns an error
<add> c.Assert(err, check.NotNil)
<add> // cleanup the master interface which also collects the slave dev
<add> deleteInterface(c, "di-dummy0")
<add>}
<add>
<add>func (s *DockerNetworkSuite) TestDockerNetworkMacvlanMultiSubnet(c *check.C) {
<add> // create a dual stack multi-subnet Macvlan bridge mode network and validate connectivity between four containers, two on each subnet
<add> testRequires(c, DaemonIsLinux, MacvlanKernelSupport, NotUserNamespace, NotArm)
<add> dockerCmd(c, "network", "create", "--driver=macvlan", "--subnet=172.28.100.0/24", "--subnet=172.28.102.0/24", "--gateway=172.28.102.254",
<add> "--subnet=2001:db8:abc2::/64", "--subnet=2001:db8:abc4::/64", "--gateway=2001:db8:abc4::254", "dualstackbridge")
<add> // Ensure the network was created
<add> assertNwIsAvailable(c, "dualstackbridge")
<add> // start dual stack containers and verify the user specified --ip and --ip6 addresses on subnets 172.28.100.0/24 and 2001:db8:abc2::/64
<add> dockerCmd(c, "run", "-d", "--net=dualstackbridge", "--name=first", "--ip", "172.28.100.20", "--ip6", "2001:db8:abc2::20", "busybox", "top")
<add> dockerCmd(c, "run", "-d", "--net=dualstackbridge", "--name=second", "--ip", "172.28.100.21", "--ip6", "2001:db8:abc2::21", "busybox", "top")
<add>
<add> // Inspect and store the v4 address from specified container on the network dualstackbridge
<add> ip := inspectField(c, "first", "NetworkSettings.Networks.dualstackbridge.IPAddress")
<add> // Inspect and store the v6 address from specified container on the network dualstackbridge
<add> ip6 := inspectField(c, "first", "NetworkSettings.Networks.dualstackbridge.GlobalIPv6Address")
<add>
<add> // verify ipv4 connectivity to the explicit --ipv address second to first
<add> _, _, err := dockerCmdWithError("exec", "second", "ping", "-c", "1", strings.TrimSpace(ip))
<add> c.Assert(err, check.IsNil)
<add> // verify ipv6 connectivity to the explicit --ipv6 address second to first
<add> _, _, err = dockerCmdWithError("exec", "second", "ping6", "-c", "1", strings.TrimSpace(ip6))
<add> c.Assert(err, check.IsNil)
<add>
<add> // start dual stack containers and verify the user specified --ip and --ip6 addresses on subnets 172.28.102.0/24 and 2001:db8:abc4::/64
<add> dockerCmd(c, "run", "-d", "--net=dualstackbridge", "--name=third", "--ip", "172.28.102.20", "--ip6", "2001:db8:abc4::20", "busybox", "top")
<add> dockerCmd(c, "run", "-d", "--net=dualstackbridge", "--name=fourth", "--ip", "172.28.102.21", "--ip6", "2001:db8:abc4::21", "busybox", "top")
<add>
<add> // Inspect and store the v4 address from specified container on the network dualstackbridge
<add> ip = inspectField(c, "third", "NetworkSettings.Networks.dualstackbridge.IPAddress")
<add> // Inspect and store the v6 address from specified container on the network dualstackbridge
<add> ip6 = inspectField(c, "third", "NetworkSettings.Networks.dualstackbridge.GlobalIPv6Address")
<add>
<add> // verify ipv4 connectivity to the explicit --ipv address from third to fourth
<add> _, _, err = dockerCmdWithError("exec", "fourth", "ping", "-c", "1", strings.TrimSpace(ip))
<add> c.Assert(err, check.IsNil)
<add> // verify ipv6 connectivity to the explicit --ipv6 address from third to fourth
<add> _, _, err = dockerCmdWithError("exec", "fourth", "ping6", "-c", "1", strings.TrimSpace(ip6))
<add> c.Assert(err, check.IsNil)
<add>
<add> // Inspect the v4 gateway to ensure the proper default GW was assigned
<add> ip4gw := inspectField(c, "first", "NetworkSettings.Networks.dualstackbridge.Gateway")
<add> c.Assert(strings.TrimSpace(ip4gw), check.Equals, "172.28.100.1")
<add> // Inspect the v6 gateway to ensure the proper default GW was assigned
<add> ip6gw := inspectField(c, "first", "NetworkSettings.Networks.dualstackbridge.IPv6Gateway")
<add> c.Assert(strings.TrimSpace(ip6gw), check.Equals, "2001:db8:abc2::1")
<add>
<add> // Inspect the v4 gateway to ensure the proper explicitly assigned default GW was assigned
<add> ip4gw = inspectField(c, "third", "NetworkSettings.Networks.dualstackbridge.Gateway")
<add> c.Assert(strings.TrimSpace(ip4gw), check.Equals, "172.28.102.254")
<add> // Inspect the v6 gateway to ensure the proper explicitly assigned default GW was assigned
<add> ip6gw = inspectField(c, "third", "NetworkSettings.Networks.dualstackbridge.IPv6Gateway")
<add> c.Assert(strings.TrimSpace(ip6gw), check.Equals, "2001:db8:abc4::254")
<add>}
<add>
<add>func (s *DockerNetworkSuite) TestDockerNetworkIpvlanL2MultiSubnet(c *check.C) {
<add> // create a dual stack multi-subnet Ipvlan L2 network and validate connectivity within the subnets, two on each subnet
<add> testRequires(c, DaemonIsLinux, IpvlanKernelSupport, NotUserNamespace, NotArm)
<add> dockerCmd(c, "network", "create", "--driver=ipvlan", "--subnet=172.28.200.0/24", "--subnet=172.28.202.0/24", "--gateway=172.28.202.254",
<add> "--subnet=2001:db8:abc8::/64", "--subnet=2001:db8:abc6::/64", "--gateway=2001:db8:abc6::254", "dualstackl2")
<add> // Ensure the network was created
<add> assertNwIsAvailable(c, "dualstackl2")
<add> // start dual stack containers and verify the user specified --ip and --ip6 addresses on subnets 172.28.200.0/24 and 2001:db8:abc8::/64
<add> dockerCmd(c, "run", "-d", "--net=dualstackl2", "--name=first", "--ip", "172.28.200.20", "--ip6", "2001:db8:abc8::20", "busybox", "top")
<add> dockerCmd(c, "run", "-d", "--net=dualstackl2", "--name=second", "--ip", "172.28.200.21", "--ip6", "2001:db8:abc8::21", "busybox", "top")
<add>
<add> // Inspect and store the v4 address from specified container on the network dualstackl2
<add> ip := inspectField(c, "first", "NetworkSettings.Networks.dualstackl2.IPAddress")
<add> // Inspect and store the v6 address from specified container on the network dualstackl2
<add> ip6 := inspectField(c, "first", "NetworkSettings.Networks.dualstackl2.GlobalIPv6Address")
<add>
<add> // verify ipv4 connectivity to the explicit --ipv address second to first
<add> _, _, err := dockerCmdWithError("exec", "second", "ping", "-c", "1", strings.TrimSpace(ip))
<add> c.Assert(err, check.IsNil)
<add> // verify ipv6 connectivity to the explicit --ipv6 address second to first
<add> _, _, err = dockerCmdWithError("exec", "second", "ping6", "-c", "1", strings.TrimSpace(ip6))
<add> c.Assert(err, check.IsNil)
<add>
<add> // start dual stack containers and verify the user specified --ip and --ip6 addresses on subnets 172.28.202.0/24 and 2001:db8:abc6::/64
<add> dockerCmd(c, "run", "-d", "--net=dualstackl2", "--name=third", "--ip", "172.28.202.20", "--ip6", "2001:db8:abc6::20", "busybox", "top")
<add> dockerCmd(c, "run", "-d", "--net=dualstackl2", "--name=fourth", "--ip", "172.28.202.21", "--ip6", "2001:db8:abc6::21", "busybox", "top")
<add>
<add> // Inspect and store the v4 address from specified container on the network dualstackl2
<add> ip = inspectField(c, "third", "NetworkSettings.Networks.dualstackl2.IPAddress")
<add> // Inspect and store the v6 address from specified container on the network dualstackl2
<add> ip6 = inspectField(c, "third", "NetworkSettings.Networks.dualstackl2.GlobalIPv6Address")
<add>
<add> // verify ipv4 connectivity to the explicit --ipv address from third to fourth
<add> _, _, err = dockerCmdWithError("exec", "fourth", "ping", "-c", "1", strings.TrimSpace(ip))
<add> c.Assert(err, check.IsNil)
<add> // verify ipv6 connectivity to the explicit --ipv6 address from third to fourth
<add> _, _, err = dockerCmdWithError("exec", "fourth", "ping6", "-c", "1", strings.TrimSpace(ip6))
<add> c.Assert(err, check.IsNil)
<add>
<add> // Inspect the v4 gateway to ensure the proper default GW was assigned
<add> ip4gw := inspectField(c, "first", "NetworkSettings.Networks.dualstackl2.Gateway")
<add> c.Assert(strings.TrimSpace(ip4gw), check.Equals, "172.28.200.1")
<add> // Inspect the v6 gateway to ensure the proper default GW was assigned
<add> ip6gw := inspectField(c, "first", "NetworkSettings.Networks.dualstackl2.IPv6Gateway")
<add> c.Assert(strings.TrimSpace(ip6gw), check.Equals, "2001:db8:abc8::1")
<add>
<add> // Inspect the v4 gateway to ensure the proper explicitly assigned default GW was assigned
<add> ip4gw = inspectField(c, "third", "NetworkSettings.Networks.dualstackl2.Gateway")
<add> c.Assert(strings.TrimSpace(ip4gw), check.Equals, "172.28.202.254")
<add> // Inspect the v6 gateway to ensure the proper explicitly assigned default GW was assigned
<add> ip6gw = inspectField(c, "third", "NetworkSettings.Networks.dualstackl2.IPv6Gateway")
<add> c.Assert(strings.TrimSpace(ip6gw), check.Equals, "2001:db8:abc6::254")
<add>}
<add>
<add>func (s *DockerNetworkSuite) TestDockerNetworkIpvlanL3MultiSubnet(c *check.C) {
<add> // create a dual stack multi-subnet Ipvlan L3 network and validate connectivity between all four containers per L3 mode
<add> testRequires(c, DaemonIsLinux, IpvlanKernelSupport, NotUserNamespace, NotArm)
<add> dockerCmd(c, "network", "create", "--driver=ipvlan", "--subnet=172.28.10.0/24", "--subnet=172.28.12.0/24", "--gateway=172.28.12.254",
<add> "--subnet=2001:db8:abc9::/64", "--subnet=2001:db8:abc7::/64", "--gateway=2001:db8:abc7::254", "-o", "ipvlan_mode=l3", "dualstackl3")
<add> // Ensure the network was created
<add> assertNwIsAvailable(c, "dualstackl3")
<add>
<add> // start dual stack containers and verify the user specified --ip and --ip6 addresses on subnets 172.28.10.0/24 and 2001:db8:abc9::/64
<add> dockerCmd(c, "run", "-d", "--net=dualstackl3", "--name=first", "--ip", "172.28.10.20", "--ip6", "2001:db8:abc9::20", "busybox", "top")
<add> dockerCmd(c, "run", "-d", "--net=dualstackl3", "--name=second", "--ip", "172.28.10.21", "--ip6", "2001:db8:abc9::21", "busybox", "top")
<add>
<add> // Inspect and store the v4 address from specified container on the network dualstackl3
<add> ip := inspectField(c, "first", "NetworkSettings.Networks.dualstackl3.IPAddress")
<add> // Inspect and store the v6 address from specified container on the network dualstackl3
<add> ip6 := inspectField(c, "first", "NetworkSettings.Networks.dualstackl3.GlobalIPv6Address")
<add>
<add> // verify ipv4 connectivity to the explicit --ipv address second to first
<add> _, _, err := dockerCmdWithError("exec", "second", "ping", "-c", "1", strings.TrimSpace(ip))
<add> c.Assert(err, check.IsNil)
<add> // verify ipv6 connectivity to the explicit --ipv6 address second to first
<add> _, _, err = dockerCmdWithError("exec", "second", "ping6", "-c", "1", strings.TrimSpace(ip6))
<add> c.Assert(err, check.IsNil)
<add>
<add> // start dual stack containers and verify the user specified --ip and --ip6 addresses on subnets 172.28.12.0/24 and 2001:db8:abc7::/64
<add> dockerCmd(c, "run", "-d", "--net=dualstackl3", "--name=third", "--ip", "172.28.12.20", "--ip6", "2001:db8:abc7::20", "busybox", "top")
<add> dockerCmd(c, "run", "-d", "--net=dualstackl3", "--name=fourth", "--ip", "172.28.12.21", "--ip6", "2001:db8:abc7::21", "busybox", "top")
<add>
<add> // Inspect and store the v4 address from specified container on the network dualstackl3
<add> ip = inspectField(c, "third", "NetworkSettings.Networks.dualstackl3.IPAddress")
<add> // Inspect and store the v6 address from specified container on the network dualstackl3
<add> ip6 = inspectField(c, "third", "NetworkSettings.Networks.dualstackl3.GlobalIPv6Address")
<add>
<add> // verify ipv4 connectivity to the explicit --ipv address from third to fourth
<add> _, _, err = dockerCmdWithError("exec", "fourth", "ping", "-c", "1", strings.TrimSpace(ip))
<add> c.Assert(err, check.IsNil)
<add> // verify ipv6 connectivity to the explicit --ipv6 address from third to fourth
<add> _, _, err = dockerCmdWithError("exec", "fourth", "ping6", "-c", "1", strings.TrimSpace(ip6))
<add> c.Assert(err, check.IsNil)
<add>
<add> // Inspect and store the v4 address from specified container on the network dualstackl3
<add> ip = inspectField(c, "second", "NetworkSettings.Networks.dualstackl3.IPAddress")
<add> // Inspect and store the v6 address from specified container on the network dualstackl3
<add> ip6 = inspectField(c, "second", "NetworkSettings.Networks.dualstackl3.GlobalIPv6Address")
<add>
<add> // Verify connectivity across disparate subnets which is unique to L3 mode only
<add> _, _, err = dockerCmdWithError("exec", "third", "ping", "-c", "1", strings.TrimSpace(ip))
<add> c.Assert(err, check.IsNil)
<add> _, _, err = dockerCmdWithError("exec", "third", "ping6", "-c", "1", strings.TrimSpace(ip6))
<add> c.Assert(err, check.IsNil)
<add>
<add> // Inspect the v4 gateway to ensure no next hop is assigned in L3 mode
<add> ip4gw := inspectField(c, "first", "NetworkSettings.Networks.dualstackl3.Gateway")
<add> c.Assert(strings.TrimSpace(ip4gw), check.Equals, "")
<add> // Inspect the v6 gateway to ensure the explicitly specified default GW is ignored per L3 mode enabled
<add> ip6gw := inspectField(c, "third", "NetworkSettings.Networks.dualstackl3.IPv6Gateway")
<add> c.Assert(strings.TrimSpace(ip6gw), check.Equals, "")
<add>}
<add>
<add>func (s *DockerNetworkSuite) TestDockerNetworkIpvlanAddressing(c *check.C) {
<add> // Ensure the default gateways, next-hops and default dev devices are properly set
<add> testRequires(c, DaemonIsLinux, IpvlanKernelSupport, NotUserNamespace, NotArm)
<add> dockerCmd(c, "network", "create", "--driver=macvlan", "--subnet=172.28.130.0/24",
<add> "--subnet=2001:db8:abca::/64", "--gateway=2001:db8:abca::254", "-o", "macvlan_mode=bridge", "dualstackbridge")
<add> assertNwIsAvailable(c, "dualstackbridge")
<add> dockerCmd(c, "run", "-d", "--net=dualstackbridge", "--name=first", "busybox", "top")
<add> // Validate macvlan bridge mode defaults gateway sets the default IPAM next-hop inferred from the subnet
<add> out, _, err := dockerCmdWithError("exec", "first", "ip", "route")
<add> c.Assert(err, check.IsNil)
<add> c.Assert(out, checker.Contains, "default via 172.28.130.1 dev eth0")
<add> // Validate macvlan bridge mode sets the v6 gateway to the user specified default gateway/next-hop
<add> out, _, err = dockerCmdWithError("exec", "first", "ip", "-6", "route")
<add> c.Assert(err, check.IsNil)
<add> c.Assert(out, checker.Contains, "default via 2001:db8:abca::254 dev eth0")
<add>
<add> // Verify ipvlan l2 mode sets the proper default gateway routes via netlink
<add> // for either an explicitly set route by the user or inferred via default IPAM
<add> dockerCmd(c, "network", "create", "--driver=ipvlan", "--subnet=172.28.140.0/24", "--gateway=172.28.140.254",
<add> "--subnet=2001:db8:abcb::/64", "-o", "ipvlan_mode=l2", "dualstackl2")
<add> assertNwIsAvailable(c, "dualstackl2")
<add> dockerCmd(c, "run", "-d", "--net=dualstackl2", "--name=second", "busybox", "top")
<add> // Validate ipvlan l2 mode defaults gateway sets the default IPAM next-hop inferred from the subnet
<add> out, _, err = dockerCmdWithError("exec", "second", "ip", "route")
<add> c.Assert(err, check.IsNil)
<add> c.Assert(out, checker.Contains, "default via 172.28.140.254 dev eth0")
<add> // Validate ipvlan l2 mode sets the v6 gateway to the user specified default gateway/next-hop
<add> out, _, err = dockerCmdWithError("exec", "second", "ip", "-6", "route")
<add> c.Assert(err, check.IsNil)
<add> c.Assert(out, checker.Contains, "default via 2001:db8:abcb::1 dev eth0")
<add>
<add> // Validate ipvlan l3 mode sets the v4 gateway to dev eth0 and disregards any explicit or inferred next-hops
<add> dockerCmd(c, "network", "create", "--driver=ipvlan", "--subnet=172.28.160.0/24", "--gateway=172.28.160.254",
<add> "--subnet=2001:db8:abcd::/64", "--gateway=2001:db8:abcd::254", "-o", "ipvlan_mode=l3", "dualstackl3")
<add> assertNwIsAvailable(c, "dualstackl3")
<add> dockerCmd(c, "run", "-d", "--net=dualstackl3", "--name=third", "busybox", "top")
<add> // Validate ipvlan l3 mode sets the v4 gateway to dev eth0 and disregards any explicit or inferred next-hops
<add> out, _, err = dockerCmdWithError("exec", "third", "ip", "route")
<add> c.Assert(err, check.IsNil)
<add> c.Assert(out, checker.Contains, "default dev eth0")
<add> // Validate ipvlan l3 mode sets the v6 gateway to dev eth0 and disregards any explicit or inferred next-hops
<add> out, _, err = dockerCmdWithError("exec", "third", "ip", "-6", "route")
<add> c.Assert(err, check.IsNil)
<add> c.Assert(out, checker.Contains, "default dev eth0")
<add>}
<add>
<add>func (s *DockerSuite) TestDockerNetworkMacVlanBridgeNilParent(c *check.C) {
<add> // macvlan bridge mode - dummy parent interface is provisioned dynamically
<add> testRequires(c, DaemonIsLinux, MacvlanKernelSupport, NotUserNamespace, NotArm)
<add> dockerCmd(c, "network", "create", "--driver=macvlan", "dm-nil-parent")
<add> assertNwIsAvailable(c, "dm-nil-parent")
<add>
<add> // start two containers on the same subnet
<add> dockerCmd(c, "run", "-d", "--net=dm-nil-parent", "--name=first", "busybox", "top")
<add> c.Assert(waitRun("first"), check.IsNil)
<add> dockerCmd(c, "run", "-d", "--net=dm-nil-parent", "--name=second", "busybox", "top")
<add> c.Assert(waitRun("second"), check.IsNil)
<add>
<add> // intra-network communications should succeed
<add> _, _, err := dockerCmdWithError("exec", "second", "ping", "-c", "1", "first")
<add> c.Assert(err, check.IsNil)
<add>}
<add>
<add>func (s *DockerSuite) TestDockerNetworkMacVlanBridgeInternalMode(c *check.C) {
<add> // macvlan bridge mode --internal containers can communicate inside the network but not externally
<add> testRequires(c, DaemonIsLinux, MacvlanKernelSupport, NotUserNamespace, NotArm)
<add> dockerCmd(c, "network", "create", "--driver=macvlan", "--internal", "dm-internal")
<add> assertNwIsAvailable(c, "dm-internal")
<add> nr := getNetworkResource(c, "dm-internal")
<add> c.Assert(nr.Internal, checker.True)
<add>
<add> // start two containers on the same subnet
<add> dockerCmd(c, "run", "-d", "--net=dm-internal", "--name=first", "busybox", "top")
<add> c.Assert(waitRun("first"), check.IsNil)
<add> dockerCmd(c, "run", "-d", "--net=dm-internal", "--name=second", "busybox", "top")
<add> c.Assert(waitRun("second"), check.IsNil)
<add>
<add> // access outside of the network should fail
<add> _, _, err := dockerCmdWithTimeout(time.Second, "exec", "first", "ping", "-c", "1", "-w", "1", "8.8.8.8")
<add> c.Assert(err, check.NotNil)
<add> // intra-network communications should succeed
<add> _, _, err = dockerCmdWithError("exec", "second", "ping", "-c", "1", "first")
<add> c.Assert(err, check.IsNil)
<add>}
<add>
<add>func (s *DockerSuite) TestDockerNetworkIpvlanL2NilParent(c *check.C) {
<add> // ipvlan l2 mode - dummy parent interface is provisioned dynamically
<add> testRequires(c, DaemonIsLinux, IpvlanKernelSupport, NotUserNamespace, NotArm)
<add> dockerCmd(c, "network", "create", "--driver=ipvlan", "di-nil-parent")
<add> assertNwIsAvailable(c, "di-nil-parent")
<add>
<add> // start two containers on the same subnet
<add> dockerCmd(c, "run", "-d", "--net=di-nil-parent", "--name=first", "busybox", "top")
<add> c.Assert(waitRun("first"), check.IsNil)
<add> dockerCmd(c, "run", "-d", "--net=di-nil-parent", "--name=second", "busybox", "top")
<add> c.Assert(waitRun("second"), check.IsNil)
<add>
<add> // intra-network communications should succeed
<add> _, _, err := dockerCmdWithError("exec", "second", "ping", "-c", "1", "first")
<add> c.Assert(err, check.IsNil)
<add>}
<add>
<add>func (s *DockerSuite) TestDockerNetworkIpvlanL2InternalMode(c *check.C) {
<add> // ipvlan l2 mode --internal containers can communicate inside the network but not externally
<add> testRequires(c, DaemonIsLinux, IpvlanKernelSupport, NotUserNamespace, NotArm)
<add> dockerCmd(c, "network", "create", "--driver=ipvlan", "--internal", "di-internal")
<add> assertNwIsAvailable(c, "di-internal")
<add> nr := getNetworkResource(c, "di-internal")
<add> c.Assert(nr.Internal, checker.True)
<add>
<add> // start two containers on the same subnet
<add> dockerCmd(c, "run", "-d", "--net=di-internal", "--name=first", "busybox", "top")
<add> c.Assert(waitRun("first"), check.IsNil)
<add> dockerCmd(c, "run", "-d", "--net=di-internal", "--name=second", "busybox", "top")
<add> c.Assert(waitRun("second"), check.IsNil)
<add>
<add> // access outside of the network should fail
<add> _, _, err := dockerCmdWithTimeout(time.Second, "exec", "first", "ping", "-c", "1", "-w", "1", "8.8.8.8")
<add> c.Assert(err, check.NotNil)
<add> // intra-network communications should succeed
<add> _, _, err = dockerCmdWithError("exec", "second", "ping", "-c", "1", "first")
<add> c.Assert(err, check.IsNil)
<add>}
<add>
<add>func (s *DockerSuite) TestDockerNetworkIpvlanL3NilParent(c *check.C) {
<add> // ipvlan l3 mode - dummy parent interface is provisioned dynamically
<add> testRequires(c, DaemonIsLinux, IpvlanKernelSupport, NotUserNamespace, NotArm)
<add> dockerCmd(c, "network", "create", "--driver=ipvlan", "--subnet=172.28.230.0/24",
<add> "--subnet=172.28.220.0/24", "-o", "ipvlan_mode=l3", "di-nil-parent-l3")
<add> assertNwIsAvailable(c, "di-nil-parent-l3")
<add>
<add> // start two containers on separate subnets
<add> dockerCmd(c, "run", "-d", "--ip=172.28.220.10", "--net=di-nil-parent-l3", "--name=first", "busybox", "top")
<add> c.Assert(waitRun("first"), check.IsNil)
<add> dockerCmd(c, "run", "-d", "--ip=172.28.230.10", "--net=di-nil-parent-l3", "--name=second", "busybox", "top")
<add> c.Assert(waitRun("second"), check.IsNil)
<add>
<add> // intra-network communications should succeed
<add> _, _, err := dockerCmdWithError("exec", "second", "ping", "-c", "1", "first")
<add> c.Assert(err, check.IsNil)
<add>}
<add>
<add>func (s *DockerSuite) TestDockerNetworkIpvlanL3InternalMode(c *check.C) {
<add> // ipvlan l3 mode --internal containers can communicate inside the network but not externally
<add> testRequires(c, DaemonIsLinux, IpvlanKernelSupport, NotUserNamespace, NotArm)
<add> dockerCmd(c, "network", "create", "--driver=ipvlan", "--subnet=172.28.230.0/24",
<add> "--subnet=172.28.220.0/24", "-o", "ipvlan_mode=l3", "--internal", "di-internal-l3")
<add> assertNwIsAvailable(c, "di-internal-l3")
<add> nr := getNetworkResource(c, "di-internal-l3")
<add> c.Assert(nr.Internal, checker.True)
<add>
<add> // start two containers on separate subnets
<add> dockerCmd(c, "run", "-d", "--ip=172.28.220.10", "--net=di-internal-l3", "--name=first", "busybox", "top")
<add> c.Assert(waitRun("first"), check.IsNil)
<add> dockerCmd(c, "run", "-d", "--ip=172.28.230.10", "--net=di-internal-l3", "--name=second", "busybox", "top")
<add> c.Assert(waitRun("second"), check.IsNil)
<add>
<add> // access outside of the network should fail
<add> _, _, err := dockerCmdWithTimeout(time.Second, "exec", "first", "ping", "-c", "1", "-w", "1", "8.8.8.8")
<add> c.Assert(err, check.NotNil)
<add> // intra-network communications should succeed
<add> _, _, err = dockerCmdWithError("exec", "second", "ping", "-c", "1", "first")
<add> c.Assert(err, check.IsNil)
<add>}
<add>
<add>func (s *DockerSuite) TestDockerNetworkMacVlanExistingParent(c *check.C) {
<add> // macvlan bridge mode - network delete should not remove a user-created parent link
<add> testRequires(c, DaemonIsLinux, MacvlanKernelSupport, NotUserNamespace, NotArm)
<add> netName := "dm-parent-exists"
<add> out, err := createMasterDummy(c, "dm-dummy0")
<add> c.Assert(err, check.IsNil, check.Commentf(out))
<add> // create a network using an existing parent interface
<add> dockerCmd(c, "network", "create", "--driver=macvlan", "-o", "parent=dm-dummy0", netName)
<add> assertNwIsAvailable(c, netName)
<add> // delete the network while preserving the parent link
<add> dockerCmd(c, "network", "rm", netName)
<add> assertNwNotAvailable(c, netName)
<add> // verify the network delete did not delete the predefined link
<add> out, err = linkExists(c, "dm-dummy0")
<add> c.Assert(err, check.IsNil, check.Commentf(out))
<add> deleteInterface(c, "dm-dummy0")
<add>}
<add>
<add>func (s *DockerSuite) TestDockerNetworkMacVlanSubinterface(c *check.C) {
<add> // macvlan bridge mode - containers on an 802.1q sub-interface can communicate, and network delete preserves the pre-created sub-interface
<add> testRequires(c, DaemonIsLinux, MacvlanKernelSupport, NotUserNamespace, NotArm)
<add> netName := "dm-subinterface"
<add> out, err := createMasterDummy(c, "dm-dummy0")
<add> c.Assert(err, check.IsNil, check.Commentf(out))
<add> out, err = createVlanInterface(c, "dm-dummy0", "dm-dummy0.20", "20")
<add> c.Assert(err, check.IsNil, check.Commentf(out))
<add> // create a network using an existing parent interface
<add> dockerCmd(c, "network", "create", "--driver=macvlan", "-o", "parent=dm-dummy0.20", netName)
<add> assertNwIsAvailable(c, netName)
<add>
<add> // start containers on 802.1q tagged '-o parent' sub-interface
<add> dockerCmd(c, "run", "-d", "--net=dm-subinterface", "--name=first", "busybox", "top")
<add> c.Assert(waitRun("first"), check.IsNil)
<add> dockerCmd(c, "run", "-d", "--net=dm-subinterface", "--name=second", "busybox", "top")
<add> c.Assert(waitRun("second"), check.IsNil)
<add> // verify containers can communicate
<add> _, _, err = dockerCmdWithError("exec", "second", "ping", "-c", "1", "first")
<add> c.Assert(err, check.IsNil)
<add>
<add> // remove the containers
<add> dockerCmd(c, "rm", "-f", "first")
<add> dockerCmd(c, "rm", "-f", "second")
<add> // delete the network while preserving the parent link
<add> dockerCmd(c, "network", "rm", netName)
<add> assertNwNotAvailable(c, netName)
<add> // verify the network delete did not delete the predefined sub-interface
<add> out, err = linkExists(c, "dm-dummy0.20")
<add> c.Assert(err, check.IsNil, check.Commentf(out))
<add> // delete the parent interface which also collects the slave
<add> deleteInterface(c, "dm-dummy0")
<add>}
<add>
<add>func createMasterDummy(c *check.C, master string) (string, error) {
<add> // ip link add <dummy_name> type dummy
<add> args := []string{"link", "add", master, "type", "dummy"}
<add> ipLinkCmd := exec.Command("ip", args...)
<add> out, _, err := runCommandWithOutput(ipLinkCmd)
<add> if err != nil {
<add> return out, err
<add> }
<add> // ip link set dummy_name up
<add> args = []string{"link", "set", master, "up"}
<add> ipLinkCmd = exec.Command("ip", args...)
<add> out, _, err = runCommandWithOutput(ipLinkCmd)
<add> if err != nil {
<add> return out, err
<add> }
<add> return out, err
<add>}
<add>
<add>func createVlanInterface(c *check.C, master, slave, id string) (string, error) {
<add> // ip link add link <master> name <master>.<VID> type vlan id <VID>
<add> args := []string{"link", "add", "link", master, "name", slave, "type", "vlan", "id", id}
<add> ipLinkCmd := exec.Command("ip", args...)
<add> out, _, err := runCommandWithOutput(ipLinkCmd)
<add> if err != nil {
<add> return out, err
<add> }
<add> // ip link set <sub_interface_name> up
<add> args = []string{"link", "set", slave, "up"}
<add> ipLinkCmd = exec.Command("ip", args...)
<add> out, _, err = runCommandWithOutput(ipLinkCmd)
<add> if err != nil {
<add> return out, err
<add> }
<add> return out, err
<add>}
<add>
<add>func linkExists(c *check.C, master string) (string, error) {
<add> // verify the specified link exists, ip link show <link_name>
<add> args := []string{"link", "show", master}
<add> ipLinkCmd := exec.Command("ip", args...)
<add> out, _, err := runCommandWithOutput(ipLinkCmd)
<add> if err != nil {
<add> return out, err
<add> }
<add> return out, err
<add>} | 1 |
Javascript | Javascript | fix net.socket.connect argument parsing | c60cdcda4e39081fa56568df941aff1a4b56b39c | <ide><path>lib/net_legacy.js
<ide> Socket.prototype.connect = function() {
<ide> self._connecting = true; // set false in doConnect
<ide> self.writable = true;
<ide>
<del> var lastArg = arguments[arguments.length - 1];
<del> if (typeof lastArg == 'function') {
<del> self.addListener('connect', lastArg);
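<add> // The host argument is optional: connect(port[, host][, cb]) for TCP,
<add> // or connect(path[, cb]) for UNIX sockets.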
<add> var host;
<add> if (typeof arguments[1] === 'function') {
<add> self.on('connect', arguments[1]);
<add> } else {
<add> host = arguments[1];
<add> if (typeof arguments[2] === 'function') {
<add> self.on('connect', arguments[2]);
<add> }
<ide> }
<ide>
<ide> var port = toPort(arguments[0]);
<ide> Socket.prototype.connect = function() {
<ide> doConnect(self, arguments[0]);
<ide> } else {
<ide> // TCP
<del> require('dns').lookup(arguments[1], function(err, ip, addressType) {
<add> require('dns').lookup(host, function(err, ip, addressType) {
<ide> if (err) {
<ide> self.emit('error', err);
<ide> } else {
<ide><path>lib/net_uv.js
<ide> exports.createServer = function() {
<ide> };
<ide>
<ide>
<del>exports.connect = exports.createConnection = function(port, host /* [cb] */ ) {
<add>exports.connect = exports.createConnection = function(port /* [host], [cb] */) {
<ide> var s = new Socket();
<del> s.connect(port, host, arguments[2]);
<add> s.connect(port, arguments[1], arguments[2]);
<ide> return s;
<ide> };
<ide>
<ide> function connectip(self, port, ip) {
<ide> }
<ide>
<ide>
<del>Socket.prototype.connect = function(port, host /* [cb] */) {
<add>Socket.prototype.connect = function(port /* [host], [cb] */) {
<ide> var self = this;
<ide>
<ide> if (this.destroyed) {
<ide> this._handle = new TCP();
<ide> initSocketHandle(this);
<ide> }
<ide>
<del> if (typeof arguments[2] === 'function') {
<del> self.on('connect', arguments[2]);
<add> var host;
<add> if (typeof arguments[1] === 'function') {
<add> self.on('connect', arguments[1]);
<add> } else {
<add> host = arguments[1];
<add> if (typeof arguments[2] === 'function') {
<add> self.on('connect', arguments[2]);
<add> }
<ide> }
<ide>
<ide> timers.active(this);
<ide><path>test/simple/test-net-create-connection.js
<ide> var assert = require('assert');
<ide> var net = require('net');
<ide>
<ide> var tcpPort = common.PORT;
<del>var connectHappened = false;
<add>var clientConnected = 0;
<add>var serverConnected = 0;
<ide>
<ide> var server = net.createServer(function(socket) {
<del> server.close();
<ide> socket.end();
<add> if (++serverConnected === 4) {
<add> server.close();
<add> }
<ide> });
<ide> server.listen(tcpPort, 'localhost', function() {
<del> var client = net.createConnection(tcpPort, 'localhost', function() {
<del> connectHappened = true;
<del> });
<add> function cb() {
<add> ++clientConnected;
<add> }
<add>
<add> net.createConnection(tcpPort).on('connect', cb);
<add> net.createConnection(tcpPort, 'localhost').on('connect', cb);
<add> net.createConnection(tcpPort, cb);
<add> net.createConnection(tcpPort, 'localhost', cb);
<ide> });
<ide>
<ide> process.on('exit', function () {
<del> assert.ok(connectHappened);
<add> assert.equal(clientConnected, 4);
<ide> });
<ide> | 3 |
Text | Text | fix code snippets in tls.md | 054481633a3a6feba53213b8a5335ccfa421de0e | <ide><path>doc/api/tls.md
<ide> The `callback` function, if specified, will be added as a listener for the
<ide>
<ide> `tls.connect()` returns a [`tls.TLSSocket`][] object.
<ide>
<del>The following implements a simple "echo server" example:
<add>Here is an example of a client for the echo server described in
<add>[`tls.createServer()`][]:
<ide>
<ide> ```js
<add>// This example assumes that you have created an echo server that is
<add>// listening on port 8000.
<ide> const tls = require('tls');
<ide> const fs = require('fs');
<ide>
<ide> socket.on('data', (data) => {
<ide> console.log(data);
<ide> });
<ide> socket.on('end', () => {
<del> server.close();
<add> console.log('client ends');
<ide> });
<ide> ```
<ide>
<ide> Or
<ide>
<ide> ```js
<add>// This example assumes that you have created an echo server that is
<add>// listening on port 8000.
<ide> const tls = require('tls');
<ide> const fs = require('fs');
<ide>
<ide> socket.on('data', (data) => {
<ide> console.log(data);
<ide> });
<ide> socket.on('end', () => {
<del> server.close();
<add> console.log('client ends');
<ide> });
<ide> ```
<ide> | 1 |
Javascript | Javascript | remove common.port from test-tls-securepair-client | 9ac42f1be42f162ec9cf6bb56e817d7b210a6db9 | <ide><path>test/sequential/test-tls-securepair-client.js
<ide> function test(keyPath, certPath, check, next) {
<ide> const cert = fixtures.readSync(certPath).toString();
<ide>
<ide> const server = spawn(common.opensslCli, ['s_server',
<del> '-accept', common.PORT,
<add> '-accept', 0,
<ide> '-cert', fixtures.path(certPath),
<ide> '-key', fixtures.path(keyPath)]);
<ide> server.stdout.pipe(process.stdout);
<ide> function test(keyPath, certPath, check, next) {
<ide> console.log(state);
<ide> switch (state) {
<ide> case 'WAIT-ACCEPT':
<del> if (/ACCEPT/.test(serverStdoutBuffer)) {
<del> // Give s_server half a second to start up.
<del> setTimeout(startClient, 500);
<add> const matches = serverStdoutBuffer.match(/ACCEPT .*?:(\d+)/);
<add> if (matches) {
<add> const port = matches[1];
<ide> state = 'WAIT-HELLO';
<add> startClient(port);
<ide> }
<ide> break;
<ide>
<ide> function test(keyPath, certPath, check, next) {
<ide> });
<ide>
<ide>
<del> function startClient() {
<add> function startClient(port) {
<ide> const s = new net.Stream();
<ide>
<ide> const sslcontext = tls.createSecureContext({ key, cert });
<ide> function test(keyPath, certPath, check, next) {
<ide> pair.encrypted.pipe(s);
<ide> s.pipe(pair.encrypted);
<ide>
<del> s.connect(common.PORT);
<add> s.connect(port);
<ide>
<ide> s.on('connect', function() {
<ide> console.log('client connected'); | 1 |
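Note: binding to port 0 and parsing the assigned port from the server's output is the standard way to avoid fixed-port collisions in parallel test runs. The same idea in Python (sketch):

```python
import socket

# Port 0 asks the kernel for any free ephemeral port; read the real
# port back with getsockname() before pointing a client at it.
server = socket.socket()
server.bind(("127.0.0.1", 0))
server.listen()
assigned_port = server.getsockname()[1]

client = socket.create_connection(("127.0.0.1", assigned_port))
client.close()
server.close()
```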
Ruby | Ruby | remove dead test code for unsupported adapters | 8df8334327b3afe2c66458ed7877d2aacfabd515 | <ide><path>activerecord/lib/active_record/connection_adapters/abstract/schema_definitions.rb
<ide> def [](name)
<ide> # Default is (38,0).
<ide> # * DB2: <tt>:precision</tt> [1..63], <tt>:scale</tt> [0..62].
<ide> # Default unknown.
<del> # * Firebird: <tt>:precision</tt> [1..18], <tt>:scale</tt> [0..18].
<del> # Default (9,0). Internal types NUMERIC and DECIMAL have different
<del> # storage rules, decimal being better.
<del> # * FrontBase?: <tt>:precision</tt> [1..38], <tt>:scale</tt> [0..38].
<del> # Default (38,0). WARNING Max <tt>:precision</tt>/<tt>:scale</tt> for
<del> # NUMERIC is 19, and DECIMAL is 38.
<ide> # * SqlServer?: <tt>:precision</tt> [1..38], <tt>:scale</tt> [0..38].
<ide> # Default (38,0).
<del> # * Sybase: <tt>:precision</tt> [1..38], <tt>:scale</tt> [0..38].
<del> # Default (38,0).
<del> # * OpenBase?: Documentation unclear. Claims storage in <tt>double</tt>.
<ide> #
<ide> # This method returns <tt>self</tt>.
<ide> #
<ide><path>activerecord/lib/active_record/migration.rb
<ide> def initialize
<ide> # == Database support
<ide> #
<ide> # Migrations are currently supported in MySQL, PostgreSQL, SQLite,
<del> # SQL Server, Sybase, and Oracle (all supported databases except DB2).
<add> # SQL Server, and Oracle (all supported databases except DB2).
<ide> #
<ide> # == More examples
<ide> #
<ide><path>activerecord/lib/active_record/model_schema.rb
<ide> def reset_sequence_name #:nodoc:
<ide> # given block. This is required for Oracle and is useful for any
<ide> # database which relies on sequences for primary key generation.
<ide> #
<del> # If a sequence name is not explicitly set when using Oracle or Firebird,
<add> # If a sequence name is not explicitly set when using Oracle,
<ide> # it will default to the commonly used pattern of: #{table_name}_seq
<ide> #
<ide> # If a sequence name is not explicitly set when using PostgreSQL, it
<ide><path>activerecord/test/cases/adapter_test.rb
<ide> def test_indexes
<ide> @connection.add_index :accounts, :firm_id, :name => idx_name
<ide> indexes = @connection.indexes("accounts")
<ide> assert_equal "accounts", indexes.first.table
<del> # OpenBase does not have the concept of a named index
<del> # Indexes are merely properties of columns.
<del> assert_equal idx_name, indexes.first.name unless current_adapter?(:OpenBaseAdapter)
<add> assert_equal idx_name, indexes.first.name
<ide> assert !indexes.first.unique
<ide> assert_equal ["firm_id"], indexes.first.columns
<ide> else
<ide> def test_reset_empty_table_with_custom_pk
<ide> assert_equal 1, Movie.create(:name => 'fight club').id
<ide> end
<ide>
<del> if ActiveRecord::Base.connection.adapter_name != "FrontBase"
<del> def test_reset_table_with_non_integer_pk
<del> Subscriber.delete_all
<del> Subscriber.connection.reset_pk_sequence! 'subscribers'
<del> sub = Subscriber.new(:name => 'robert drake')
<del> sub.id = 'bob drake'
<del> assert_nothing_raised { sub.save! }
<del> end
<add> def test_reset_table_with_non_integer_pk
<add> Subscriber.delete_all
<add> Subscriber.connection.reset_pk_sequence! 'subscribers'
<add> sub = Subscriber.new(:name => 'robert drake')
<add> sub.id = 'bob drake'
<add> assert_nothing_raised { sub.save! }
<ide> end
<ide> end
<ide>
<ide> def test_uniqueness_violations_are_translated_to_specific_exception
<ide> @connection.execute "INSERT INTO subscribers(nick) VALUES('me')"
<ide> end
<ide> end
<del>
<add>
<ide> unless current_adapter?(:SQLite3Adapter)
<ide> def test_foreign_key_violations_are_translated_to_specific_exception
<ide> assert_raises(ActiveRecord::InvalidForeignKey) do
<ide> def test_foreign_key_violations_are_translated_to_specific_exception
<ide> end
<ide> end
<ide> end
<del>
<add>
<ide> def test_foreign_key_violations_are_translated_to_specific_exception_with_validate_false
<ide> klass_has_fk = Class.new(ActiveRecord::Base) do
<ide> self.table_name = 'fk_test_has_fk'
<ide><path>activerecord/test/cases/associations/eager_test.rb
<ide> def test_eager_with_has_many_and_limit
<ide> end
<ide>
<ide> def test_eager_with_has_many_and_limit_and_conditions
<del> if current_adapter?(:OpenBaseAdapter)
<del> posts = Post.all.merge!(:includes => [ :author, :comments ], :limit => 2, :where => "FETCHBLOB(posts.body) = 'hello'", :order => "posts.id").to_a
<del> else
<del> posts = Post.all.merge!(:includes => [ :author, :comments ], :limit => 2, :where => "posts.body = 'hello'", :order => "posts.id").to_a
<del> end
<add> posts = Post.all.merge!(:includes => [ :author, :comments ], :limit => 2, :where => "posts.body = 'hello'", :order => "posts.id").to_a
<ide> assert_equal 2, posts.size
<ide> assert_equal [4,5], posts.collect { |p| p.id }
<ide> end
<ide>
<ide> def test_eager_with_has_many_and_limit_and_conditions_array
<del> if current_adapter?(:OpenBaseAdapter)
<del> posts = Post.all.merge!(:includes => [ :author, :comments ], :limit => 2, :where => [ "FETCHBLOB(posts.body) = ?", 'hello' ], :order => "posts.id").to_a
<del> else
<del> posts = Post.all.merge!(:includes => [ :author, :comments ], :limit => 2, :where => [ "posts.body = ?", 'hello' ], :order => "posts.id").to_a
<del> end
<add> posts = Post.all.merge!(:includes => [ :author, :comments ], :limit => 2, :where => [ "posts.body = ?", 'hello' ], :order => "posts.id").to_a
<ide> assert_equal 2, posts.size
<ide> assert_equal [4,5], posts.collect { |p| p.id }
<ide> end
<ide> def test_preconfigured_includes_with_has_many_and_habtm
<ide> end
<ide>
<ide> def test_count_with_include
<del> if current_adapter?(:SybaseAdapter)
<del> assert_equal 3, authors(:david).posts_with_comments.where("len(comments.body) > 15").references(:comments).count
<del> elsif current_adapter?(:OpenBaseAdapter)
<del> assert_equal 3, authors(:david).posts_with_comments.where("length(FETCHBLOB(comments.body)) > 15").references(:comments).count
<del> else
<del> assert_equal 3, authors(:david).posts_with_comments.where("length(comments.body) > 15").references(:comments).count
<del> end
<add> assert_equal 3, authors(:david).posts_with_comments.where("length(comments.body) > 15").references(:comments).count
<ide> end
<ide>
<ide> def test_load_with_sti_sharing_association
<ide><path>activerecord/test/cases/base_test.rb
<ide> def test_table_exists
<ide> end
<ide>
<ide> def test_preserving_date_objects
<del> if current_adapter?(:SybaseAdapter)
<del> # Sybase ctlib does not (yet?) support the date type; use datetime instead.
<del> assert_kind_of(
<del> Time, Topic.find(1).last_read,
<del> "The last_read attribute should be of the Time class"
<del> )
<del> else
<del> # Oracle enhanced adapter allows to define Date attributes in model class (see topic.rb)
<del> assert_kind_of(
<del> Date, Topic.find(1).last_read,
<del> "The last_read attribute should be of the Date class"
<del> )
<del> end
<add> # Oracle enhanced adapter allows to define Date attributes in model class (see topic.rb)
<add> assert_kind_of(
<add> Date, Topic.find(1).last_read,
<add> "The last_read attribute should be of the Date class"
<add> )
<ide> end
<ide>
<ide> def test_previously_changed
<ide> def test_default_values
<ide> end
<ide> end
<ide>
<del> # Oracle, and Sybase do not have a TIME datatype.
<del> unless current_adapter?(:OracleAdapter, :SybaseAdapter)
<add> # Oracle does not have a TIME datatype.
<add> unless current_adapter?(:OracleAdapter)
<ide> def test_utc_as_time_zone
<ide> with_timezone_config default: :utc do
<ide> attributes = { "bonus_time" => "5:42:00AM" }
<ide> def test_default_values_on_empty_strings
<ide> topic = Topic.find(topic.id)
<ide> assert_nil topic.last_read
<ide>
<del> # Sybase adapter does not allow nulls in boolean columns
<del> if current_adapter?(:SybaseAdapter)
<del> assert topic.approved == false
<del> else
<del> assert_nil topic.approved
<del> end
<add> assert_nil topic.approved
<ide> end
<ide>
<ide> def test_equality
<ide> def test_group_weirds_by_from
<ide> end
<ide>
<ide> def test_attributes_on_dummy_time
<del> # Oracle, and Sybase do not have a TIME datatype.
<del> return true if current_adapter?(:OracleAdapter, :SybaseAdapter)
<add> # Oracle does not have a TIME datatype.
<add> return true if current_adapter?(:OracleAdapter)
<ide>
<ide> with_timezone_config default: :local do
<ide> attributes = {
<ide> def test_attributes_on_dummy_time
<ide> end
<ide>
<ide> def test_attributes_on_dummy_time_with_invalid_time
<del> # Oracle, and Sybase do not have a TIME datatype.
<del> return true if current_adapter?(:OracleAdapter, :SybaseAdapter)
<add> # Oracle does not have a TIME datatype.
<add> return true if current_adapter?(:OracleAdapter)
<ide>
<ide> attributes = {
<ide> "bonus_time" => "not a time"
<ide><path>activerecord/test/cases/binary_test.rb
<ide> require "cases/helper"
<ide>
<ide> # Without using prepared statements, it makes no sense to test
<del># BLOB data with DB2 or Firebird, because the length of a statement
<add># BLOB data with DB2, because the length of a statement
<ide> # is limited to 32KB.
<del>unless current_adapter?(:SybaseAdapter, :DB2Adapter, :FirebirdAdapter)
<add>unless current_adapter?(:DB2Adapter)
<ide> require 'models/binary'
<ide>
<ide> class BinaryTest < ActiveRecord::TestCase
<ide><path>activerecord/test/cases/defaults_test.rb
<ide> def test_nil_defaults_for_not_null_columns
<ide> end
<ide> end
<ide>
<del> if current_adapter?(:PostgreSQLAdapter, :FirebirdAdapter, :OpenBaseAdapter, :OracleAdapter)
<add> if current_adapter?(:PostgreSQLAdapter, :OracleAdapter)
<ide> def test_default_integers
<ide> default = Default.new
<ide> assert_instance_of Fixnum, default.positive_integer
<ide><path>activerecord/test/cases/inheritance_test.rb
<ide> def test_base_class_activerecord_error
<ide> end
<ide>
<ide> def test_a_bad_type_column
<del> #SQLServer need to turn Identity Insert On before manually inserting into the Identity column
<del> if current_adapter?(:SybaseAdapter)
<del> Company.connection.execute "SET IDENTITY_INSERT companies ON"
<del> end
<ide> Company.connection.insert "INSERT INTO companies (id, #{QUOTED_TYPE}, name) VALUES(100, 'bad_class!', 'Not happening')"
<ide>
<del> #We then need to turn it back Off before continuing.
<del> if current_adapter?(:SybaseAdapter)
<del> Company.connection.execute "SET IDENTITY_INSERT companies OFF"
<del> end
<ide> assert_raise(ActiveRecord::SubclassNotFound) { Company.find(100) }
<ide> end
<ide>
<ide><path>activerecord/test/cases/locking_test.rb
<ide> def test_destroy_dependents
<ide> def add_counter_column_to(model, col='test_count')
<ide> model.connection.add_column model.table_name, col, :integer, :null => false, :default => 0
<ide> model.reset_column_information
<del> # OpenBase does not set a value to existing rows when adding a not null default column
<del> model.update_all(col => 0) if current_adapter?(:OpenBaseAdapter)
<ide> end
<ide>
<ide> def remove_counter_column_from(model, col = :test_count)
<ide> def counter_test(model, expected_count)
<ide> # is so cumbersome. Will deadlock Ruby threads if the underlying db.execute
<ide> # blocks, so separate script called by Kernel#system is needed.
<ide> # (See exec vs. async_exec in the PostgreSQL adapter.)
<del>unless current_adapter?(:SybaseAdapter, :OpenBaseAdapter) || in_memory_db?
<add>unless in_memory_db?
<ide> class PessimisticLockingTest < ActiveRecord::TestCase
<ide> self.use_transactional_fixtures = false
<ide> fixtures :people, :readers
<ide><path>activerecord/test/cases/migration/change_schema_test.rb
<ide> def test_create_table_without_a_block
<ide> connection.create_table table_name
<ide> end
<ide>
<del> # Sybase, and SQLite3 will not allow you to add a NOT NULL
<add> # SQLite3 will not allow you to add a NOT NULL
<ide> # column to a table without a default value.
<del> unless current_adapter?(:SybaseAdapter, :SQLite3Adapter)
<add> unless current_adapter?(:SQLite3Adapter)
<ide> def test_add_column_not_null_without_default
<ide> connection.create_table :testings do |t|
<ide> t.column :foo, :string
<ide> def test_add_column_not_null_with_default
<ide> end
<ide>
<ide> con = connection
<del> connection.enable_identity_insert("testings", true) if current_adapter?(:SybaseAdapter)
<ide> connection.execute "insert into testings (#{con.quote_column_name('id')}, #{con.quote_column_name('foo')}) values (1, 'hello')"
<del> connection.enable_identity_insert("testings", false) if current_adapter?(:SybaseAdapter)
<ide> assert_nothing_raised {connection.add_column :testings, :bar, :string, :null => false, :default => "default" }
<ide>
<ide> assert_raises(ActiveRecord::StatementInvalid) do
<del> unless current_adapter?(:OpenBaseAdapter)
<del> connection.execute "insert into testings (#{con.quote_column_name('id')}, #{con.quote_column_name('foo')}, #{con.quote_column_name('bar')}) values (2, 'hello', NULL)"
<del> else
<del> connection.insert("INSERT INTO testings (#{con.quote_column_name('id')}, #{con.quote_column_name('foo')}, #{con.quote_column_name('bar')}) VALUES (2, 'hello', NULL)",
<del> "Testing Insert","id",2)
<del> end
<add> connection.execute "insert into testings (#{con.quote_column_name('id')}, #{con.quote_column_name('foo')}, #{con.quote_column_name('bar')}) values (2, 'hello', NULL)"
<ide> end
<ide> end
<ide>
<ide><path>activerecord/test/cases/migration/column_attributes_test.rb
<ide> def test_native_decimal_insert_manual_vs_automatic
<ide> # Do a manual insertion
<ide> if current_adapter?(:OracleAdapter)
<ide> connection.execute "insert into test_models (id, wealth) values (people_seq.nextval, 12345678901234567890.0123456789)"
<del> elsif current_adapter?(:OpenBaseAdapter) || (current_adapter?(:MysqlAdapter) && Mysql.client_version < 50003) #before mysql 5.0.3 decimals stored as strings
<add> elsif current_adapter?(:MysqlAdapter) && Mysql.client_version < 50003 #before mysql 5.0.3 decimals stored as strings
<ide> connection.execute "insert into test_models (wealth) values ('12345678901234567890.0123456789')"
<ide> elsif current_adapter?(:PostgreSQLAdapter)
<ide> connection.execute "insert into test_models (wealth) values (12345678901234567890.0123456789)"
<ide> def test_native_types
<ide> assert_equal Fixnum, bob.age.class
<ide> assert_equal Time, bob.birthday.class
<ide>
<del> if current_adapter?(:OracleAdapter, :SybaseAdapter)
<del> # Sybase, and Oracle don't differentiate between date/time
<add> if current_adapter?(:OracleAdapter)
<add> # Oracle doesn't differentiate between date/time
<ide> assert_equal Time, bob.favorite_day.class
<ide> else
<ide> assert_equal Date, bob.favorite_day.class
<ide><path>activerecord/test/cases/migration/index_test.rb
<ide> def setup
<ide> ActiveRecord::Base.primary_key_prefix_type = nil
<ide> end
<ide>
<del> unless current_adapter?(:OpenBaseAdapter)
<del> def test_rename_index
<del> # keep the names short to make Oracle and similar behave
<del> connection.add_index(table_name, [:foo], :name => 'old_idx')
<del> connection.rename_index(table_name, 'old_idx', 'new_idx')
<del>
<del> # if the adapter doesn't support the indexes call, pick defaults that let the test pass
<del> assert_not connection.index_name_exists?(table_name, 'old_idx', false)
<del> assert connection.index_name_exists?(table_name, 'new_idx', true)
<del> end
<add> def test_rename_index
<add> # keep the names short to make Oracle and similar behave
<add> connection.add_index(table_name, [:foo], :name => 'old_idx')
<add> connection.rename_index(table_name, 'old_idx', 'new_idx')
<add>
<add> # if the adapter doesn't support the indexes call, pick defaults that let the test pass
<add> assert_not connection.index_name_exists?(table_name, 'old_idx', false)
<add> assert connection.index_name_exists?(table_name, 'new_idx', true)
<add> end
<ide>
<del> def test_double_add_index
<add> def test_double_add_index
<add> connection.add_index(table_name, [:foo], :name => 'some_idx')
<add> assert_raises(ArgumentError) {
<ide> connection.add_index(table_name, [:foo], :name => 'some_idx')
<del> assert_raises(ArgumentError) {
<del> connection.add_index(table_name, [:foo], :name => 'some_idx')
<del> }
<del> end
<add> }
<add> end
<ide>
<del> def test_remove_nonexistent_index
<del> # we do this by name, so OpenBase is a wash as noted above
<del> assert_raise(ArgumentError) { connection.remove_index(table_name, "no_such_index") }
<del> end
<add> def test_remove_nonexistent_index
<add> assert_raise(ArgumentError) { connection.remove_index(table_name, "no_such_index") }
<ide> end
<ide>
<ide> def test_add_index_works_with_long_index_names
<ide> def test_add_index
<ide> connection.add_index("testings", "last_name")
<ide> connection.remove_index("testings", "last_name")
<ide>
<del> # Orcl nds shrt indx nms. Sybs 2.
<del> # OpenBase does not have named indexes. You must specify a single column name
<del> unless current_adapter?(:SybaseAdapter, :OpenBaseAdapter)
<add> connection.add_index("testings", ["last_name", "first_name"])
<add> connection.remove_index("testings", :column => ["last_name", "first_name"])
<add>
<add> # Oracle adapter cannot have specified index name larger than 30 characters
<add> # Oracle adapter is shortening index name when just column list is given
<add> unless current_adapter?(:OracleAdapter)
<ide> connection.add_index("testings", ["last_name", "first_name"])
<del> connection.remove_index("testings", :column => ["last_name", "first_name"])
<del>
<del> # Oracle adapter cannot have specified index name larger than 30 characters
<del> # Oracle adapter is shortening index name when just column list is given
<del> unless current_adapter?(:OracleAdapter)
<del> connection.add_index("testings", ["last_name", "first_name"])
<del> connection.remove_index("testings", :name => :index_testings_on_last_name_and_first_name)
<del> connection.add_index("testings", ["last_name", "first_name"])
<del> connection.remove_index("testings", "last_name_and_first_name")
<del> end
<add> connection.remove_index("testings", :name => :index_testings_on_last_name_and_first_name)
<ide> connection.add_index("testings", ["last_name", "first_name"])
<del> connection.remove_index("testings", ["last_name", "first_name"])
<add> connection.remove_index("testings", "last_name_and_first_name")
<add> end
<add> connection.add_index("testings", ["last_name", "first_name"])
<add> connection.remove_index("testings", ["last_name", "first_name"])
<ide>
<del> connection.add_index("testings", ["last_name"], :length => 10)
<del> connection.remove_index("testings", "last_name")
<add> connection.add_index("testings", ["last_name"], :length => 10)
<add> connection.remove_index("testings", "last_name")
<ide>
<del> connection.add_index("testings", ["last_name"], :length => {:last_name => 10})
<del> connection.remove_index("testings", ["last_name"])
<add> connection.add_index("testings", ["last_name"], :length => {:last_name => 10})
<add> connection.remove_index("testings", ["last_name"])
<ide>
<del> connection.add_index("testings", ["last_name", "first_name"], :length => 10)
<del> connection.remove_index("testings", ["last_name", "first_name"])
<add> connection.add_index("testings", ["last_name", "first_name"], :length => 10)
<add> connection.remove_index("testings", ["last_name", "first_name"])
<ide>
<del> connection.add_index("testings", ["last_name", "first_name"], :length => {:last_name => 10, :first_name => 20})
<del> connection.remove_index("testings", ["last_name", "first_name"])
<del> end
<add> connection.add_index("testings", ["last_name", "first_name"], :length => {:last_name => 10, :first_name => 20})
<add> connection.remove_index("testings", ["last_name", "first_name"])
<ide>
<del> # quoting
<del> # Note: changed index name from "key" to "key_idx" since "key" is a Firebird reserved word
<del> # OpenBase does not have named indexes. You must specify a single column name
<del> unless current_adapter?(:OpenBaseAdapter)
<del> connection.add_index("testings", ["key"], :name => "key_idx", :unique => true)
<del> connection.remove_index("testings", :name => "key_idx", :unique => true)
<del> end
<add> connection.add_index("testings", ["key"], :name => "key_idx", :unique => true)
<add> connection.remove_index("testings", :name => "key_idx", :unique => true)
<ide>
<del> # Sybase adapter does not support indexes on :boolean columns
<del> # OpenBase does not have named indexes. You must specify a single column
<del> unless current_adapter?(:SybaseAdapter, :OpenBaseAdapter)
<del> connection.add_index("testings", %w(last_name first_name administrator), :name => "named_admin")
<del> connection.remove_index("testings", :name => "named_admin")
<del> end
<add> connection.add_index("testings", %w(last_name first_name administrator), :name => "named_admin")
<add> connection.remove_index("testings", :name => "named_admin")
<ide>
<ide> # Selected adapters support index sort order
<ide> if current_adapter?(:SQLite3Adapter, :MysqlAdapter, :Mysql2Adapter, :PostgreSQLAdapter)
<ide><path>activerecord/test/cases/migration/rename_table_test.rb
<ide> def test_rename_table_for_sqlite_should_work_with_reserved_words
<ide> def test_rename_table
<ide> rename_table :test_models, :octopi
<ide>
<del> # Using explicit id in insert for compatibility across all databases
<del> connection.enable_identity_insert("octopi", true) if current_adapter?(:SybaseAdapter)
<del>
<ide> connection.execute "INSERT INTO octopi (#{connection.quote_column_name('id')}, #{connection.quote_column_name('url')}) VALUES (1, 'http://www.foreverflying.com/octopus-black7.jpg')"
<ide>
<del> connection.enable_identity_insert("octopi", false) if current_adapter?(:SybaseAdapter)
<del>
<ide> assert_equal 'http://www.foreverflying.com/octopus-black7.jpg', connection.select_value("SELECT url FROM octopi WHERE id=1")
<ide> end
<ide>
<ide> def test_rename_table_with_an_index
<ide>
<ide> rename_table :test_models, :octopi
<ide>
<del> # Using explicit id in insert for compatibility across all databases
<del> connection.enable_identity_insert("octopi", true) if current_adapter?(:SybaseAdapter)
<ide> connection.execute "INSERT INTO octopi (#{connection.quote_column_name('id')}, #{connection.quote_column_name('url')}) VALUES (1, 'http://www.foreverflying.com/octopus-black7.jpg')"
<del> connection.enable_identity_insert("octopi", false) if current_adapter?(:SybaseAdapter)
<ide>
<ide> assert_equal 'http://www.foreverflying.com/octopus-black7.jpg', connection.select_value("SELECT url FROM octopi WHERE id=1")
<ide> index = connection.indexes(:octopi).first
<ide><path>activerecord/test/cases/multiparameter_attributes_test.rb
<ide> def test_multiparameter_attributes_on_time_with_skip_time_zone_conversion_for_at
<ide> Topic.skip_time_zone_conversion_for_attributes = []
<ide> end
<ide>
<del> # Oracle, and Sybase do not have a TIME datatype.
<del> unless current_adapter?(:OracleAdapter, :SybaseAdapter)
<add> # Oracle does not have a TIME datatype.
<add> unless current_adapter?(:OracleAdapter)
<ide> def test_multiparameter_attributes_on_time_only_column_with_time_zone_aware_attributes_does_not_do_time_zone_conversion
<ide> with_timezone_config default: :utc, aware_attributes: true, zone: -28800 do
<ide> attributes = {
<ide><path>activerecord/test/cases/pooled_connections_test.rb
<ide> def test_pooled_connection_checkin_one
<ide> def add_record(name)
<ide> ActiveRecord::Base.connection_pool.with_connection { Project.create! :name => name }
<ide> end
<del>end unless current_adapter?(:FrontBase) || in_memory_db?
<add>end unless in_memory_db?
<ide><path>activerecord/test/cases/test_case.rb
<ide> def teardown
<ide> end
<ide>
<ide> def assert_date_from_db(expected, actual, message = nil)
<del> # SybaseAdapter doesn't have a separate column type just for dates,
<del> # so the time is in the string and incorrectly formatted
<del> if current_adapter?(:SybaseAdapter)
<del> assert_equal expected.to_s, actual.to_date.to_s, message
<del> else
<del> assert_equal expected.to_s, actual.to_s, message
<del> end
<add> assert_equal expected.to_s, actual.to_s, message
<ide> end
<ide>
<ide> def capture_sql
<ide><path>activerecord/test/cases/xml_serialization_test.rb
<ide> def test_to_xml
<ide> xml = REXML::Document.new(topics(:first).to_xml(:indent => 0))
<ide> bonus_time_in_current_timezone = topics(:first).bonus_time.xmlschema
<ide> written_on_in_current_timezone = topics(:first).written_on.xmlschema
<del> last_read_in_current_timezone = topics(:first).last_read.xmlschema
<ide>
<ide> assert_equal "topic", xml.root.name
<ide> assert_equal "The First Topic" , xml.elements["//title"].text
<ide> def test_to_xml
<ide> assert_equal "integer", xml.elements["//parent-id"].attributes['type']
<ide> assert_equal "true", xml.elements["//parent-id"].attributes['nil']
<ide>
<del> if current_adapter?(:SybaseAdapter)
<del> assert_equal last_read_in_current_timezone, xml.elements["//last-read"].text
<del> assert_equal "dateTime" , xml.elements["//last-read"].attributes['type']
<del> else
<del> # Oracle enhanced adapter allows to define Date attributes in model class (see topic.rb)
<del> assert_equal "2004-04-15", xml.elements["//last-read"].text
<del> assert_equal "date" , xml.elements["//last-read"].attributes['type']
<del> end
<add> # Oracle enhanced adapter allows to define Date attributes in model class (see topic.rb)
<add> assert_equal "2004-04-15", xml.elements["//last-read"].text
<add> assert_equal "date" , xml.elements["//last-read"].attributes['type']
<ide>
<ide> # Oracle and DB2 don't have true boolean or time-only fields
<ide> unless current_adapter?(:OracleAdapter, :DB2Adapter)
<ide><path>activerecord/test/schema/schema.rb
<ide> def except(adapter_names_to_exclude)
<ide>
<ide> #put adapter specific setup here
<ide> case adapter_name
<del> # For Firebird, set the sequence values 10000 when create_table is called;
<del> # this prevents primary key collisions between "normally" created records
<del> # and fixture-based (YAML) records.
<del> when "Firebird"
<del> def create_table(*args, &block)
<del> ActiveRecord::Base.connection.create_table(*args, &block)
<del> ActiveRecord::Base.connection.execute "SET GENERATOR #{args.first}_seq TO 10000"
<del> end
<ide> when "PostgreSQL"
<ide> enable_uuid_ossp!(ActiveRecord::Base.connection)
<ide> create_table :uuid_parents, id: :uuid, force: true do |t| | 19 |
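Note: these tests gate on `current_adapter?` instead of maintaining per-database suites. A comparable capability-gating sketch in Python with pytest — the adapter name and the skip condition are illustrative:

```python
import datetime

import pytest

CURRENT_ADAPTER = "postgresql"  # assumed: resolved from the active connection

requires_time_type = pytest.mark.skipif(
    CURRENT_ADAPTER == "oracle",
    reason="Oracle has no TIME datatype",
)

@requires_time_type
def test_time_only_column():
    # Only meaningful on adapters that support a real TIME type.
    assert datetime.time(5, 42, 0).hour == 5
```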
Text | Text | add growthsimple to list of airflow users | bbdbe81e86acc36593cdad08eea05db444096a23 | <ide><path>README.md
<ide> Currently **officially** using Airflow:
<ide> 1. [Groupalia](http://es.groupalia.com) [[@jesusfcr](https://github.com/jesusfcr)]
<ide> 1. [Groupon](https://groupon.com) [[@stevencasey](https://github.com/stevencasey)]
<ide> 1. [Growbots](https://www.growbots.com/)[[@exploy](https://github.com/exploy)]
<add>1. [GrowthSimple](https://growthsimple.ai/)
<ide> 1. [GSN Games](https://www.gsngames.com)
<ide> 1. [Gusto](https://gusto.com) [[@frankhsu](https://github.com/frankhsu)]
<ide> 1. [Handshake](https://joinhandshake.com/) [[@mhickman](https://github.com/mhickman)] | 1 |
Python | Python | add additional assert | a3f5297e82f8ea4d3a723c2d1996a1392e065445 | <ide><path>integration/storage/base.py
<ide> def _test_objects(self, do_upload, do_download, size=1 * MB):
<ide> self.assertEqual([blob.name for blob in blobs], [blob_name])
<ide>
<ide> # check that the file can be read back
<del> self.assertEqual(do_download(obj), content)
<add> downloaded_content = do_download(obj)
<add> self.assertEqual(len(downloaded_content), size)
<add> self.assertEqual(downloaded_content, content)
<ide>
<ide> # delete the file
<ide> self.driver.delete_object(obj) | 1 |
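Note: asserting the length before the content means a size mismatch fails with a one-line message instead of a diff of megabyte-sized blobs. The ordering in isolation, with the storage driver faked by a dict (sketch):

```python
import os

def roundtrip(store, name, content):
    store[name] = content        # stand-in for upload_object
    downloaded = store[name]     # stand-in for download
    # Size first: a short failure message beats diffing 1 MB of bytes.
    assert len(downloaded) == len(content)
    assert downloaded == content

roundtrip({}, "blob", os.urandom(1024 * 1024))
```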
Ruby | Ruby | exclude ruby from unbrewed | 319ea4273e61944b41f033194496cdf6df9fe288 | <ide><path>Library/Homebrew/cmd/list.rb
<ide> def list
<ide> lib/python[23].[0-9]/*
<ide> lib/pypy/*
<ide> lib/pypy3/*
<add> lib/ruby/gems/[12].*
<add> lib/ruby/site_ruby/[12].*
<add> lib/ruby/vendor_ruby/[12].*
<ide> share/pypy/*
<ide> share/pypy3/*
<ide> share/doc/homebrew/* | 1 |
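Note: `list --unbrewed` filters prefix contents through shell-style globs like the ones added here. A Python sketch of the matching, using the new patterns against a made-up file list (fnmatch's `*` crossing `/` is close enough for illustration):

```python
from fnmatch import fnmatch

EXCLUDES = [
    "lib/ruby/gems/[12].*",
    "lib/ruby/site_ruby/[12].*",
    "lib/ruby/vendor_ruby/[12].*",
]

def unbrewed(paths):
    return [p for p in paths
            if not any(fnmatch(p, glob) for glob in EXCLUDES)]

files = ["lib/ruby/gems/2.0/specifications/foo.gemspec", "bin/mytool"]
print(unbrewed(files))  # ['bin/mytool'] -- the gem path is filtered out
```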
Ruby | Ruby | simplify code in as json encoder | 046e27a7338f2961c10e9d133e0e2229b51c2ba8 | <ide><path>activesupport/lib/active_support/json/encoding.rb
<ide> module Encoding #:nodoc:
<ide> class CircularReferenceError < StandardError; end
<ide>
<ide> class Encoder
<del> attr_reader :options
<ide>
<ide> def initialize(options = nil)
<ide> @options = options || {}
<ide> def options_for(value)
<ide> if value.is_a?(Array) || value.is_a?(Hash)
<ide> # hashes and arrays need to get encoder in the options, so that
<ide> # they can detect circular references.
<del> options.merge(:encoder => self)
<add> @options.merge(:encoder => self)
<ide> else
<del> options.dup
<add> @options.dup
<ide> end
<ide> end
<ide>
<ide> def as_json(options = nil) #:nodoc:
<ide> end
<ide>
<ide> module Enumerable
<del> def as_json(options = nil) #:nodoc:
<del> to_a.as_json(options)
<del> end
<del>end
<del>
<del>class Range
<del> def as_json(options = nil) #:nodoc:
<del> to_s
<del> end
<del>end
<del>
<del>class Array
<ide> def as_json(options = nil) #:nodoc:
<ide> # use encoder as a proxy to call as_json on all elements, to protect from circular references
<ide> encoder = options && options[:encoder] || ActiveSupport::JSON::Encoding::Encoder.new(options)
<ide> def encode_json(encoder) #:nodoc:
<ide> end
<ide> end
<ide>
<add>class Range
<add> def as_json(options = nil) #:nodoc:
<add> to_s
<add> end
<add>end
<add>
<ide> class Hash
<ide> def as_json(options = nil) #:nodoc:
<ide> # create a subset of the hash by applying :only or :except | 1 |
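Note: the point of threading one encoder (with its options) through nested hashes and arrays is circular-reference detection. A simplified Python sketch of that mechanism — the exception name mirrors the Rails one, and string keys are assumed:

```python
class CircularReferenceError(Exception):
    pass

def encode(value, seen=frozenset()):
    if isinstance(value, (list, dict)):
        if id(value) in seen:
            raise CircularReferenceError("object references itself")
        seen = seen | {id(value)}  # the same 'seen' flows into children
        if isinstance(value, list):
            return "[%s]" % ",".join(encode(v, seen) for v in value)
        return "{%s}" % ",".join(
            '"%s":%s' % (k, encode(v, seen)) for k, v in value.items())
    return '"%s"' % value if isinstance(value, str) else repr(value)

a = []
a.append(a)
try:
    encode(a)
except CircularReferenceError as err:
    print(err)  # object references itself
```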
Python | Python | update tensorboard callback | b602a93e17964cfcedda09eec90f140665246431 | <ide><path>keras/backend/tensorflow_backend.py
<ide> def permute_dimensions(x, pattern):
<ide> '''
<ide> return tf.transpose(x, perm=pattern)
<ide>
<add>
<ide> def repeat_elements(x, rep, axis):
<ide> '''Repeats the elements of a tensor along an axis, like np.repeat
<ide>
<ide><path>keras/callbacks.py
<ide>
<ide> from collections import deque
<ide> from .utils.generic_utils import Progbar
<del>from .backend import _BACKEND
<ide> from keras import backend as K
<ide>
<ide>
<ide> class TensorBoard(Callback):
<ide> https://www.tensorflow.org/versions/master/how_tos/summaries_and_tensorboard/index.html
<ide>
<ide> # Arguments
<del> model: a keras model linked to a tensorflow session
<del> feed: a dictionnary mapping tensors (inputs, outputs, weigths)
<del> from the model._test keras function i.e. model._test.inputs
<del> to the corresponding arrays.
<del> freq: the frequency at which the callback will output
<del> parameters and metrics to the log
<ide> log_dir: the path of the directory where to save the log
<ide> files to be parsed by tensorboard
<add> histogram_freq: frequency (in epochs) at which to compute activation
<add> histograms for the layers of the model. If set to 0,
<add> histograms won't be computed.
<ide> '''
<del> def __init__(self, model, feed, freq=2, log_dir='./logs',
<del> show_accuracy=False):
<add> def __init__(self, log_dir='./logs', histogram_freq=0):
<ide> super(Callback, self).__init__()
<del> assert _BACKEND == 'tensorflow', \
<del> 'TensorBoard callback only works with the tensorflow backend'
<add> if K._BACKEND != 'tensorflow':
<add> raise Exception('TensorBoard callback only works '
<add> 'with the TensorFlow backend')
<add> self.log_dir = log_dir
<add> self.histogram_freq = histogram_freq
<add>
<add> def _set_model(self, model):
<ide> import tensorflow as tf
<ide> import keras.backend.tensorflow_backend as KTF
<ide>
<ide> self.model = model
<del> self.freq = freq
<del> self.log_dir = log_dir
<ide> self.sess = KTF._get_session()
<del> self.feed = feed
<del> mod_type = self.model.get_config()['name']
<del> if mod_type == 'Sequential':
<del> layers = {l.get_config()['name']: l for l in self.model.layers}
<del> elif mod_type == 'Graph':
<del> layers = self.model.nodes
<del> else:
<del> raise Exception('Unrecognized model:',
<del> self.model.get_config()['name'])
<del> for l in layers:
<del> cur_layer = layers[l]
<del> if hasattr(cur_layer, 'W'):
<del> tf.histogram_summary('{}_W'.format(l), cur_layer.W)
<del> if hasattr(cur_layer, 'b'):
<del> tf.histogram_summary('{}_b'.format(l), cur_layer.b)
<del> if hasattr(cur_layer, 'get_output'):
<del> tf.histogram_summary('{}_out'.format(l),
<del> cur_layer.get_output())
<del> f_output = self.model._test
<del> if mod_type == 'Sequential':
<del> if show_accuracy is True:
<del> f_output = self.model._test_with_acc
<del> tf.scalar_summary('Accuracy',
<del> f_output.outputs[1])
<del> tf.scalar_summary('Loss',
<del> f_output.outputs[0])
<del> else:
<del> losses = [self.model.loss[loss] for loss in self.model.loss]
<del> if len(losses) > 1:
<del> l_name = " + ".join(losses)
<add> if self.histogram_freq:
<add> mod_type = self.model.get_config()['name']
<add> if mod_type == 'Sequential':
<add> layers = {l.get_config()['name']: l for l in self.model.layers}
<add> elif mod_type == 'Graph':
<add> layers = self.model.nodes
<ide> else:
<del> l_name = losses[0]
<del> tf.scalar_summary(l_name,
<del> f_output.outputs[0])
<add> raise Exception('Unrecognized model:',
<add> self.model.get_config()['name'])
<add> for l in layers:
<add> cur_layer = layers[l]
<add> if hasattr(cur_layer, 'W'):
<add> tf.histogram_summary('{}_W'.format(l), cur_layer.W)
<add> if hasattr(cur_layer, 'b'):
<add> tf.histogram_summary('{}_b'.format(l), cur_layer.b)
<add> if hasattr(cur_layer, 'get_output'):
<add> tf.histogram_summary('{}_out'.format(l),
<add> cur_layer.get_output())
<ide> self.merged = tf.merge_all_summaries()
<ide> self.writer = tf.train.SummaryWriter(self.log_dir,
<ide> self.sess.graph_def)
<ide>
<add> def on_epoch_begin(self, epoch, logs={}):
<add> self.seen = 0
<add> self.totals = {}
<add>
<add> def on_batch_end(self, batch, logs={}):
<add> batch_size = logs.get('size', 0)
<add> self.seen += batch_size
<add> for k, v in logs.items():
<add> if k in self.totals:
<add> self.totals[k] += v * batch_size
<add> else:
<add> self.totals[k] = v * batch_size
<add>
<ide> def on_epoch_end(self, epoch, logs={}):
<del> if epoch % self.freq == 0:
<del> result = self.sess.run([self.merged],
<del> feed_dict=self.feed)
<del> summary_str = result[0]
<del> self.writer.add_summary(summary_str, epoch)
<add> import tensorflow as tf
<add>
<add> if self.model.validation_data and self.histogram_freq:
<add> if epoch % self.histogram_freq == 0:
<add> if self.params.get('show_accuracy'):
<add> test_function = self.model._test_with_acc
<add> else:
<add> test_function = self.model._test
<add> names = [v.name for v in test_function.inputs]
<add> feed_dict = dict(zip(names, self.model.validation_data))
<add> result = self.sess.run([self.merged], feed_dict=feed_dict)
<add> summary_str = result[0]
<add> self.writer.add_summary(summary_str, epoch)
<add>
<add> for name, value in self.totals.items() + logs.items():
<add> if name in ['batch', 'size']:
<add> continue
<add> summary = tf.Summary()
<add> summary_value = summary.value.add()
<add> summary_value.simple_value = value
<add> summary_value.tag = name
<add> self.writer.add_summary(summary, epoch)
<add> self.writer.flush()
<ide><path>keras/models.py
<ide> def _fit(self, f, ins, out_labels=[], batch_size=128,
<ide> Abstract fit function for f(ins).
<ide> Assume that f returns a list, labelled by out_labels.
<ide> '''
<add> self.training_data = ins
<add> self.validation_data = val_ins
<ide> do_validation = False
<ide> if val_f and val_ins:
<ide> do_validation = True
<ide><path>tests/keras/test_callbacks.py
<ide> from keras.utils.test_utils import get_test_data
<ide> from keras import backend as K
<ide> from keras.utils import np_utils
<del>from keras.callbacks import _BACKEND
<ide>
<ide> input_dim = 2
<ide> nb_hidden = 4
<ide> def test_LearningRateScheduler():
<ide> assert (float(K.get_value(model.optimizer.lr)) - 0.2) < K.epsilon()
<ide>
<ide>
<del>@pytest.mark.skipif(_BACKEND != 'tensorflow',
<add>@pytest.mark.skipif(K._BACKEND != 'tensorflow',
<ide> reason="Requires tensorflow backend")
<ide> def test_TensorBoard():
<ide> import shutil
<ide> def test_TensorBoard():
<ide> model.add(Dense(nb_class, activation='softmax'))
<ide> model.compile(loss='categorical_crossentropy', optimizer='sgd')
<ide>
<del> feed = {model._test.inputs[0]: X_train, model._test.inputs[1]: y_train,
<del> model._test.inputs[2]: np.ones(train_samples)}
<del> tsb = callbacks.TensorBoard(model=model, feed=feed, log_dir=filepath,
<del> show_accuracy=False)
<add> tsb = callbacks.TensorBoard(log_dir=filepath, histogram_freq=1)
<ide> cbks = [tsb]
<ide> model.fit(X_train, y_train, batch_size=batch_size, show_accuracy=True,
<del> validation_data=(X_test, y_test), callbacks=cbks, nb_epoch=5)
<add> validation_data=(X_test, y_test), callbacks=cbks, nb_epoch=2)
<ide> assert os.path.exists(filepath)
<ide> shutil.rmtree(filepath)
<ide>
<ide> def test_TensorBoard():
<ide> model.add(Dense(nb_class, activation='softmax'))
<ide> model.compile(loss='categorical_crossentropy', optimizer='sgd')
<ide>
<del> feed = {model._test.inputs[0]: X_train, model._test.inputs[1]: y_train,
<del> model._test.inputs[2]: np.ones(train_samples)}
<del> tsb = callbacks.TensorBoard(model=model, feed=feed, log_dir=filepath,
<del> show_accuracy=False)
<add> tsb = callbacks.TensorBoard(log_dir=filepath, histogram_freq=1)
<ide> cbks = [tsb]
<ide> model.fit(X_train, y_train, batch_size=batch_size, show_accuracy=True,
<del> validation_data=(X_test, y_test), callbacks=cbks, nb_epoch=5)
<add> validation_data=(X_test, y_test), callbacks=cbks, nb_epoch=2)
<ide> assert os.path.exists(filepath)
<ide> shutil.rmtree(filepath)
<ide>
<ide> def test_TensorBoard():
<ide> model.add_output(name='output', input='last_dense')
<ide> model.compile(optimizer='sgd', loss={'output': 'mse'})
<ide>
<del> feed = {model._test.inputs[0]: X_train, model._test.inputs[1]: y_train,
<del> model._test.inputs[2]: np.ones(train_samples)}
<del> tsb = callbacks.TensorBoard(model=model, feed=feed, log_dir=filepath,
<del> show_accuracy=False)
<add> tsb = callbacks.TensorBoard(log_dir=filepath, histogram_freq=1)
<ide> cbks = [tsb]
<del> model.fit({'X_vars': X_train, 'output': y_train}, batch_size=batch_size,
<add> model.fit({'X_vars': X_train, 'output': y_train},
<add> batch_size=batch_size,
<ide> validation_data={'X_vars': X_test, 'output': y_test},
<del> callbacks=cbks, nb_epoch=5)
<add> callbacks=cbks, nb_epoch=2)
<ide> assert os.path.exists(filepath)
<ide> shutil.rmtree(filepath)
<ide>
<ide> KTF._set_session(old_session)
<ide>
<ide> if __name__ == '__main__':
<ide> pytest.main([__file__])
<del> | 4 |
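Note: the rewritten callback accumulates per-batch metrics weighted by batch size, so uneven final batches don't skew epoch-level numbers. The bookkeeping in isolation, as a Python sketch (the division into a mean is this sketch's addition — the patch logs the raw totals):

```python
totals, seen = {}, 0

def on_batch_end(logs):
    global seen
    size = logs.get("size", 0)
    seen += size
    for key, value in logs.items():
        if key in ("batch", "size"):
            continue
        totals[key] = totals.get(key, 0.0) + value * size

def epoch_means():
    # Each batch contributes in proportion to its size.
    return {k: v / seen for k, v in totals.items()}

on_batch_end({"size": 32, "loss": 0.5})
on_batch_end({"size": 16, "loss": 0.2})
print(epoch_means())  # {'loss': 0.4}
```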
Javascript | Javascript | correct spelling error | 95522cc11f01528b569f88a089def9acded938b4 | <ide><path>src/Angular.js
<ide> function isArrayLike(obj) {
<ide> * is the value of an object property or an array element and `key` is the object property key or
<ide> * array element index. Specifying a `context` for the function is optional.
<ide> *
<del> * It is worth nothing that `.forEach` does not iterate over inherited properties because it filters
<add> * It is worth noting that `.forEach` does not iterate over inherited properties because it filters
<ide> * using the `hasOwnProperty` method.
<ide> *
<ide> <pre> | 1 |
Javascript | Javascript | simulate input event instead of relying on native | 580e8f0dbbbb7963c27fee42fd843c6d2a89e2ca | <ide><path>src/core/ReactDefaultInjection.js
<ide> var ReactDOMForm = require('ReactDOMForm');
<ide> var DefaultEventPluginOrder = require('DefaultEventPluginOrder');
<ide> var EnterLeaveEventPlugin = require('EnterLeaveEventPlugin');
<ide> var EventPluginHub = require('EventPluginHub');
<add>var InputEventPlugin = require('InputEventPlugin');
<ide> var ReactInstanceHandles = require('ReactInstanceHandles');
<ide> var SimpleEventPlugin = require('SimpleEventPlugin');
<ide>
<ide> function inject() {
<ide> */
<ide> EventPluginHub.injection.injectEventPluginsByName({
<ide> 'SimpleEventPlugin': SimpleEventPlugin,
<del> 'EnterLeaveEventPlugin': EnterLeaveEventPlugin
<add> 'EnterLeaveEventPlugin': EnterLeaveEventPlugin,
<add> 'InputEventPlugin': InputEventPlugin
<ide> });
<ide>
<ide> /*
<ide><path>src/core/ReactEvent.js
<ide> function listenAtTopLevel(touchNotMouse) {
<ide> trapBubbledEvent(topLevelTypes.topKeyUp, 'keyup', mountAt);
<ide> trapBubbledEvent(topLevelTypes.topKeyPress, 'keypress', mountAt);
<ide> trapBubbledEvent(topLevelTypes.topKeyDown, 'keydown', mountAt);
<del> trapBubbledEvent(topLevelTypes.topInput, 'input', mountAt);
<ide> trapBubbledEvent(topLevelTypes.topChange, 'change', mountAt);
<add> trapBubbledEvent(topLevelTypes.topInput, 'input', mountAt);
<add> trapBubbledEvent(topLevelTypes.topCut, 'cut', mountAt);
<add> trapBubbledEvent(topLevelTypes.topPaste, 'paste', mountAt);
<ide> trapBubbledEvent(
<ide> topLevelTypes.topDOMCharacterDataModified,
<ide> 'DOMCharacterDataModified',
<ide><path>src/event/EventConstants.js
<ide> var topLevelTypes = keyMirror({
<ide> topBlur: null,
<ide> topChange: null,
<ide> topClick: null,
<add> topCut: null,
<ide> topDOMCharacterDataModified: null,
<ide> topDoubleClick: null,
<ide> topFocus: null,
<ide> var topLevelTypes = keyMirror({
<ide> topMouseOver: null,
<ide> topMouseUp: null,
<ide> topMouseWheel: null,
<add> topPaste: null,
<ide> topScroll: null,
<ide> topSubmit: null,
<ide> topTouchCancel: null,
<ide><path>src/eventPlugins/DefaultEventPluginOrder.js
<ide> var DefaultEventPluginOrder = [
<ide> keyOf({SimpleEventPlugin: null}),
<ide> keyOf({TapEventPlugin: null}),
<ide> keyOf({EnterLeaveEventPlugin: null}),
<add> keyOf({InputEventPlugin: null}),
<ide> keyOf({AnalyticsEventPlugin: null})
<ide> ];
<ide>
<ide><path>src/eventPlugins/InputEventPlugin.js
<add>/**
<add> * Copyright 2013 Facebook, Inc.
<add> *
<add> * Licensed under the Apache License, Version 2.0 (the "License");
<add> * you may not use this file except in compliance with the License.
<add> * You may obtain a copy of the License at
<add> *
<add> * http://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing, software
<add> * distributed under the License is distributed on an "AS IS" BASIS,
<add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<add> * See the License for the specific language governing permissions and
<add> * limitations under the License.
<add> *
<add> * @providesModule InputEventPlugin
<add> */
<add>
<add>"use strict";
<add>
<add>var AbstractEvent = require('AbstractEvent');
<add>var EventConstants = require('EventConstants');
<add>var EventPluginHub = require('EventPluginHub');
<add>var EventPropagators = require('EventPropagators');
<add>
<add>var keyOf = require('keyOf');
<add>
<add>var topLevelTypes = EventConstants.topLevelTypes;
<add>
<add>var abstractEventTypes = {
<add> input: {
<add> phasedRegistrationNames: {
<add> bubbled: keyOf({onInput: null}),
<add> captured: keyOf({onInputCapture: null})
<add> }
<add> }
<add>};
<add>
<add>/**
<add> * @see EventPluginHub.extractAbstractEvents
<add> */
<add>var extractAbstractEvents = function(
<add> topLevelType,
<add> nativeEvent,
<add> renderedTargetID,
<add> renderedTarget) {
<add>
<add> var defer, key;
<add> switch (topLevelType) {
<add> case topLevelTypes.topInput:
<add> // When the native input event is triggered, we definitely want to
<add> // forward it along. However, IE9's input event doesn't get triggered
<add> // when deleting text, and IE8 doesn't support input at all, so we
<add> // simulate it on change, cut, paste, and keydown.
<add> case topLevelTypes.topChange:
<add> defer = false;
<add> break;
<add> case topLevelTypes.topCut:
<add> case topLevelTypes.topPaste:
<add> defer = true;
<add> break;
<add> case topLevelTypes.topKeyDown:
<add> key = nativeEvent.keyCode;
<add> // Ignore command, modifiers, and arrow keys, respectively
<add> if (key === 91 || (15 < key && key < 19) || (37 <= key && key <= 40)) {
<add> return;
<add> }
<add> defer = true;
<add> break;
<add> default:
<add> return;
<add> }
<add>
<add> var type = abstractEventTypes.input;
<add> var abstractTargetID = renderedTargetID;
<add> var abstractEvent = AbstractEvent.getPooled(
<add> type,
<add> abstractTargetID,
<add> topLevelType,
<add> nativeEvent
<add> );
<add> EventPropagators.accumulateTwoPhaseDispatches(abstractEvent);
<add>
<add> if (defer) {
<add> setTimeout(function() {
<add> EventPluginHub.enqueueAbstractEvents(abstractEvent);
<add> EventPluginHub.processAbstractEventQueue();
<add> }, 0);
<add> } else {
<add> return abstractEvent;
<add> }
<add>};
<add>
<add>var InputEventPlugin = {
<add> abstractEventTypes: abstractEventTypes,
<add> extractAbstractEvents: extractAbstractEvents
<add>};
<add>
<add>module.exports = InputEventPlugin;
<ide><path>src/eventPlugins/SimpleEventPlugin.js
<ide> var SimpleEventPlugin = {
<ide> captured: keyOf({onKeyDownCapture: true})
<ide> }
<ide> },
<del> input: {
<del> phasedRegistrationNames: {
<del> bubbled: keyOf({onInput: true}),
<del> captured: keyOf({onInputCapture: true})
<del> }
<del> },
<ide> focus: {
<ide> phasedRegistrationNames: {
<ide> bubbled: keyOf({onFocus: true}),
<ide> SimpleEventPlugin.topLevelTypesToAbstract = {
<ide> topKeyUp: SimpleEventPlugin.abstractEventTypes.keyUp,
<ide> topKeyPress: SimpleEventPlugin.abstractEventTypes.keyPress,
<ide> topKeyDown: SimpleEventPlugin.abstractEventTypes.keyDown,
<del> topInput: SimpleEventPlugin.abstractEventTypes.input,
<ide> topFocus: SimpleEventPlugin.abstractEventTypes.focus,
<ide> topBlur: SimpleEventPlugin.abstractEventTypes.blur,
<ide> topScroll: SimpleEventPlugin.abstractEventTypes.scroll, | 6 |
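Note: the plugin fires immediately for native `input`/`change` but defers for `cut`, `paste`, and eligible `keydown`, because the element's value only reflects the edit after those handlers return. The defer-then-read pattern, sketched in Python with a toy task queue standing in for `setTimeout(fn, 0)`:

```python
import queue

tasks = queue.Queue()
field_value = ""

def defer(fn):
    tasks.put(fn)  # stand-in for setTimeout(fn, 0)

def on_keydown(char):
    # The field hasn't applied the keystroke yet; read it on the next tick.
    defer(lambda: print("input event, value =", repr(field_value)))

on_keydown("a")
field_value = "a"      # the "browser" applies the edit after the handler
while not tasks.empty():
    tasks.get()()      # drain the queue -> prints value 'a'
```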
Go | Go | add check for iptables xlock support | 034babf1753741184c1155a7346ecec86fc51e2c | <ide><path>pkg/iptables/iptables.go
<ide> const (
<ide> var (
<ide> ErrIptablesNotFound = errors.New("Iptables not found")
<ide> nat = []string{"-t", "nat"}
<add> supportsXlock = false
<ide> )
<ide>
<ide> type Chain struct {
<ide> Name string
<ide> Bridge string
<ide> }
<ide>
<add>func init() {
<add> supportsXlock = exec.Command("iptables", "--wait", "-L", "-n").Run() == nil
<add>}
<add>
<ide> func NewChain(name, bridge string) (*Chain, error) {
<ide> if output, err := Raw("-t", "nat", "-N", name); err != nil {
<ide> return nil, err
<ide> func Raw(args ...string) ([]byte, error) {
<ide> if err != nil {
<ide> return nil, ErrIptablesNotFound
<ide> }
<add>
<add> if supportsXlock {
<add> args = append([]string{"--wait"}, args...)
<add> }
<add>
<ide> if os.Getenv("DEBUG") != "" {
<ide> fmt.Printf("[DEBUG] [iptables]: %s, %v\n", path, args)
<ide> }
<del> output, err := exec.Command(path, append([]string{"--wait"}, args...)...).CombinedOutput()
<add>
<add> output, err := exec.Command(path, args...).CombinedOutput()
<ide> if err != nil {
<ide> return nil, fmt.Errorf("iptables failed: iptables %v: %s (%s)", strings.Join(args, " "), output, err)
<ide> }
<add>
<ide> return output, err
<ide> } | 1 |
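Note: probing for `--wait` by running a harmless command once at startup sidesteps version parsing entirely. The same probe in Python — the binary is assumed to be on PATH:

```python
import subprocess

def supports_xlock():
    """True if 'iptables --wait' exits 0; no version strings involved."""
    try:
        return subprocess.run(
            ["iptables", "--wait", "-L", "-n"],
            capture_output=True).returncode == 0
    except FileNotFoundError:
        return False

# Prepend the flag only when the installed iptables understands it.
ARGS_PREFIX = ["--wait"] if supports_xlock() else []
```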
Ruby | Ruby | ignore interrupts while cleaning up | a44f725c8b0ed36fa6d82641ace3034a17cedce2 | <ide><path>Library/Homebrew/formula_installer.rb
<ide> def install_dependency(dep, inherited_options)
<ide> fi.caveats
<ide> fi.finish
<ide> rescue Exception
<del> tmp_keg.rename(installed_keg) if tmp_keg && !installed_keg.directory?
<del> linked_keg.link if linked_keg
<add> ignore_interrupts do
<add> tmp_keg.rename(installed_keg) if tmp_keg && !installed_keg.directory?
<add> linked_keg.link if linked_keg
<add> end
<ide> raise
<ide> else
<del> tmp_keg.rmtree if tmp_keg && tmp_keg.directory?
<add> ignore_interrupts { tmp_keg.rmtree if tmp_keg && tmp_keg.directory? }
<ide> end
<ide>
<ide> def caveats | 1 |
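Note: masking Ctrl-C while the half-installed keg is renamed back keeps the cleanup from being torn in half by a second interrupt. A Python context manager with the same shape (sketch; main-thread only):

```python
import signal
from contextlib import contextmanager

@contextmanager
def ignore_interrupts():
    # Swallow SIGINT for the duration of the block, then restore.
    previous = signal.signal(signal.SIGINT, signal.SIG_IGN)
    try:
        yield
    finally:
        signal.signal(signal.SIGINT, previous)

with ignore_interrupts():
    pass  # rename/rmtree cleanup would go here, uninterrupted
```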
Ruby | Ruby | use #any? instead of #detect | 5120429c3138d46490a1c4a611ebd93410f4f885 | <ide><path>activesupport/lib/active_support/time_with_zone.rb
<ide> def ago(other)
<ide> def advance(options)
<ide> # If we're advancing a value of variable length (i.e., years, weeks, months, days), advance from #time,
<ide> # otherwise advance from #utc, for accuracy when moving across DST boundaries
<del> if options.detect {|k,v| [:years, :weeks, :months, :days].include? k}
<add> if options.any? {|k,v| [:years, :weeks, :months, :days].include? k}
<ide> method_missing(:advance, options)
<ide> else
<ide> utc.advance(options).in_time_zone(time_zone) | 1 |
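Note: `#detect` returns the matching element and forces a truthiness check on it, while `#any?` answers the yes/no question directly. The same distinction in Python:

```python
options = {"days": 3, "hours": 2}
variable_units = {"years", "weeks", "months", "days"}

# detect-style: fetch the element, then test it
found = next((k for k in options if k in variable_units), None)
uses_variable_length = found is not None

# any?-style: ask the question directly
uses_variable_length = any(k in variable_units for k in options)
print(uses_variable_length)  # True
```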
Java | Java | hide builder implementations in runtimehints api | 780d07217b35226c2827111d9a0cb2ed5f84176c | <ide><path>spring-core/src/main/java/org/springframework/aot/hint/ClassProxyHint.java
<ide> import java.util.LinkedList;
<ide> import java.util.List;
<ide> import java.util.Objects;
<del>import java.util.stream.Collectors;
<ide>
<ide> /**
<ide> * A hint that describes the need for a proxy against a concrete class.
<ide> public static Builder of(TypeReference targetClass) {
<ide> * @return a builder for the hint
<ide> */
<ide> public static Builder of(Class<?> targetClass) {
<add> if (targetClass.isInterface()) {
<add> throw new IllegalArgumentException("Should not be an interface: " + targetClass);
<add> }
<ide> return of(TypeReference.of(targetClass));
<ide> }
<ide>
<ide> public static class Builder {
<ide> private final LinkedList<TypeReference> proxiedInterfaces = new LinkedList<>();
<ide>
<ide>
<del> public Builder(TypeReference targetClass) {
<add> Builder(TypeReference targetClass) {
<ide> this.targetClass = targetClass;
<ide> }
<ide>
<ide> public Builder proxiedInterfaces(TypeReference... proxiedInterfaces) {
<ide> */
<ide> public Builder proxiedInterfaces(Class<?>... proxiedInterfaces) {
<ide> this.proxiedInterfaces.addAll(Arrays.stream(proxiedInterfaces)
<del> .map(TypeReference::of).collect(Collectors.toList()));
<add> .map(TypeReference::of).toList());
<ide> return this;
<ide> }
<ide>
<ide> /**
<ide> * Create a {@link ClassProxyHint} based on the state of this builder.
<ide> * @return a class proxy hint
<ide> */
<del> public ClassProxyHint build() {
<add> ClassProxyHint build() {
<ide> return new ClassProxyHint(this);
<ide> }
<ide>
<ide><path>spring-core/src/main/java/org/springframework/aot/hint/ExecutableHint.java
<ide> public List<ExecutableMode> getModes() {
<ide> /**
<ide> * Builder for {@link ExecutableHint}.
<ide> */
<del> public static final class Builder {
<add> public static class Builder {
<ide>
<ide> private final String name;
<ide>
<ide> public static final class Builder {
<ide> private final Set<ExecutableMode> modes = new LinkedHashSet<>();
<ide>
<ide>
<del> private Builder(String name, List<TypeReference> parameterTypes) {
<add> Builder(String name, List<TypeReference> parameterTypes) {
<ide> this.name = name;
<ide> this.parameterTypes = parameterTypes;
<ide> }
<ide> public Builder setModes(ExecutableMode... modes) {
<ide> * Create an {@link ExecutableHint} based on the state of this builder.
<ide> * @return an executable hint
<ide> */
<del> public ExecutableHint build() {
<add> ExecutableHint build() {
<ide> return new ExecutableHint(this);
<ide> }
<ide>
<ide><path>spring-core/src/main/java/org/springframework/aot/hint/FieldHint.java
<ide> public static class Builder {
<ide> private boolean allowUnsafeAccess;
<ide>
<ide>
<del> public Builder(String name) {
<add> Builder(String name) {
<ide> this.name = name;
<ide> }
<ide>
<ide> public Builder allowUnsafeAccess(boolean allowUnsafeAccess) {
<ide> * Create a {@link FieldHint} based on the state of this builder.
<ide> * @return a field hint
<ide> */
<del> public FieldHint build() {
<add> FieldHint build() {
<ide> return new FieldHint(this);
<ide> }
<ide>
<ide><path>spring-core/src/main/java/org/springframework/aot/hint/JdkProxyHint.java
<ide> import java.util.LinkedList;
<ide> import java.util.List;
<ide> import java.util.Objects;
<del>import java.util.stream.Collectors;
<ide>
<ide> /**
<ide> * A hint that describes the need of a JDK {@link Proxy}, that is an
<ide> private JdkProxyHint(Builder builder) {
<ide> this.proxiedInterfaces = List.copyOf(builder.proxiedInterfaces);
<ide> }
<ide>
<add> /**
<add> * Initialize a builder with the proxied interfaces to use.
<add> * @param proxiedInterfaces the interfaces the proxy should implement
<add> * @return a builder for the hint
<add> */
<add> public static Builder of(TypeReference... proxiedInterfaces) {
<add> return new Builder().proxiedInterfaces(proxiedInterfaces);
<add> }
<add>
<add> /**
<add> * Initialize a builder with the proxied interfaces to use.
<add> * @param proxiedInterfaces the interfaces the proxy should implement
<add> * @return a builder for the hint
<add> */
<add> public static Builder of(Class<?>... proxiedInterfaces) {
<add> return new Builder().proxiedInterfaces(proxiedInterfaces);
<add> }
<add>
<ide> /**
<ide> * Return the interfaces to be proxied.
<ide> * @return the interfaces that the proxy should implement
<ide> public int hashCode() {
<ide> */
<ide> public static class Builder {
<ide>
<del> private final LinkedList<TypeReference> proxiedInterfaces = new LinkedList<>();
<add> private final LinkedList<TypeReference> proxiedInterfaces;
<ide>
<add> Builder() {
<add> this.proxiedInterfaces = new LinkedList<>();
<add> }
<ide>
<ide> /**
<ide> * Add the specified interfaces that the proxy should implement.
<ide> public Builder proxiedInterfaces(TypeReference... proxiedInterfaces) {
<ide> * @return {@code this}, to facilitate method chaining
<ide> */
<ide> public Builder proxiedInterfaces(Class<?>... proxiedInterfaces) {
<del> this.proxiedInterfaces.addAll(Arrays.stream(proxiedInterfaces)
<del> .map(TypeReference::of).collect(Collectors.toList()));
<add> this.proxiedInterfaces.addAll(toTypeReferences(proxiedInterfaces));
<ide> return this;
<ide> }
<ide>
<ide> /**
<ide> * Create a {@link JdkProxyHint} based on the state of this builder.
<ide> * @return a jdk proxy hint
<ide> */
<del> public JdkProxyHint build() {
<add> JdkProxyHint build() {
<ide> return new JdkProxyHint(this);
<ide> }
<ide>
<add> private static List<TypeReference> toTypeReferences(Class<?>... proxiedInterfaces) {
<add> List<String> concreteTypes = Arrays.stream(proxiedInterfaces)
<add> .filter(candidate -> !candidate.isInterface()).map(Class::getName).toList();
<add> if (!concreteTypes.isEmpty()) {
<add> throw new IllegalArgumentException("Not an interface: " + concreteTypes);
<add> }
<add> return Arrays.stream(proxiedInterfaces).map(TypeReference::of).toList();
<add> }
<add>
<ide> }
<ide>
<ide> }
<ide><path>spring-core/src/main/java/org/springframework/aot/hint/ProxyHints.java
<ide>
<ide> package org.springframework.aot.hint;
<ide>
<del>import java.util.Arrays;
<ide> import java.util.LinkedHashSet;
<del>import java.util.List;
<ide> import java.util.Set;
<ide> import java.util.function.Consumer;
<del>import java.util.function.Supplier;
<del>import java.util.stream.Collectors;
<ide> import java.util.stream.Stream;
<ide>
<ide> import org.springframework.aot.hint.ClassProxyHint.Builder;
<ide> public Stream<ClassProxyHint> classProxies() {
<ide>
<ide> /**
<ide> * Register a {@link JdkProxyHint}.
<del> * @param hint the supplier to the hint
<add>	 * @param jdkProxyHint a consumer to customize the hint
<ide> * @return {@code this}, to facilitate method chaining
<ide> */
<del> public ProxyHints registerJdkProxy(Supplier<JdkProxyHint> hint) {
<del> this.jdkProxies.add(hint.get());
<add> public ProxyHints registerJdkProxy(Consumer<JdkProxyHint.Builder> jdkProxyHint) {
<add> JdkProxyHint.Builder builder = new JdkProxyHint.Builder();
<add> jdkProxyHint.accept(builder);
<add> this.jdkProxies.add(builder.build());
<ide> return this;
<ide> }
<ide>
<ide> public ProxyHints registerJdkProxy(Supplier<JdkProxyHint> hint) {
<ide> * @return {@code this}, to facilitate method chaining
<ide> */
<ide> public ProxyHints registerJdkProxy(TypeReference... proxiedInterfaces) {
<del> return registerJdkProxy(() -> new JdkProxyHint.Builder()
<del> .proxiedInterfaces(proxiedInterfaces).build());
<add> return registerJdkProxy(jdkProxyHint ->
<add> jdkProxyHint.proxiedInterfaces(proxiedInterfaces));
<ide> }
<ide>
<ide> /**
<ide> public ProxyHints registerJdkProxy(TypeReference... proxiedInterfaces) {
<ide> * @return {@code this}, to facilitate method chaining
<ide> */
<ide> public ProxyHints registerJdkProxy(Class<?>... proxiedInterfaces) {
<del> List<String> concreteTypes = Arrays.stream(proxiedInterfaces)
<del> .filter(candidate -> !candidate.isInterface()).map(Class::getName).collect(Collectors.toList());
<del> if (!concreteTypes.isEmpty()) {
<del> throw new IllegalArgumentException("Not an interface: " + concreteTypes);
<del> }
<del> return registerJdkProxy(() -> new JdkProxyHint.Builder()
<del> .proxiedInterfaces(proxiedInterfaces).build());
<add> return registerJdkProxy(jdkProxyHint ->
<add> jdkProxyHint.proxiedInterfaces(proxiedInterfaces));
<ide> }
<ide>
<ide> /**
<ide> public ProxyHints registerJdkProxy(Class<?>... proxiedInterfaces) {
<ide> * @return {@code this}, to facilitate method chaining
<ide> */
<ide> public ProxyHints registerClassProxy(TypeReference targetClass, Consumer<Builder> classProxyHint) {
<del> Builder builder = ClassProxyHint.of(targetClass);
<del> classProxyHint.accept(builder);
<del> this.classProxies.add(builder.build());
<del> return this;
<add> return addClassProxyHint(ClassProxyHint.of(targetClass), classProxyHint);
<ide> }
<ide>
<ide> /**
<ide> public ProxyHints registerClassProxy(TypeReference targetClass, Consumer<Builder
<ide> * @return {@code this}, to facilitate method chaining
<ide> */
<ide> public ProxyHints registerClassProxy(Class<?> targetClass, Consumer<Builder> classProxyHint) {
<del> if (targetClass.isInterface()) {
<del> throw new IllegalArgumentException("Should not be an interface: " + targetClass);
<del> }
<del> return registerClassProxy(TypeReference.of(targetClass), classProxyHint);
<add> return addClassProxyHint(ClassProxyHint.of(targetClass), classProxyHint);
<add> }
<add>
<add> private ProxyHints addClassProxyHint(ClassProxyHint.Builder builder, Consumer<ClassProxyHint.Builder> classProxyHint) {
<add> classProxyHint.accept(builder);
<add> this.classProxies.add(builder.build());
<add> return this;
<ide> }
<ide>
<ide> }
<ide><path>spring-core/src/main/java/org/springframework/aot/hint/ResourceHints.java
<ide> import java.util.stream.Stream;
<ide>
<ide> import org.springframework.aot.hint.ResourcePatternHint.Builder;
<add>import org.springframework.lang.Nullable;
<ide>
<ide> /**
<ide> * Gather the need for resources available at runtime.
<ide> public Stream<ResourceBundleHint> resourceBundles() {
<ide> * @param resourceHint a builder to further customize the resource pattern
<ide> * @return {@code this}, to facilitate method chaining
<ide> */
<del> public ResourceHints registerPattern(String include, Consumer<Builder> resourceHint) {
<add> public ResourceHints registerPattern(String include, @Nullable Consumer<Builder> resourceHint) {
<ide> Builder builder = new Builder().includes(include);
<ide> if (resourceHint != null) {
<ide> resourceHint.accept(builder);
<ide> private String toIncludePattern(TypeReference type) {
<ide> return candidate.replace(".", "/") + ".class";
<ide> }
<ide>
<del> private void buildName(TypeReference type, StringBuilder sb) {
<add> private void buildName(@Nullable TypeReference type, StringBuilder sb) {
<ide> if (type == null) {
<ide> return;
<ide> }
<ide><path>spring-core/src/main/java/org/springframework/aot/hint/ResourcePatternHint.java
<ide> public Builder excludes(String... excludes) {
<ide> * builder.
<ide> * @return a resource pattern hint
<ide> */
<del> public ResourcePatternHint build() {
<add> ResourcePatternHint build() {
<ide> return new ResourcePatternHint(this);
<ide> }
<ide>
<ide><path>spring-core/src/main/java/org/springframework/aot/hint/TypeHint.java
<ide> public Builder withMembers(MemberCategory... memberCategories) {
<ide> * Create a {@link TypeHint} based on the state of this builder.
<ide> * @return a type hint
<ide> */
<del> public TypeHint build() {
<add> TypeHint build() {
<ide> return new TypeHint(this);
<ide> }
<ide>
<ide><path>spring-core/src/test/java/org/springframework/aot/hint/ProxyHintsTests.java
<ide> import java.util.Properties;
<ide> import java.util.function.Consumer;
<ide> import java.util.function.Function;
<del>import java.util.function.Supplier;
<ide> import java.util.stream.Stream;
<ide>
<ide> import org.junit.jupiter.api.Test;
<ide>
<del>import org.springframework.aot.hint.JdkProxyHint.Builder;
<del>
<ide> import static org.assertj.core.api.Assertions.assertThat;
<ide> import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException;
<ide>
<ide> void registerJdkProxyTwiceExposesOneHint() {
<ide> assertThat(this.proxyHints.jdkProxies()).singleElement().satisfies(proxiedInterfaces(Function.class));
<ide> }
<ide>
<add> @Test
<add> void registerClassProxyWithTargetClassName() {
<add> this.proxyHints.registerClassProxy(TypeReference.of(Properties.class.getName()), classProxyHint ->
<add> classProxyHint.proxiedInterfaces(Serializable.class));
<add> assertThat(this.proxyHints.classProxies()).singleElement().satisfies(classProxyHint -> {
<add> assertThat(classProxyHint.getTargetClass()).isEqualTo(TypeReference.of(Properties.class));
<add> assertThat(classProxyHint.getProxiedInterfaces()).containsOnly(TypeReference.of(Serializable.class));
<add> });
<add> }
<add>
<ide> @Test
<ide> void registerClassProxyWithTargetClass() {
<ide> this.proxyHints.registerClassProxy(Properties.class, classProxyHint ->
<ide> void registerClassProxyWithTargetInterface() {
<ide> })).withMessageContaining(Serializable.class.getName());
<ide> }
<ide>
<del> private static Supplier<JdkProxyHint> springProxy(TypeReference proxiedInterface) {
<del> return () -> new Builder().proxiedInterfaces(Stream.of("org.springframework.aop.SpringProxy",
<add> private static Consumer<JdkProxyHint.Builder> springProxy(TypeReference proxiedInterface) {
<add> return builder -> builder.proxiedInterfaces(Stream.of("org.springframework.aop.SpringProxy",
<ide> "org.springframework.aop.framework.Advised", "org.springframework.core.DecoratingProxy")
<ide> .map(TypeReference::of).toArray(TypeReference[]::new))
<del> .proxiedInterfaces(proxiedInterface).build();
<add> .proxiedInterfaces(proxiedInterface);
<ide> }
<ide>
<ide> private Consumer<JdkProxyHint> proxiedInterfaces(String... proxiedInterfaces) { | 9 |
PHP | PHP | use timing-attack-safe string comparison | bab2dc214636989bc67850e381a82acc81105e4d | <ide><path>src/Auth/DigestAuthenticate.php
<ide> public function getUser(ServerRequest $request)
<ide> unset($user[$field]);
<ide>
<ide> $hash = $this->generateResponseHash($digest, $password, $request->getEnv('ORIGINAL_REQUEST_METHOD'));
<del> if ($digest['response'] === $hash) {
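<add> // hash_equals() compares in constant time, protecting the digest check against timing attacks.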
<add> if (hash_equals($hash, $digest['response'])) {
<ide> return $user;
<ide> }
<ide> | 1 |
Javascript | Javascript | add license headers to .js files | eb48759675956d319f333c9757fbddea828bddc1 | <ide><path>Libraries/Components/ProgressBarAndroid/ProgressBarAndroid.ios.js
<ide> /**
<del> * Copyright 2004-present Facebook. All Rights Reserved.
<add> * Copyright (c) 2015-present, Facebook, Inc.
<add> * All rights reserved.
<add> *
<add> * This source code is licensed under the BSD-style license found in the
<add> * LICENSE file in the root directory of this source tree. An additional grant
<add> * of patent rights can be found in the PATENTS file in the same directory.
<ide> *
<ide> * @providesModule ProgressBarAndroid
<ide> */
<ide><path>Libraries/Utilities/BackAndroid.ios.js
<ide> /**
<add> * Copyright (c) 2015-present, Facebook, Inc.
<add> * All rights reserved.
<add> *
<add> * This source code is licensed under the BSD-style license found in the
<add> * LICENSE file in the root directory of this source tree. An additional grant
<add> * of patent rights can be found in the PATENTS file in the same directory.
<add> *
<ide> * iOS stub for BackAndroid.android.js
<ide> *
<ide> * @providesModule BackAndroid | 2 |
Go | Go | move TestEntrypoint & TestBindMounts | 5b27fbc0e27358b794946fe9e84a83a6c4497be4 | <ide><path>integration-cli/docker_cli_run_test.go
<ide> import (
<ide> "io/ioutil"
<ide> "os"
<ide> "os/exec"
<add> "path"
<ide> "path/filepath"
<ide> "reflect"
<ide> "regexp"
<ide> func TestRunWithBadDevice(t *testing.T) {
<ide> }
<ide> logDone("run - error with bad device")
<ide> }
<add>
<add>func TestEntrypoint(t *testing.T) {
<add> name := "entrypoint"
<add> cmd := exec.Command(dockerBinary, "run", "--name", name, "--entrypoint", "/bin/echo", "busybox", "-n", "foobar")
<add> out, _, err := runCommandWithOutput(cmd)
<add> if err != nil {
<add> t.Fatal(err, out)
<add> }
<add> expected := "foobar"
<add> if out != expected {
<add> t.Fatalf("Output should be %q, actual out: %q", expected, out)
<add> }
<add> logDone("run - entrypoint")
<add>}
<add>
<add>func TestBindMounts(t *testing.T) {
<add> tmpDir, err := ioutil.TempDir("", "docker-test-container")
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add>
<add> defer os.RemoveAll(tmpDir)
<add> writeFile(path.Join(tmpDir, "touch-me"), "", t)
<add>
<add> // Test reading from a read-only bind mount
<add> cmd := exec.Command(dockerBinary, "run", "-v", fmt.Sprintf("%s:/tmp:ro", tmpDir), "busybox", "ls", "/tmp")
<add> out, _, err := runCommandWithOutput(cmd)
<add> if err != nil {
<add> t.Fatal(err, out)
<add> }
<add> if !strings.Contains(out, "touch-me") {
<add> t.Fatal("Container failed to read from bind mount")
<add> }
<add>
<add> // test writing to bind mount
<add> cmd = exec.Command(dockerBinary, "run", "-v", fmt.Sprintf("%s:/tmp:rw", tmpDir), "busybox", "touch", "/tmp/holla")
<add> out, _, err = runCommandWithOutput(cmd)
<add> if err != nil {
<add> t.Fatal(err, out)
<add> }
<add> readFile(path.Join(tmpDir, "holla"), t) // Will fail if the file doesn't exist
<add>
<add> // test mounting to an illegal destination directory
<add> cmd = exec.Command(dockerBinary, "run", "-v", fmt.Sprintf("%s:.", tmpDir), "busybox", "ls", ".")
<add> _, err = runCommand(cmd)
<add> if err == nil {
<add> t.Fatal("Container bind mounted illegal directory")
<add> }
<add>
<add> // test mount a file
<add> cmd = exec.Command(dockerBinary, "run", "-v", fmt.Sprintf("%s/holla:/tmp/holla:rw", tmpDir), "busybox", "sh", "-c", "echo -n 'yotta' > /tmp/holla")
<add> _, err = runCommand(cmd)
<add> if err != nil {
<add> t.Fatal(err, out)
<add> }
<add> content := readFile(path.Join(tmpDir, "holla"), t) // Will fail if the file doesn't exist
<add> expected := "yotta"
<add> if content != expected {
<add> t.Fatalf("Output should be %q, actual out: %q", expected, content)
<add> }
<add>}
<ide><path>integration-cli/docker_utils.go
<ide> package main
<ide>
<ide> import (
<ide> "fmt"
<add> "io"
<ide> "io/ioutil"
<ide> "net/http"
<ide> "net/http/httptest"
<ide> func fakeGIT(name string, files map[string]string) (*FakeGIT, error) {
<ide> RepoURL: fmt.Sprintf("%s/%s.git", server.URL, name),
<ide> }, nil
<ide> }
<add>
<add>// Write `content` to the file at path `dst`, creating it if necessary,
<add>// as well as any missing directories.
<add>// The file is truncated if it already exists.
<add>// Call t.Fatal() at the first error.
<add>func writeFile(dst, content string, t *testing.T) {
<add> // Create subdirectories if necessary
<add> if err := os.MkdirAll(path.Dir(dst), 0700); err != nil && !os.IsExist(err) {
<add> t.Fatal(err)
<add> }
<add> f, err := os.OpenFile(dst, os.O_CREATE|os.O_RDWR|os.O_TRUNC, 0700)
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add> // Write content (truncate if it exists)
<add> if _, err := io.Copy(f, strings.NewReader(content)); err != nil {
<add> t.Fatal(err)
<add> }
<add>}
<add>
<add>// Return the contents of file at path `src`.
<add>// Call t.Fatal() at the first error (including if the file doesn't exist)
<add>func readFile(src string, t *testing.T) (content string) {
<add> f, err := os.Open(src)
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add> data, err := ioutil.ReadAll(f)
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add> return string(data)
<add>}
<ide><path>integration/container_test.go
<ide> package docker
<ide>
<ide> import (
<del> "fmt"
<ide> "io"
<ide> "io/ioutil"
<del> "os"
<del> "path"
<del> "strings"
<ide> "testing"
<ide> "time"
<ide>
<ide> func TestTty(t *testing.T) {
<ide> }
<ide> }
<ide>
<del>func TestEntrypoint(t *testing.T) {
<del> daemon := mkDaemon(t)
<del> defer nuke(daemon)
<del> container, _, err := daemon.Create(
<del> &runconfig.Config{
<del> Image: GetTestImage(daemon).ID,
<del> Entrypoint: []string{"/bin/echo"},
<del> Cmd: []string{"-n", "foobar"},
<del> },
<del> "",
<del> )
<del> if err != nil {
<del> t.Fatal(err)
<del> }
<del> defer daemon.Destroy(container)
<del> output, err := container.Output()
<del> if err != nil {
<del> t.Fatal(err)
<del> }
<del> if string(output) != "foobar" {
<del> t.Error(string(output))
<del> }
<del>}
<del>
<del>func TestEntrypointNoCmd(t *testing.T) {
<del> daemon := mkDaemon(t)
<del> defer nuke(daemon)
<del> container, _, err := daemon.Create(
<del> &runconfig.Config{
<del> Image: GetTestImage(daemon).ID,
<del> Entrypoint: []string{"/bin/echo", "foobar"},
<del> },
<del> "",
<del> )
<del> if err != nil {
<del> t.Fatal(err)
<del> }
<del> defer daemon.Destroy(container)
<del> output, err := container.Output()
<del> if err != nil {
<del> t.Fatal(err)
<del> }
<del> if strings.Trim(string(output), "\r\n") != "foobar" {
<del> t.Error(string(output))
<del> }
<del>}
<del>
<ide> func BenchmarkRunSequential(b *testing.B) {
<ide> daemon := mkDaemon(b)
<ide> defer nuke(daemon)
<ide> func BenchmarkRunParallel(b *testing.B) {
<ide> b.Fatal(errors)
<ide> }
<ide> }
<del>
<del>func tempDir(t *testing.T) string {
<del> tmpDir, err := ioutil.TempDir("", "docker-test-container")
<del> if err != nil {
<del> t.Fatal(err)
<del> }
<del> return tmpDir
<del>}
<del>
<del>func TestBindMounts(t *testing.T) {
<del> eng := NewTestEngine(t)
<del> r := mkDaemonFromEngine(eng, t)
<del> defer r.Nuke()
<del>
<del> tmpDir := tempDir(t)
<del> defer os.RemoveAll(tmpDir)
<del> writeFile(path.Join(tmpDir, "touch-me"), "", t)
<del>
<del> // Test reading from a read-only bind mount
<del> stdout, _ := runContainer(eng, r, []string{"-v", fmt.Sprintf("%s:/tmp:ro", tmpDir), "_", "ls", "/tmp"}, t)
<del> if !strings.Contains(stdout, "touch-me") {
<del> t.Fatal("Container failed to read from bind mount")
<del> }
<del>
<del> // test writing to bind mount
<del> runContainer(eng, r, []string{"-v", fmt.Sprintf("%s:/tmp:rw", tmpDir), "_", "touch", "/tmp/holla"}, t)
<del> readFile(path.Join(tmpDir, "holla"), t) // Will fail if the file doesn't exist
<del>
<del> // test mounting to an illegal destination directory
<del> if _, err := runContainer(eng, r, []string{"-v", fmt.Sprintf("%s:.", tmpDir), "_", "ls", "."}, nil); err == nil {
<del> t.Fatal("Container bind mounted illegal directory")
<del> }
<del>
<del> // test mount a file
<del> runContainer(eng, r, []string{"-v", fmt.Sprintf("%s/holla:/tmp/holla:rw", tmpDir), "_", "sh", "-c", "echo -n 'yotta' > /tmp/holla"}, t)
<del> content := readFile(path.Join(tmpDir, "holla"), t) // Will fail if the file doesn't exist
<del> if content != "yotta" {
<del> t.Fatal("Container failed to write to bind mount file")
<del> }
<del>} | 3 |
PHP | PHP | show the incorrect type received in the error message | a8b1b97d61da7c1899f8dcb0590992c9b47f4588 | <ide><path>src/Controller/Controller.php
<ide> public function invokeAction(): ?ResponseInterface
<ide> }
<ide>
<ide> if (!$result instanceof ResponseInterface) {
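<add> // getTypeName() reports the class name for objects and the native type name otherwise.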
<del> throw new UnexpectedValueException(
<del> 'Controller actions can only return ResponseInterface instance or null.'
<del> );
<add> throw new UnexpectedValueException(sprintf(
<add> 'Controller actions can only return a ResponseInterface instance or null. '
<add> . 'Got %s instead.',
<add> getTypeName($result)
<add> ));
<ide> }
<ide>
<ide> return $this->response = $result;
<ide><path>tests/TestCase/Controller/ControllerTest.php
<ide> public function testInvokeActionWithPassedParams(): void
<ide> public function testInvokeActionException()
<ide> {
<ide> $this->expectException(\UnexpectedValueException::class);
<del> $this->expectExceptionMessage('Controller actions can only return ResponseInterface instance or null');
<add> $this->expectExceptionMessage(
<add> 'Controller actions can only return a ResponseInterface instance or null. '
<add> . 'Got string instead.'
<add> );
<ide>
<ide> $url = new ServerRequest([
<ide> 'url' => 'test/willCauseException', | 2 |
Text | Text | add body text and link | 0a67728ab1b18d6767e962268d4fdc85a15f6aed | <ide><path>guide/english/tools/calculators/combinations-and-permutations-calculator/index.md
<ide> title: Combinations and Permutations Calculator
<ide> ---
<ide> ## Combinations and Permutations Calculator
<ide>
<del>This is a stub. <a href='https://github.com/freecodecamp/guides/tree/master/src/pages/tools/calculators/combinations-and-permutations-calculator/index.md' target='_blank' rel='nofollow'>Help our community expand it</a>.
<add>A combinations and permutations calculator computes how many permutations or combinations can be formed when some number of elements is chosen from a set.
<ide>
<del><a href='https://github.com/freecodecamp/guides/blob/master/README.md' target='_blank' rel='nofollow'>This quick style guide will help ensure your pull request gets accepted</a>.
<del>
<del><!-- The article goes here, in GitHub-flavored Markdown. Feel free to add YouTube videos, images, and CodePen/JSBin embeds -->
<add>A permutation is a possible arrangement of the elements of the given set, where order matters. A combination is the same selection of elements, except that order is disregarded, as the formulas below show.
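<add>
<add>For a set of `n` elements taken `r` at a time, there are `n! / (n - r)!` permutations and `n! / (r! * (n - r)!)` combinations. For example, choosing 2 letters from {A, B, C} gives 6 permutations (AB, BA, AC, CA, BC, CB) but only 3 combinations (AB, AC, BC).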
<ide>
<ide> #### More Information:
<del><!-- Please add any articles you think might be helpful to read before writing the article -->
<add>* [An online combinations and permutations calculator](https://www.mathsisfun.com/combinatorics/combinations-permutations-calculator.html)
<add>* [Wikipedia article on permutations](https://en.wikipedia.org/wiki/Permutation)
<add>* [Wikipedia article on combinations](https://en.wikipedia.org/wiki/Combination)
<ide>
<ide> | 1 |
Ruby | Ruby | add newline after keg only reason | 7a2c5c0f2f91bf9e09d9eca993df0a4ec34a8f7b | <ide><path>Library/Homebrew/caveats.rb
<ide> def keg_only_text
<ide> return unless f.keg_only?
<ide>
<ide> s = "This formula is keg-only, which means it was not symlinked into #{HOMEBREW_PREFIX}."
<del> s << "\n\n#{f.keg_only_reason}"
<add> s << "\n\n#{f.keg_only_reason}\n"
<ide> if f.bin.directory? || f.sbin.directory?
<ide> s << "\nIf you need to have this software first in your PATH run:\n"
<ide> if f.bin.directory? | 1 |
Java | Java | integrate WebView into Fabric on Android | 7c066ae9526a0ee570b159b3828ff2b5ff28889e | <ide><path>ReactAndroid/src/main/java/com/facebook/react/fabric/FabricComponents.java
<ide> public class FabricComponents {
<ide> sComponentNames.put("Video", "RCTVideo");
<ide> sComponentNames.put("StickerInputView", "RCTStickerInputView");
<ide> sComponentNames.put("Map", "RCTMap");
<add> sComponentNames.put("WebView", "RCTWebView");
<ide> }
<ide>
<ide> /** @return the name of component in the Fabric environment */ | 1 |
Java | Java | add Observable.rangeLong & Flowable.rangeLong | 7e89c1f1a8cde6b461f9a36869b1027961be01a7 | <ide><path>src/main/java/io/reactivex/Flowable.java
<ide> public static Flowable<Integer> range(int start, int count) {
<ide> return RxJavaPlugins.onAssembly(new FlowableRange(start, count));
<ide> }
<ide>
<add> /**
<add> * Returns a Flowable that emits a sequence of Longs within a specified range.
<add> * <p>
<add> * <img width="640" height="195" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/range.png" alt="">
<add> * <dl>
<add> * <dt><b>Backpressure:</b></dt>
<add> * <dd>The operator honors backpressure from downstream and signals values on-demand (i.e., when requested).</dd>
<add> * <dt><b>Scheduler:</b></dt>
<add> * <dd>{@code rangeLong} does not operate by default on a particular {@link Scheduler}.</dd>
<add> * </dl>
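<add> * <p>
<add> * For example, {@code Flowable.rangeLong(5L, 3L)} emits {@code 5L, 6L, 7L} and then completes.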
<add> *
<add> * @param start
<add> * the value of the first Long in the sequence
<add> * @param count
<add> * the number of sequential Longs to generate
<add> * @return a Flowable that emits a range of sequential Longs
<add> * @throws IllegalArgumentException
<add> * if {@code count} is less than zero, or if {@code start} + {@code count} − 1 exceeds
<add> * {@code Long.MAX_VALUE}
<add> * @see <a href="http://reactivex.io/documentation/operators/range.html">ReactiveX operators documentation: Range</a>
<add> */
<add> @BackpressureSupport(BackpressureKind.FULL)
<add> @SchedulerSupport(SchedulerSupport.NONE)
<add> public static Flowable<Long> rangeLong(long start, long count) {
<add> if (count < 0) {
<add> throw new IllegalArgumentException("count >= 0 required but it was " + count);
<add> }
<add>
<add> if (count == 0) {
<add> return empty();
<add> }
<add>
<add> if (count == 1) {
<add> return just(start);
<add> }
<add>
<add> long end = start + (count - 1);
<add> if (start > 0 && end < 0) {
<add> throw new IllegalArgumentException("Overflow! start + count is bigger than Long.MAX_VALUE");
<add> }
<add>
<add> return RxJavaPlugins.onAssembly(new FlowableRangeLong(start, count));
<add> }
<add>
<ide> /**
<ide> * Returns a Flowable that emits a Boolean value that indicates whether two Publisher sequences are the
<ide> * same by comparing the items emitted by each Publisher pairwise.
<ide><path>src/main/java/io/reactivex/Observable.java
<ide> public static Observable<Integer> range(final int start, final int count) {
<ide> return RxJavaPlugins.onAssembly(new ObservableRange(start, count));
<ide> }
<ide>
<add> /**
<add> * Returns an Observable that emits a sequence of Longs within a specified range.
<add> * <p>
<add> * <img width="640" height="195" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/range.png" alt="">
<add> * <dl>
<add> * <dt><b>Scheduler:</b></dt>
<add> * <dd>{@code rangeLong} does not operate by default on a particular {@link Scheduler}.</dd>
<add> * </dl>
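<add> * <p>
<add> * For example, {@code Observable.rangeLong(2L, 3L)} emits {@code 2L, 3L, 4L} and then completes.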
<add> *
<add> * @param start
<add> * the value of the first Long in the sequence
<add> * @param count
<add> * the number of sequential Longs to generate
<add> * @return an Observable that emits a range of sequential Longs
<add> * @throws IllegalArgumentException
<add> * if {@code count} is less than zero, or if {@code start} + {@code count} − 1 exceeds
<add> * {@code Long.MAX_VALUE}
<add> * @see <a href="http://reactivex.io/documentation/operators/range.html">ReactiveX operators documentation: Range</a>
<add> */
<add> @SchedulerSupport(SchedulerSupport.NONE)
<add> public static Observable<Long> rangeLong(long start, long count) {
<add> if (count < 0) {
<add> throw new IllegalArgumentException("count >= 0 required but it was " + count);
<add> }
<add>
<add> if (count == 0) {
<add> return empty();
<add> }
<add>
<add> if (count == 1) {
<add> return just(start);
<add> }
<add>
<add> long end = start + (count - 1);
<add> if (start > 0 && end < 0) {
<add> throw new IllegalArgumentException("Overflow! start + count is bigger than Long.MAX_VALUE");
<add> }
<add>
<add> return RxJavaPlugins.onAssembly(new ObservableRangeLong(start, count));
<add> }
<add>
<ide> /**
<ide> * Returns an Observable that emits a Boolean value that indicates whether two ObservableSource sequences are the
<ide> * same by comparing the items emitted by each ObservableSource pairwise.
<ide><path>src/main/java/io/reactivex/internal/operators/flowable/FlowableRangeLong.java
<add>/**
<add> * Copyright 2016 Netflix, Inc.
<add> *
<add> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
<add> * compliance with the License. You may obtain a copy of the License at
<add> *
<add> * http://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing, software distributed under the License is
<add> * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
<add> * the License for the specific language governing permissions and limitations under the License.
<add> */
<add>
<add>package io.reactivex.internal.operators.flowable;
<add>
<add>import io.reactivex.Flowable;
<add>import io.reactivex.internal.fuseable.ConditionalSubscriber;
<add>import io.reactivex.internal.subscriptions.BasicQueueSubscription;
<add>import io.reactivex.internal.subscriptions.SubscriptionHelper;
<add>import io.reactivex.internal.util.BackpressureHelper;
<add>import org.reactivestreams.Subscriber;
<add>
<add>/**
<add> * Emits a range of long values.
<add> */
<add>public final class FlowableRangeLong extends Flowable<Long> {
<add> final long start;
<add> final long end;
<add>
<add> public FlowableRangeLong(long start, long count) {
<add> this.start = start;
<add> this.end = start + count;
<add> }
<add>
<add> @Override
<add> public void subscribeActual(Subscriber<? super Long> s) {
<add> if (s instanceof ConditionalSubscriber) {
<add> s.onSubscribe(new RangeConditionalSubscription(
<add> (ConditionalSubscriber<? super Long>)s, start, end));
<add> } else {
<add> s.onSubscribe(new RangeSubscription(s, start, end));
<add> }
<add> }
<add>
<add> abstract static class BaseRangeSubscription extends BasicQueueSubscription<Long> {
<add>
<add> private static final long serialVersionUID = -2252972430506210021L;
<add>
<add> final long end;
<add>
<add> long index;
<add>
<add> volatile boolean cancelled;
<add>
<add> BaseRangeSubscription(long index, long end) {
<add> this.index = index;
<add> this.end = end;
<add> }
<add>
<add> @Override
<add> public final int requestFusion(int mode) {
<add> return mode & SYNC;
<add> }
<add>
<add> @Override
<add> public final Long poll() {
<add> long i = index;
<add> if (i == end) {
<add> return null;
<add> }
<add> index = i + 1;
<add> return i;
<add> }
<add>
<add> @Override
<add> public final boolean isEmpty() {
<add> return index == end;
<add> }
<add>
<add> @Override
<add> public final void clear() {
<add> index = end;
<add> }
<add>
<add> @Override
<add> public final void request(long n) {
<add> if (SubscriptionHelper.validate(n)) {
<add> if (BackpressureHelper.add(this, n) == 0L) {
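<add> // A previous requested total of 0 means no emission loop is active, so start one here.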
<add> if (n == Long.MAX_VALUE) {
<add> fastPath();
<add> } else {
<add> slowPath(n);
<add> }
<add> }
<add> }
<add> }
<add>
<add> @Override
<add> public final void cancel() {
<add> cancelled = true;
<add> }
<add>
<add>
<add> abstract void fastPath();
<add>
<add> abstract void slowPath(long r);
<add> }
<add>
<add> static final class RangeSubscription extends BaseRangeSubscription {
<add>
<add> private static final long serialVersionUID = 2587302975077663557L;
<add>
<add> final Subscriber<? super Long> actual;
<add>
<add> RangeSubscription(Subscriber<? super Long> actual, long index, long end) {
<add> super(index, end);
<add> this.actual = actual;
<add> }
<add>
<add> @Override
<add> void fastPath() {
<add> long f = end;
<add> Subscriber<? super Long> a = actual;
<add>
<add> for (long i = index; i != f; i++) {
<add> if (cancelled) {
<add> return;
<add> }
<add> a.onNext(i);
<add> }
<add> if (cancelled) {
<add> return;
<add> }
<add> a.onComplete();
<add> }
<add>
<add> @Override
<add> void slowPath(long r) {
<add> long e = 0;
<add> long f = end;
<add> long i = index;
<add> Subscriber<? super Long> a = actual;
<add>
<add> for (;;) {
<add>
<add> while (e != r && i != f) {
<add> if (cancelled) {
<add> return;
<add> }
<add>
<add> a.onNext(i);
<add>
<add> e++;
<add> i++;
<add> }
<add>
<add> if (i == f) {
<add> if (!cancelled) {
<add> a.onComplete();
<add> }
<add> return;
<add> }
<add>
<add> r = get();
<add> if (e == r) {
<add> index = i;
<add> r = addAndGet(-e);
<add> if (r == 0L) {
<add> return;
<add> }
<add> e = 0L;
<add> }
<add> }
<add> }
<add> }
<add>
<add> static final class RangeConditionalSubscription extends BaseRangeSubscription {
<add>
<add>
<add> private static final long serialVersionUID = 2587302975077663557L;
<add>
<add> final ConditionalSubscriber<? super Long> actual;
<add>
<add> RangeConditionalSubscription(ConditionalSubscriber<? super Long> actual, long index, long end) {
<add> super(index, end);
<add> this.actual = actual;
<add> }
<add>
<add> @Override
<add> void fastPath() {
<add> long f = end;
<add> ConditionalSubscriber<? super Long> a = actual;
<add>
<add> for (long i = index; i != f; i++) {
<add> if (cancelled) {
<add> return;
<add> }
<add> a.tryOnNext(i);
<add> }
<add> if (cancelled) {
<add> return;
<add> }
<add> a.onComplete();
<add> }
<add>
<add> @Override
<add> void slowPath(long r) {
<add> long e = 0;
<add> long f = end;
<add> long i = index;
<add> ConditionalSubscriber<? super Long> a = actual;
<add>
<add> for (;;) {
<add>
<add> while (e != r && i != f) {
<add> if (cancelled) {
<add> return;
<add> }
<add>
<add> if (a.tryOnNext(i)) {
<add> e++;
<add> }
<add>
<add> i++;
<add> }
<add>
<add> if (i == f) {
<add> if (!cancelled) {
<add> a.onComplete();
<add> }
<add> return;
<add> }
<add>
<add> r = get();
<add> if (e == r) {
<add> index = i;
<add> r = addAndGet(-e);
<add> if (r == 0) {
<add> return;
<add> }
<add> e = 0;
<add> }
<add> }
<add> }
<add> }
<add>}
<ide><path>src/main/java/io/reactivex/internal/operators/observable/ObservableRangeLong.java
<add>/**
<add> * Copyright 2016 Netflix, Inc.
<add> *
<add> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
<add> * compliance with the License. You may obtain a copy of the License at
<add> *
<add> * http://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing, software distributed under the License is
<add> * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
<add> * the License for the specific language governing permissions and limitations under the License.
<add> */
<add>package io.reactivex.internal.operators.observable;
<add>
<add>import io.reactivex.Observable;
<add>import io.reactivex.Observer;
<add>import io.reactivex.internal.fuseable.QueueDisposable;
<add>import java.util.concurrent.atomic.AtomicLong;
<add>
<add>public final class ObservableRangeLong extends Observable<Long> {
<add> private final long start;
<add> private final long count;
<add>
<add> public ObservableRangeLong(long start, long count) {
<add> this.start = start;
<add> this.count = count;
<add> }
<add>
<add> @Override
<add> protected void subscribeActual(Observer<? super Long> o) {
<add> RangeDisposable parent = new RangeDisposable(o, start, start + count);
<add> o.onSubscribe(parent);
<add> parent.run();
<add> }
<add>
<add> static final class RangeDisposable
<add> extends AtomicLong
<add> implements QueueDisposable<Long> {
<add>
<add> private static final long serialVersionUID = 396518478098735504L;
<add>
<add> final Observer<? super Long> actual;
<add>
<add> final long end;
<add>
<add> long index;
<add>
<add> boolean fused;
<add>
<add> RangeDisposable(Observer<? super Long> actual, long start, long end) {
<add> this.actual = actual;
<add> this.index = start;
<add> this.end = end;
<add> }
<add>
<add> void run() {
<add> if (fused) {
<add> return;
<add> }
<add> Observer<? super Long> actual = this.actual;
<add> long e = end;
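<add> // get() becomes non-zero once the observer disposes, which stops the emission loop below.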
<add> for (long i = index; i != e && get() == 0; i++) {
<add> actual.onNext(i);
<add> }
<add> if (get() == 0) {
<add> lazySet(1);
<add> actual.onComplete();
<add> }
<add> }
<add>
<add> @Override
<add> public boolean offer(Long value) {
<add> throw new UnsupportedOperationException("Should not be called!");
<add> }
<add>
<add> @Override
<add> public boolean offer(Long v1, Long v2) {
<add> throw new UnsupportedOperationException("Should not be called!");
<add> }
<add>
<add> @Override
<add> public Long poll() throws Exception {
<add> long i = index;
<add> if (i != end) {
<add> index = i + 1;
<add> return i;
<add> }
<add> lazySet(1);
<add> return null;
<add> }
<add>
<add> @Override
<add> public boolean isEmpty() {
<add> return index == end;
<add> }
<add>
<add> @Override
<add> public void clear() {
<add> index = end;
<add> lazySet(1);
<add> }
<add>
<add> @Override
<add> public void dispose() {
<add> set(1);
<add> }
<add>
<add> @Override
<add> public boolean isDisposed() {
<add> return get() != 0;
<add> }
<add>
<add> @Override
<add> public int requestFusion(int mode) {
<add> if ((mode & SYNC) != 0) {
<add> fused = true;
<add> return SYNC;
<add> }
<add> return NONE;
<add> }
<add> }
<add>}
<ide><path>src/test/java/io/reactivex/internal/operators/flowable/FlowableRangeLongTest.java
<add>/**
<add> * Copyright 2016 Netflix, Inc.
<add> *
<add> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
<add> * compliance with the License. You may obtain a copy of the License at
<add> *
<add> * http://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing, software distributed under the License is
<add> * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
<add> * the License for the specific language governing permissions and limitations under the License.
<add> */
<add>
<add>package io.reactivex.internal.operators.flowable;
<add>
<add>import io.reactivex.Flowable;
<add>import io.reactivex.TestHelper;
<add>import io.reactivex.functions.Consumer;
<add>import io.reactivex.subscribers.DefaultSubscriber;
<add>import io.reactivex.subscribers.TestSubscriber;
<add>import java.util.ArrayList;
<add>import java.util.List;
<add>import java.util.concurrent.atomic.AtomicBoolean;
<add>import java.util.concurrent.atomic.AtomicInteger;
<add>import org.junit.Test;
<add>import org.reactivestreams.Subscriber;
<add>
<add>import static org.junit.Assert.assertEquals;
<add>import static org.junit.Assert.assertFalse;
<add>import static org.junit.Assert.assertTrue;
<add>import static org.junit.Assert.fail;
<add>import static org.mockito.Mockito.any;
<add>import static org.mockito.Mockito.never;
<add>import static org.mockito.Mockito.times;
<add>import static org.mockito.Mockito.verify;
<add>
<add>public class FlowableRangeLongTest {
<add>
<add> @Test
<add> public void testRangeStartAt2Count3() {
<add> Subscriber<Long> observer = TestHelper.mockSubscriber();
<add>
<add> Flowable.rangeLong(2, 3).subscribe(observer);
<add>
<add> verify(observer, times(1)).onNext(2L);
<add> verify(observer, times(1)).onNext(3L);
<add> verify(observer, times(1)).onNext(4L);
<add> verify(observer, never()).onNext(5L);
<add> verify(observer, never()).onError(any(Throwable.class));
<add> verify(observer, times(1)).onComplete();
<add> }
<add>
<add> @Test
<add> public void testRangeUnsubscribe() {
<add> Subscriber<Long> observer = TestHelper.mockSubscriber();
<add>
<add> final AtomicInteger count = new AtomicInteger();
<add>
<add> Flowable.rangeLong(1, 1000).doOnNext(new Consumer<Long>() {
<add> @Override
<add> public void accept(Long t1) {
<add> count.incrementAndGet();
<add> }
<add> })
<add> .take(3).subscribe(observer);
<add>
<add> verify(observer, times(1)).onNext(1L);
<add> verify(observer, times(1)).onNext(2L);
<add> verify(observer, times(1)).onNext(3L);
<add> verify(observer, never()).onNext(4L);
<add> verify(observer, never()).onError(any(Throwable.class));
<add> verify(observer, times(1)).onComplete();
<add> assertEquals(3, count.get());
<add> }
<add>
<add> @Test
<add> public void testRangeWithZero() {
<add> Flowable.rangeLong(1, 0);
<add> }
<add>
<add> @Test
<add> public void testRangeWithOverflow2() {
<add> Flowable.rangeLong(Long.MAX_VALUE, 0);
<add> }
<add>
<add> @Test
<add> public void testRangeWithOverflow3() {
<add> Flowable.rangeLong(1, Long.MAX_VALUE);
<add> }
<add>
<add> @Test(expected = IllegalArgumentException.class)
<add> public void testRangeWithOverflow4() {
<add> Flowable.rangeLong(2, Long.MAX_VALUE);
<add> }
<add>
<add> @Test
<add> public void testRangeWithOverflow5() {
<add> assertFalse(Flowable.rangeLong(Long.MIN_VALUE, 0).blockingIterable().iterator().hasNext());
<add> }
<add>
<add> @Test
<add> public void testBackpressureViaRequest() {
<add> Flowable<Long> o = Flowable.rangeLong(1, Flowable.bufferSize());
<add>
<add> TestSubscriber<Long> ts = new TestSubscriber<Long>(0L);
<add>
<add> ts.assertNoValues();
<add> ts.request(1);
<add>
<add> o.subscribe(ts);
<add>
<add> ts.assertValue(1L);
<add>
<add> ts.request(2);
<add> ts.assertValues(1L, 2L, 3L);
<add>
<add> ts.request(3);
<add> ts.assertValues(1L, 2L, 3L, 4L, 5L, 6L);
<add>
<add> ts.request(Flowable.bufferSize());
<add> ts.assertTerminated();
<add> }
<add>
<add> @Test
<add> public void testNoBackpressure() {
<add> ArrayList<Long> list = new ArrayList<Long>(Flowable.bufferSize() * 2);
<add> for (long i = 1; i <= Flowable.bufferSize() * 2 + 1; i++) {
<add> list.add(i);
<add> }
<add>
<add> Flowable<Long> o = Flowable.rangeLong(1, list.size());
<add>
<add> TestSubscriber<Long> ts = new TestSubscriber<Long>(0L);
<add>
<add> ts.assertNoValues();
<add> ts.request(Long.MAX_VALUE); // infinite
<add>
<add> o.subscribe(ts);
<add>
<add> ts.assertValueSequence(list);
<add> ts.assertTerminated();
<add> }
<add>
<add> void testWithBackpressureOneByOne(long start) {
<add> Flowable<Long> source = Flowable.rangeLong(start, 100);
<add>
<add> TestSubscriber<Long> ts = new TestSubscriber<Long>(0L);
<add> ts.request(1);
<add> source.subscribe(ts);
<add>
<add> List<Long> list = new ArrayList<Long>(100);
<add> for (long i = 0; i < 100; i++) {
<add> list.add(i + start);
<add> ts.request(1);
<add> }
<add> ts.assertValueSequence(list);
<add> ts.assertTerminated();
<add> }
<add>
<add> void testWithBackpressureAllAtOnce(long start) {
<add> Flowable<Long> source = Flowable.rangeLong(start, 100);
<add>
<add> TestSubscriber<Long> ts = new TestSubscriber<Long>(0L);
<add> ts.request(100);
<add> source.subscribe(ts);
<add>
<add> List<Long> list = new ArrayList<Long>(100);
<add> for (long i = 0; i < 100; i++) {
<add> list.add(i + start);
<add> }
<add> ts.assertValueSequence(list);
<add> ts.assertTerminated();
<add> }
<add> @Test
<add> public void testWithBackpressure1() {
<add> for (long i = 0; i < 100; i++) {
<add> testWithBackpressureOneByOne(i);
<add> }
<add> }
<add> @Test
<add> public void testWithBackpressureAllAtOnce() {
<add> for (long i = 0; i < 100; i++) {
<add> testWithBackpressureAllAtOnce(i);
<add> }
<add> }
<add> @Test
<add> public void testWithBackpressureRequestWayMore() {
<add> Flowable<Long> source = Flowable.rangeLong(50, 100);
<add>
<add> TestSubscriber<Long> ts = new TestSubscriber<Long>(0L);
<add> ts.request(150);
<add> source.subscribe(ts);
<add>
<add> List<Long> list = new ArrayList<Long>(100);
<add> for (long i = 0; i < 100; i++) {
<add> list.add(i + 50);
<add> }
<add>
<add> ts.request(50); // and then some
<add>
<add> ts.assertValueSequence(list);
<add> ts.assertTerminated();
<add> }
<add>
<add> @Test
<add> public void testRequestOverflow() {
<add> final AtomicInteger count = new AtomicInteger();
<add> int n = 10;
<add> Flowable.rangeLong(1, n).subscribe(new DefaultSubscriber<Long>() {
<add>
<add> @Override
<add> public void onStart() {
<add> request(2);
<add> }
<add>
<add> @Override
<add> public void onComplete() {
<add> //do nothing
<add> }
<add>
<add> @Override
<add> public void onError(Throwable e) {
<add> throw new RuntimeException(e);
<add> }
<add>
<add> @Override
<add> public void onNext(Long t) {
<add> count.incrementAndGet();
<add> request(Long.MAX_VALUE - 1);
<add> }});
<add> assertEquals(n, count.get());
<add> }
<add>
<add> @Test
<add> public void testEmptyRangeSendsOnCompleteEagerlyWithRequestZero() {
<add> final AtomicBoolean completed = new AtomicBoolean(false);
<add> Flowable.rangeLong(1, 0).subscribe(new DefaultSubscriber<Long>() {
<add>
<add> @Override
<add> public void onStart() {
<add>// request(0);
<add> }
<add>
<add> @Override
<add> public void onComplete() {
<add> completed.set(true);
<add> }
<add>
<add> @Override
<add> public void onError(Throwable e) {
<add>
<add> }
<add>
<add> @Override
<add> public void onNext(Long t) {
<add>
<add> }});
<add> assertTrue(completed.get());
<add> }
<add>
<add> @Test(timeout = 1000)
<add> public void testNearMaxValueWithoutBackpressure() {
<add> TestSubscriber<Long> ts = new TestSubscriber<Long>();
<add> Flowable.rangeLong(Long.MAX_VALUE - 1L, 2L).subscribe(ts);
<add>
<add> ts.assertComplete();
<add> ts.assertNoErrors();
<add> ts.assertValues(Long.MAX_VALUE - 1L, Long.MAX_VALUE);
<add> }
<add>
<add> @Test(timeout = 1000)
<add> public void testNearMaxValueWithBackpressure() {
<add> TestSubscriber<Long> ts = new TestSubscriber<Long>(3L);
<add> Flowable.rangeLong(Long.MAX_VALUE - 1L, 2L).subscribe(ts);
<add>
<add> ts.assertComplete();
<add> ts.assertNoErrors();
<add> ts.assertValues(Long.MAX_VALUE - 1L, Long.MAX_VALUE);
<add> }
<add>
<add> @Test
<add> public void negativeCount() {
<add> try {
<add> Flowable.rangeLong(1L, -1L);
<add> fail("Should have thrown IllegalArgumentException");
<add> } catch (IllegalArgumentException ex) {
<add> assertEquals("count >= 0 required but it was -1", ex.getMessage());
<add> }
<add> }
<add>
<add> @Test
<add> public void countOne() {
<add> Flowable.rangeLong(5495454L, 1L)
<add> .test()
<add> .assertResult(5495454L);
<add> }
<add>}
<ide><path>src/test/java/io/reactivex/internal/operators/observable/ObservableRangeLongTest.java
<add>/**
<add> * Copyright 2016 Netflix, Inc.
<add> *
<add> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
<add> * compliance with the License. You may obtain a copy of the License at
<add> *
<add> * http://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing, software distributed under the License is
<add> * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
<add> * the License for the specific language governing permissions and limitations under the License.
<add> */
<add>
<add>package io.reactivex.internal.operators.observable;
<add>
<add>import io.reactivex.Flowable;
<add>import io.reactivex.Observable;
<add>import io.reactivex.Observer;
<add>import io.reactivex.TestHelper;
<add>import io.reactivex.functions.Consumer;
<add>import io.reactivex.observers.DefaultObserver;
<add>import io.reactivex.observers.TestObserver;
<add>import java.util.ArrayList;
<add>import java.util.concurrent.atomic.AtomicBoolean;
<add>import java.util.concurrent.atomic.AtomicInteger;
<add>import org.junit.Test;
<add>
<add>import static org.junit.Assert.assertEquals;
<add>import static org.junit.Assert.assertFalse;
<add>import static org.junit.Assert.assertTrue;
<add>import static org.junit.Assert.fail;
<add>import static org.mockito.Mockito.any;
<add>import static org.mockito.Mockito.never;
<add>import static org.mockito.Mockito.times;
<add>import static org.mockito.Mockito.verify;
<add>
<add>public class ObservableRangeLongTest {
<add> @Test
<add> public void testRangeStartAt2Count3() {
<add> Observer<Long> observer = TestHelper.mockObserver();
<add>
<add> Observable.rangeLong(2, 3).subscribe(observer);
<add>
<add> verify(observer, times(1)).onNext(2L);
<add> verify(observer, times(1)).onNext(3L);
<add> verify(observer, times(1)).onNext(4L);
<add> verify(observer, never()).onNext(5L);
<add> verify(observer, never()).onError(any(Throwable.class));
<add> verify(observer, times(1)).onComplete();
<add> }
<add>
<add> @Test
<add> public void testRangeUnsubscribe() {
<add> Observer<Long> observer = TestHelper.mockObserver();
<add>
<add> final AtomicInteger count = new AtomicInteger();
<add>
<add> Observable.rangeLong(1, 1000).doOnNext(new Consumer<Long>() {
<add> @Override
<add> public void accept(Long t1) {
<add> count.incrementAndGet();
<add> }
<add> })
<add> .take(3).subscribe(observer);
<add>
<add> verify(observer, times(1)).onNext(1L);
<add> verify(observer, times(1)).onNext(2L);
<add> verify(observer, times(1)).onNext(3L);
<add> verify(observer, never()).onNext(4L);
<add> verify(observer, never()).onError(any(Throwable.class));
<add> verify(observer, times(1)).onComplete();
<add> assertEquals(3, count.get());
<add> }
<add>
<add> @Test
<add> public void testRangeWithZero() {
<add> Observable.rangeLong(1L, 0L);
<add> }
<add>
<add> @Test
<add> public void testRangeWithOverflow2() {
<add> Observable.rangeLong(Long.MAX_VALUE, 0L);
<add> }
<add>
<add> @Test
<add> public void testRangeWithOverflow3() {
<add> Observable.rangeLong(1L, Long.MAX_VALUE);
<add> }
<add>
<add> @Test(expected = IllegalArgumentException.class)
<add> public void testRangeWithOverflow4() {
<add> Observable.rangeLong(2L, Long.MAX_VALUE);
<add> }
<add>
<add> @Test
<add> public void testRangeWithOverflow5() {
<add> assertFalse(Observable.rangeLong(Long.MIN_VALUE, 0).blockingIterable().iterator().hasNext());
<add> }
<add>
<add> @Test
<add> public void testNoBackpressure() {
<add> ArrayList<Long> list = new ArrayList<Long>(Flowable.bufferSize() * 2);
<add> for (long i = 1; i <= Flowable.bufferSize() * 2 + 1; i++) {
<add> list.add(i);
<add> }
<add>
<add> Observable<Long> o = Observable.rangeLong(1, list.size());
<add>
<add> TestObserver<Long> ts = new TestObserver<Long>();
<add>
<add> o.subscribe(ts);
<add>
<add> ts.assertValueSequence(list);
<add> ts.assertTerminated();
<add> }
<add>
<add> @Test
<add> public void testEmptyRangeSendsOnCompleteEagerlyWithRequestZero() {
<add> final AtomicBoolean completed = new AtomicBoolean(false);
<add> Observable.rangeLong(1L, 0L).subscribe(new DefaultObserver<Long>() {
<add>
<add> @Override
<add> public void onStart() {
<add>// request(0);
<add> }
<add>
<add> @Override
<add> public void onComplete() {
<add> completed.set(true);
<add> }
<add>
<add> @Override
<add> public void onError(Throwable e) {
<add>
<add> }
<add>
<add> @Override
<add> public void onNext(Long t) {
<add>
<add> }});
<add> assertTrue(completed.get());
<add> }
<add>
<add> @Test(timeout = 1000)
<add> public void testNearMaxValueWithoutBackpressure() {
<add> TestObserver<Long> ts = new TestObserver<Long>();
<add> Observable.rangeLong(Long.MAX_VALUE - 1L, 2L).subscribe(ts);
<add>
<add> ts.assertComplete();
<add> ts.assertNoErrors();
<add> ts.assertValues(Long.MAX_VALUE - 1, Long.MAX_VALUE);
<add> }
<add>
<add> @Test
<add> public void negativeCount() {
<add> try {
<add> Observable.rangeLong(1L, -1L);
<add> fail("Should have thrown IllegalArgumentException");
<add> } catch (IllegalArgumentException ex) {
<add> assertEquals("count >= 0 required but it was -1", ex.getMessage());
<add> }
<add> }
<add>
<add> @Test
<add> public void countOne() {
<add> Observable.rangeLong(5495454L, 1L)
<add> .test()
<add> .assertResult(5495454L);
<add> }
<add>} | 6 |
Javascript | Javascript | use common.fixtures module | 8d3e82fd6c722b8bd41eda828b982d89c2e6ce76 | <ide><path>test/parallel/test-https-agent-session-eviction.js
<ide> 'use strict';
<ide>
<ide> const common = require('../common');
<add>const { readKey } = require('../common/fixtures');
<ide>
<ide> if (!common.hasCrypto)
<ide> common.skip('missing crypto');
<ide>
<ide> const assert = require('assert');
<ide> const https = require('https');
<del>const fs = require('fs');
<ide> const SSL_OP_NO_TICKET = require('crypto').constants.SSL_OP_NO_TICKET;
<ide>
<ide> const options = {
<del> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`),
<del> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`),
<add> key: readKey('agent1-key.pem'),
<add> cert: readKey('agent1-cert.pem'),
<ide> secureOptions: SSL_OP_NO_TICKET
<ide> };
<ide> | 1 |
Text | Text | clarify `length` param in `buffer.write` | d03d9a05f0318604d2aa71a1c3dbd6cbca8894e4 | <ide><path>doc/api/buffer.md
<ide> added: v0.1.90
<ide> * `string` {string} String to write to `buf`.
<ide> * `offset` {integer} Number of bytes to skip before starting to write `string`.
<ide> **Default:** `0`.
<del>* `length` {integer} Maximum number of bytes to write. **Default:**
<del> `buf.length - offset`.
<add>* `length` {integer} Maximum number of bytes to write (written bytes will not
<add> exceed `buf.length - offset`). **Default:** `buf.length - offset`.
<ide> * `encoding` {string} The character encoding of `string`. **Default:** `'utf8'`.
<ide> * Returns: {integer} Number of bytes written.
<ide>
<ide> const len = buf.write('\u00bd + \u00bc = \u00be', 0);
<ide>
<ide> console.log(`${len} bytes: ${buf.toString('utf8', 0, len)}`);
<ide> // Prints: 12 bytes: ½ + ¼ = ¾
<add>
<add>const buffer = Buffer.alloc(10);
<add>
<add>const length = buffer.write('abcd', 8);
<add>
<add>console.log(`${length} bytes: ${buffer.toString('utf8', 8, 10)}`);
<add>// Prints: 2 bytes: ab
<ide> ```
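<add>
<add>In the second example, only 2 bytes are written because `buf.length - offset` is `10 - 8 = 2`; the remaining characters of `'abcd'` are dropped.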
<ide>
<ide> ### `buf.writeBigInt64BE(value[, offset])` | 1 |
Javascript | Javascript | improve the description | f4fe28bd92b35cbb558502697dfdd72bee435fee | <ide><path>src/ng/document.js
<ide> * @requires $window
<ide> *
<ide> * @description
<del> * A {@link angular.element jQuery (lite)}-wrapped reference to the browser's `window.document`
<del> * element.
<add> * A {@link angular.element jQuery or jqLite} wrapper for the browser's `window.document` element.
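<add> *
<add> * For example, `$document[0].title` reads the page title, since the wrapper exposes the raw `window.document` node at index `0`.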
<ide> */
<ide> function $DocumentProvider(){
<ide> this.$get = ['$window', function(window){ | 1 |
Python | Python | clarify behavior of genfromtxt names field | 0efac01e2b9d9ce40e508f14a1a5d53fef62fbc8 | <ide><path>numpy/lib/npyio.py
<ide> def genfromtxt(fname, dtype=float, comments='#', delimiter=None,
<ide> Which columns to read, with 0 being the first. For example,
<ide> ``usecols = (1, 4, 5)`` will extract the 2nd, 5th and 6th columns.
<ide> names : {None, True, str, sequence}, optional
<del> If `names` is True, the field names are read from the first valid line
<del> after the first `skip_header` lines.
<del> If `names` is a sequence or a single-string of comma-separated names,
<del> the names will be used to define the field names in a structured dtype.
<del> If `names` is None, the names of the dtype fields will be used, if any.
<add> If `names` is True, the field names are read from the first line after
<add> the first `skip_header` lines. This line can optionally be preceded
<add> by a comment delimiter. If `names` is a sequence or a single-string of
<add> comma-separated names, the names will be used to define the field names
<add> in a structured dtype. If `names` is None, the names of the dtype
<add> fields will be used, if any.
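<add> For example, with ``names=True`` a header line such as ``# a b c``
<add> yields the field names ``'a'``, ``'b'`` and ``'c'``.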
<ide> excludelist : sequence, optional
<ide> A list of names to exclude. This list is appended to the default list
<ide> ['return','file','print']. Excluded names are appended an underscore: | 1 |
PHP | PHP | fix a test | 1df406b5a689c51b424837e6e64b2e66a5ce8403 | <ide><path>tests/Foundation/FoundationApplicationTest.php
<ide> public function testExceptionHandlingSendsResponseFromCustomHandler()
<ide> public function testNoResponseFromCustomHandlerCallsKernelExceptionHandler()
<ide> {
<ide> $app = new Application;
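<add> // The exception handler consults the app.debug config value, so stub it here (inferred from this fix).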
<add> $app['config'] = array('app.debug' => false);
<ide> $exception = new Exception;
<ide> $errorHandler = m::mock('stdClass');
<ide> $exceptionHandler = m::mock('stdClass'); | 1 |
Python | Python | remove deprecated local_data_dir | 470cdf5bf909a0095195a763de366966981f5db1 | <ide><path>examples/InventoryCount/main.py
<ide> import inventoryCount as mainModule
<ide> import os
<del>from spacy.en import English, LOCAL_DATA_DIR
<del>data_dir = os.environ.get('SPACY_DATA', LOCAL_DATA_DIR)
<add>from spacy.en import English
<ide>
<ide> if __name__ == '__main__':
<ide> """
<ide><path>spacy/en/__init__.py
<ide> STOPWORDS = set(w for w in STOPWORDS.split() if w)
<ide>
<ide>
<del># This is deprecated as of v100
<del>LOCAL_DATA_DIR = path.join(path.dirname(__file__), 'data')
<del>
<del>
<ide> class English(Language):
<ide> lang = 'en'
<ide> | 2 |
PHP | PHP | apply fixes from styleci | 768d3212939dcebba54f9f3304d24dfe315953b7 | <ide><path>src/Illuminate/Broadcasting/Broadcasters/Broadcaster.php
<ide> protected function retrieveUser($request, $channel)
<ide> }
<ide>
<ide> /**
<del> * Retrieve options for a certain channel
<add> * Retrieve options for a certain channel.
<ide> *
<ide> * @param string $channel
<ide> * @return array
<ide> protected function retrieveChannelOptions($channel)
<ide> }
<ide>
<ide> /**
<del> * Check if channel name from request match a pattern from registered channels
<add> * Check if the channel name from the request matches a pattern from the registered channels.
<ide> *
<ide> * @param string $channel
<ide> * @param string $pattern
<ide><path>src/Illuminate/Broadcasting/Broadcasters/UsePusherChannelConventions.php
<ide> trait UsePusherChannelConventions
<ide> {
<ide> /**
<del> * Return true if channel is protected by authentication
<add> * Return true if channel is protected by authentication.
<ide> *
<ide> * @param string $channel
<ide> * @return bool
<ide> public function isGuardedChannel($channel)
<ide> }
<ide>
<ide> /**
<del> * Remove prefix from channel name
<add> * Remove prefix from channel name.
<ide> *
<ide> * @param string $channel
<ide> * @return string
<ide> public function normalizeChannelName($channel)
<ide> ? Str::replaceFirst('private-', '', $channel)
<ide> : Str::replaceFirst('presence-', '', $channel);
<ide> }
<add>
<ide> return $channel;
<ide> }
<ide> }
<ide><path>tests/Broadcasting/BroadcasterTest.php
<ide> public function testNotFoundThrowsHttpException()
<ide>
<ide> public function testCanRegisterChannelsWithoutOptions()
<ide> {
<del> $this->broadcaster->channel('somechannel', function () {});
<add> $this->broadcaster->channel('somechannel', function () {
<add> });
<ide> }
<ide>
<ide> public function testCanRegisterChannelsWithOptions()
<ide> {
<del> $options = [ 'a' => [ 'b', 'c' ] ];
<del> $this->broadcaster->channel('somechannel', function () {}, $options);
<add> $options = ['a' => ['b', 'c']];
<add> $this->broadcaster->channel('somechannel', function () {
<add> }, $options);
<ide> }
<ide>
<ide> public function testCanRetrieveChannelsOptions()
<ide> {
<del> $options = [ 'a' => [ 'b', 'c' ] ];
<del> $this->broadcaster->channel('somechannel', function () {}, $options);
<add> $options = ['a' => ['b', 'c']];
<add> $this->broadcaster->channel('somechannel', function () {
<add> }, $options);
<ide>
<ide> $this->assertEquals(
<ide> $options,
<ide> public function testCanRetrieveChannelsOptions()
<ide>
<ide> public function testCanRetrieveChannelsOptionsUsingAChannelNameContainingArgs()
<ide> {
<del> $options = [ 'a' => [ 'b', 'c' ] ];
<del> $this->broadcaster->channel('somechannel.{id}.test.{text}', function () {}, $options);
<add> $options = ['a' => ['b', 'c']];
<add> $this->broadcaster->channel('somechannel.{id}.test.{text}', function () {
<add> }, $options);
<ide>
<ide> $this->assertEquals(
<ide> $options,
<ide> public function testCanRetrieveChannelsOptionsUsingAChannelNameContainingArgs()
<ide>
<ide> public function testCanRetrieveChannelsOptionsWhenMultipleChannelsAreRegistered()
<ide> {
<del> $options = [ 'a' => [ 'b', 'c' ] ];
<del> $this->broadcaster->channel('somechannel', function () {});
<del> $this->broadcaster->channel('someotherchannel', function () {}, $options);
<add> $options = ['a' => ['b', 'c']];
<add> $this->broadcaster->channel('somechannel', function () {
<add> });
<add> $this->broadcaster->channel('someotherchannel', function () {
<add> }, $options);
<ide>
<ide> $this->assertEquals(
<ide> $options,
<ide> public function testCanRetrieveChannelsOptionsWhenMultipleChannelsAreRegistered(
<ide>
<ide> public function testDontRetrieveChannelsOptionsWhenChannelDoesntExists()
<ide> {
<del> $options = [ 'a' => [ 'b', 'c' ] ];
<del> $this->broadcaster->channel('somechannel', function () {}, $options);
<add> $options = ['a' => ['b', 'c']];
<add> $this->broadcaster->channel('somechannel', function () {
<add> }, $options);
<ide>
<ide> $this->assertEquals(
<ide> [],
<ide> public function testDontRetrieveChannelsOptionsWhenChannelDoesntExists()
<ide>
<ide> public function testRetrieveUserWithoutGuard()
<ide> {
<del> $this->broadcaster->channel('somechannel', function () {});
<add> $this->broadcaster->channel('somechannel', function () {
<add> });
<ide>
<ide> $request = m::mock(\Illuminate\Http\Request::class);
<ide> $request->shouldReceive('user')
<ide> public function testRetrieveUserWithoutGuard()
<ide>
<ide> public function testRetrieveUserWithOneGuardUsingAStringForSpecifyingGuard()
<ide> {
<del> $this->broadcaster->channel('somechannel', function () {}, ['guards' => 'myguard']);
<add> $this->broadcaster->channel('somechannel', function () {
<add> }, ['guards' => 'myguard']);
<ide>
<ide> $request = m::mock(\Illuminate\Http\Request::class);
<ide> $request->shouldReceive('user')
<ide> public function testRetrieveUserWithOneGuardUsingAStringForSpecifyingGuard()
<ide>
<ide> public function testRetrieveUserWithMultipleGuardsAndRespectGuardsOrder()
<ide> {
<del> $this->broadcaster->channel('somechannel', function () {}, ['guards' => ['myguard1', 'myguard2']]);
<del> $this->broadcaster->channel('someotherchannel', function () {}, ['guards' => ['myguard2', 'myguard1']]);
<del>
<add> $this->broadcaster->channel('somechannel', function () {
<add> }, ['guards' => ['myguard1', 'myguard2']]);
<add> $this->broadcaster->channel('someotherchannel', function () {
<add> }, ['guards' => ['myguard2', 'myguard1']]);
<ide>
<ide> $request = m::mock(\Illuminate\Http\Request::class);
<ide> $request->shouldReceive('user')
<ide> public function testRetrieveUserWithMultipleGuardsAndRespectGuardsOrder()
<ide>
<ide> public function testRetrieveUserDontUseDefaultGuardWhenOneGuardSpecified()
<ide> {
<del> $this->broadcaster->channel('somechannel', function () {}, ['guards' => 'myguard']);
<add> $this->broadcaster->channel('somechannel', function () {
<add> }, ['guards' => 'myguard']);
<ide>
<ide> $request = m::mock(\Illuminate\Http\Request::class);
<ide> $request->shouldReceive('user')
<ide> public function testRetrieveUserDontUseDefaultGuardWhenOneGuardSpecified()
<ide>
<ide> public function testRetrieveUserDontUseDefaultGuardWhenMultipleGuardsSpecified()
<ide> {
<del> $this->broadcaster->channel('somechannel', function () {}, ['guards' => ['myguard1', 'myguard2']]);
<add> $this->broadcaster->channel('somechannel', function () {
<add> }, ['guards' => ['myguard1', 'myguard2']]);
<ide>
<ide> $request = m::mock(\Illuminate\Http\Request::class);
<ide> $request->shouldReceive('user')
<ide> public function testChannelNameMatchPattern($channel, $pattern, $shouldMatch)
<ide> $this->assertEquals($shouldMatch, $this->broadcaster->channelNameMatchesPattern($channel, $pattern));
<ide> }
<ide>
<del> public function channelNameMatchPatternProvider() {
<add> public function channelNameMatchPatternProvider()
<add> {
<ide> return [
<ide> ['something', 'something', true],
<ide> ['something.23', 'something.{id}', true],
<ide> public function join($user, BroadcasterTestEloquentModelStub $model, $nonModel)
<ide>
<ide> class DummyUser
<ide> {
<del>
<ide> }
<ide><path>tests/Broadcasting/PusherBroadcasterTest.php
<ide>
<ide> namespace Illuminate\Tests\Broadcasting;
<ide>
<del>use Illuminate\Broadcasting\Broadcasters\PusherBroadcaster;
<ide> use Mockery as m;
<ide> use PHPUnit\Framework\TestCase;
<add>use Illuminate\Broadcasting\Broadcasters\PusherBroadcaster;
<ide>
<ide> class PusherBroadcasterTest extends TestCase
<ide> {
<ide> public function setUp()
<ide>
<ide> public function testAuthCallValidAuthenticationResponseWithPrivateChannelWhenCallbackReturnTrue()
<ide> {
<del> $this->broadcaster->channel('test', function() {
<add> $this->broadcaster->channel('test', function () {
<ide> return true;
<ide> });
<ide>
<ide> public function testAuthCallValidAuthenticationResponseWithPrivateChannelWhenCal
<ide> */
<ide> public function testAuthThrowAccessDeniedHttpExceptionWithPrivateChannelWhenCallbackReturnFalse()
<ide> {
<del> $this->broadcaster->channel('test', function() {
<add> $this->broadcaster->channel('test', function () {
<ide> return false;
<ide> });
<ide>
<ide> public function testAuthThrowAccessDeniedHttpExceptionWithPrivateChannelWhenCall
<ide> */
<ide> public function testAuthThrowAccessDeniedHttpExceptionWithPrivateChannelWhenRequestUserNotFound()
<ide> {
<del> $this->broadcaster->channel('test', function() {
<add> $this->broadcaster->channel('test', function () {
<ide> return true;
<ide> });
<ide>
<ide> public function testAuthThrowAccessDeniedHttpExceptionWithPrivateChannelWhenRequ
<ide> public function testAuthCallValidAuthenticationResponseWithPresenceChannelWhenCallbackReturnAnArray()
<ide> {
<ide> $returnData = [1, 2, 3, 4];
<del> $this->broadcaster->channel('test', function() use ($returnData) {
<add> $this->broadcaster->channel('test', function () use ($returnData) {
<ide> return $returnData;
<ide> });
<ide>
<ide> public function testAuthCallValidAuthenticationResponseWithPresenceChannelWhenCa
<ide> */
<ide> public function testAuthThrowAccessDeniedHttpExceptionWithPresenceChannelWhenCallbackReturnNull()
<ide> {
<del> $this->broadcaster->channel('test', function() {
<del> return;
<add> $this->broadcaster->channel('test', function () {
<ide> });
<ide>
<ide> $this->broadcaster->auth(
<ide> public function testAuthThrowAccessDeniedHttpExceptionWithPresenceChannelWhenCal
<ide> */
<ide> public function testAuthThrowAccessDeniedHttpExceptionWithPresenceChannelWhenRequestUserNotFound()
<ide> {
<del> $this->broadcaster->channel('test', function() {
<add> $this->broadcaster->channel('test', function () {
<ide> return [1, 2, 3, 4];
<ide> });
<ide>
<ide> public function testValidAuthenticationResponseCallPusherSocketAuthMethodWithPri
<ide> $request = $this->getMockRequestWithUserForChannel('private-test');
<ide>
<ide> $data = [
<del> 'auth' => 'abcd:efgh'
<add> 'auth' => 'abcd:efgh',
<ide> ];
<ide>
<ide> $this->pusher->shouldReceive('socket_auth')
<ide><path>tests/Broadcasting/RedisBroadcasterTest.php
<ide>
<ide> namespace Illuminate\Tests\Broadcasting;
<ide>
<del>use Illuminate\Broadcasting\Broadcasters\RedisBroadcaster;
<ide> use Mockery as m;
<ide> use PHPUnit\Framework\TestCase;
<add>use Illuminate\Broadcasting\Broadcasters\RedisBroadcaster;
<ide>
<ide> class RedisBroadcasterTest extends TestCase
<ide> {
<ide> public function tearDown()
<ide>
<ide> public function testAuthCallValidAuthenticationResponseWithPrivateChannelWhenCallbackReturnTrue()
<ide> {
<del> $this->broadcaster->channel('test', function() {
<add> $this->broadcaster->channel('test', function () {
<ide> return true;
<ide> });
<ide>
<ide> public function testAuthCallValidAuthenticationResponseWithPrivateChannelWhenCal
<ide> */
<ide> public function testAuthThrowAccessDeniedHttpExceptionWithPrivateChannelWhenCallbackReturnFalse()
<ide> {
<del> $this->broadcaster->channel('test', function() {
<add> $this->broadcaster->channel('test', function () {
<ide> return false;
<ide> });
<ide>
<ide> public function testAuthThrowAccessDeniedHttpExceptionWithPrivateChannelWhenCall
<ide> */
<ide> public function testAuthThrowAccessDeniedHttpExceptionWithPrivateChannelWhenRequestUserNotFound()
<ide> {
<del> $this->broadcaster->channel('test', function() {
<add> $this->broadcaster->channel('test', function () {
<ide> return true;
<ide> });
<ide>
<ide> public function testAuthThrowAccessDeniedHttpExceptionWithPrivateChannelWhenRequ
<ide> public function testAuthCallValidAuthenticationResponseWithPresenceChannelWhenCallbackReturnAnArray()
<ide> {
<ide> $returnData = [1, 2, 3, 4];
<del> $this->broadcaster->channel('test', function() use ($returnData) {
<add> $this->broadcaster->channel('test', function () use ($returnData) {
<ide> return $returnData;
<ide> });
<ide>
<ide> public function testAuthCallValidAuthenticationResponseWithPresenceChannelWhenCa
<ide> */
<ide> public function testAuthThrowAccessDeniedHttpExceptionWithPresenceChannelWhenCallbackReturnNull()
<ide> {
<del> $this->broadcaster->channel('test', function() {
<del> return;
<add> $this->broadcaster->channel('test', function () {
<ide> });
<ide>
<ide> $this->broadcaster->auth(
<ide> public function testAuthThrowAccessDeniedHttpExceptionWithPresenceChannelWhenCal
<ide> */
<ide> public function testAuthThrowAccessDeniedHttpExceptionWithPresenceChannelWhenRequestUserNotFound()
<ide> {
<del> $this->broadcaster->channel('test', function() {
<add> $this->broadcaster->channel('test', function () {
<ide> return [1, 2, 3, 4];
<ide> });
<ide>
<ide> public function testValidAuthenticationResponseWithPresenceChannel()
<ide> ]),
<ide> $this->broadcaster->validAuthenticationResponse($request, [
<ide> 'a' => 'b',
<del> 'c' => 'd'
<add> 'c' => 'd',
<ide> ])
<ide> );
<ide> }
<ide><path>tests/Broadcasting/UsePusherChannelsNamesTest.php
<ide>
<ide> namespace Illuminate\Tests\Broadcasting;
<ide>
<del>use Illuminate\Broadcasting\Broadcasters\Broadcaster;
<del>use Illuminate\Broadcasting\Broadcasters\UsePusherChannelConventions;
<ide> use Mockery as m;
<ide> use PHPUnit\Framework\TestCase;
<add>use Illuminate\Broadcasting\Broadcasters\Broadcaster;
<add>use Illuminate\Broadcasting\Broadcasters\UsePusherChannelConventions;
<ide>
<ide> class UsePusherChannelConventionsTest extends TestCase
<ide> {
<ide> public function channelsProvider()
<ide> foreach ($prefixesInfos as $prefixInfos) {
<ide> foreach ($channels as $channel) {
<ide> $tests[] = [
<del> $prefixInfos['prefix'] . $channel,
<add> $prefixInfos['prefix'].$channel,
<ide> $channel,
<ide> $prefixInfos['guarded'],
<ide> ];
<ide> }
<ide> }
<ide>
<del> $tests[] = ['private-private-test' , 'private-test', true];
<del> $tests[] = ['private-presence-test' , 'presence-test', true];
<del> $tests[] = ['presence-private-test' , 'private-test', true];
<del> $tests[] = ['presence-presence-test' , 'presence-test', true];
<del> $tests[] = ['public-test' , 'public-test', false];
<add> $tests[] = ['private-private-test', 'private-test', true];
<add> $tests[] = ['private-presence-test', 'presence-test', true];
<add> $tests[] = ['presence-private-test', 'private-test', true];
<add> $tests[] = ['presence-presence-test', 'presence-test', true];
<add> $tests[] = ['public-test', 'public-test', false];
<ide>
<ide> return $tests;
<ide> } | 6 |
Javascript | Javascript | fix typo in parsehooknames.js | 64931821a9df262f2b6475404d025bcff97c3e0a | <ide><path>packages/react-devtools-extensions/src/parseHookNames/parseHookNames.js
<ide> function findHookNames(
<ide> const position = sourceConsumer.originalPositionFor({
<ide> line: lineNumber,
<ide>
<del> // Column numbers are representated differently between tools/engines.
<add> // Column numbers are represented differently between tools/engines.
<ide> // Error.prototype.stack columns are 1-based (like most IDEs) but ASTs are 0-based.
<ide> // For more info see https://github.com/facebook/react/issues/21792#issuecomment-873171991
<ide> column: columnNumber - 1,
<ide> async function parseSourceAST(
<ide> const {source} = sourceConsumer.originalPositionFor({
<ide> line: lineNumber,
<ide>
<del> // Column numbers are representated differently between tools/engines.
<add> // Column numbers are represented differently between tools/engines.
<ide> // Error.prototype.stack columns are 1-based (like most IDEs) but ASTs are 0-based.
<ide> // For more info see https://github.com/facebook/react/issues/21792#issuecomment-873171991
<ide> column: columnNumber - 1, | 1 |
Ruby | Ruby | use a stylesheet instead of a javascript in test | e63695535e5ce20f1cc75bcebbfed698eceeba64 | <ide><path>railties/test/application/assets_test.rb
<ide> class User < ActiveRecord::Base; raise 'should not be reached'; end
<ide> end
<ide>
<ide> test "the manifest file should be saved by default in the same assets folder" do
<del> app_file "app/assets/javascripts/application.js", "alert();"
<add> app_file "app/assets/stylesheets/test.css", "a{color: red}"
<ide> add_to_config "config.assets.prefix = '/x'"
<ide>
<ide> precompile!
<ide>
<ide> manifest = Dir["#{app_path}/public/x/.sprockets-manifest-*.json"].first
<ide> assets = ActiveSupport::JSON.decode(File.read(manifest))
<del> assert_match(/application-([0-z]+)\.js/, assets["assets"]["application.js"])
<add> assert_match(/test-([0-z]+)\.css/, assets["assets"]["test.css"])
<ide> end
<ide>
<ide> test "assets do not require any assets group gem when manifest file is present" do | 1 |
Javascript | Javascript | add unique glyph names for cff fonts | 8a596ef5d5347f9342cfbde1b1174d38359cbd1c | <ide><path>src/core/cff_parser.js
<ide> var CFFStandardStrings = [
<ide> 'Black', 'Bold', 'Book', 'Light', 'Medium', 'Regular', 'Roman', 'Semibold'
<ide> ];
<ide>
<add>const NUM_STANDARD_CFF_STRINGS = 391;
<add>
<ide> var CFFParser = (function CFFParserClosure() {
<ide> var CharstringValidationData = [
<ide> null,
<ide> var CFFStrings = (function CFFStringsClosure() {
<ide> }
<ide> CFFStrings.prototype = {
<ide> get: function CFFStrings_get(index) {
<del> if (index >= 0 && index <= 390) {
<add> if (index >= 0 && index <= (NUM_STANDARD_CFF_STRINGS - 1)) {
<ide> return CFFStandardStrings[index];
<ide> }
<del> if (index - 391 <= this.strings.length) {
<del> return this.strings[index - 391];
<add> if (index - NUM_STANDARD_CFF_STRINGS <= this.strings.length) {
<add> return this.strings[index - NUM_STANDARD_CFF_STRINGS];
<ide> }
<ide> return CFFStandardStrings[0];
<ide> },
<add> getSID: function CFFStrings_getSID(str) {
<add> let index = CFFStandardStrings.indexOf(str);
<add> if (index !== -1) {
<add> return index;
<add> }
<add> index = this.strings.indexOf(str);
<add> if (index !== -1) {
<add> return index + NUM_STANDARD_CFF_STRINGS;
<add> }
<add> return -1;
<add> },
<ide> add: function CFFStrings_add(value) {
<del> this.strings.push(value);
<add> return this.strings.push(value) + NUM_STANDARD_CFF_STRINGS - 1;
<ide> },
<ide> get count() {
<ide> return this.strings.length;
<ide> var CFFCompiler = (function CFFCompilerClosure() {
<ide> output.add(encoding);
<ide> }
<ide> }
<del> var charset = this.compileCharset(cff.charset);
<add> var charset = this.compileCharset(cff.charset, cff.charStrings.count,
<add> cff.strings, cff.isCIDFont);
<ide> topDictTracker.setEntryLocation('charset', [output.length], output);
<ide> output.add(charset);
<ide>
<ide> var CFFCompiler = (function CFFCompilerClosure() {
<ide> }
<ide> return this.compileIndex(charStringsIndex);
<ide> },
<del> compileCharset: function CFFCompiler_compileCharset(charset) {
<del> let length = 1 + (this.cff.charStrings.count - 1) * 2;
<del> // The contents of the charset doesn't matter, it's just there to make
<del> // freetype happy.
<del> let out = new Uint8Array(length);
<add> compileCharset: function CFFCompiler_compileCharset(charset, numGlyphs,
<add> strings, isCIDFont) {
<add>      // Freetype requires the number of charset strings to be correct, and
<add>      // MacOS requires a valid mapping for printing.
<add> let out;
<add> let numGlyphsLessNotDef = numGlyphs - 1;
<add> if (isCIDFont) {
<add>        // In a CID font, the charset is a mapping of CIDs, not SIDs, so just
<add>        // create an identity mapping.
<add> out = new Uint8Array([
<add> 2, // format
<add> 0, // first CID upper byte
<add> 0, // first CID lower byte
<add> (numGlyphsLessNotDef >> 8) & 0xFF,
<add> numGlyphsLessNotDef & 0xFF,
<add> ]);
<add> } else {
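<add>        // Non-CID charsets use format 0: a one-byte format tag followed by
<add>        // one 16-bit SID per glyph, excluding .notdef.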
<add> let length = 1 + numGlyphsLessNotDef * 2;
<add> out = new Uint8Array(length);
<add> out[0] = 0; // format 0
<add> let charsetIndex = 0;
<add> let numCharsets = charset.charset.length;
<add> for (let i = 1; i < out.length; i += 2) {
<add> let sid = 0;
<add> if (charsetIndex < numCharsets) {
<add> let name = charset.charset[charsetIndex++];
<add> sid = strings.getSID(name);
<add> if (sid === -1) {
<add> sid = 0;
<add> warn(`Couldn't find ${name} in CFF strings`);
<add> }
<add> }
<add> out[i] = (sid >> 8) & 0xFF;
<add> out[i + 1] = sid & 0xFF;
<add> }
<add> }
<ide> return this.compileTypedArray(out);
<ide> },
<ide> compileEncoding: function CFFCompiler_compileEncoding(encoding) {
<ide><path>src/core/fonts.js
<ide> var Type1Font = (function Type1FontClosure() {
<ide> var i, ii;
<ide> for (i = 0; i < count; i++) {
<ide> var index = CFFStandardStrings.indexOf(charstrings[i].glyphName);
<del> // TODO: Insert the string and correctly map it. Previously it was
<del> // thought mapping names that aren't in the standard strings to .notdef
<del> // was fine, however in issue818 when mapping them all to .notdef the
<del> // adieresis glyph no longer worked.
<ide> if (index === -1) {
<del> index = 0;
<add> index = strings.add(charstrings[i].glyphName);
<ide> }
<del> charsetArray.push((index >> 8) & 0xff, index & 0xff);
<add> charsetArray.push(index);
<ide> }
<del> cff.charset = new CFFCharset(false, 0, [], charsetArray);
<add> cff.charset = new CFFCharset(false, 0, charsetArray);
<ide>
<ide> var charStringsIndex = new CFFIndex();
<ide> charStringsIndex.add([0x8B, 0x0E]); // .notdef
<ide><path>test/unit/cff_parser_spec.js
<ide> */
<ide>
<ide> import {
<del> CFFCompiler, CFFFDSelect, CFFParser, CFFStrings
<add> CFFCharset, CFFCompiler, CFFFDSelect, CFFParser, CFFStrings
<ide> } from '../../src/core/cff_parser';
<ide> import { SEAC_ANALYSIS_ENABLED } from '../../src/core/fonts';
<ide> import { Stream } from '../../src/core/stream';
<ide> describe('CFFCompiler', function() {
<ide> ]);
<ide> });
<ide>
<add> it('compiles charset of CID font', function() {
<add> var charset = new CFFCharset();
<add> var c = new CFFCompiler();
<add> var numGlyphs = 7;
<add> var out = c.compileCharset(charset, numGlyphs, new CFFStrings(), true);
<add> // All CID charsets get turned into a simple format 2.
<add> expect(out).toEqual([
<add> 2, // format
<add> 0, // cid (high)
<add> 0, // cid (low)
<add> 0, // nLeft (high)
<add> numGlyphs - 1, // nLeft (low)
<add> ]);
<add> });
<add>
<add> it('compiles charset of non CID font', function() {
<add> var charset = new CFFCharset(false, 0, ['space', 'exclam']);
<add> var c = new CFFCompiler();
<add> var numGlyphs = 3;
<add> var out = c.compileCharset(charset, numGlyphs, new CFFStrings(), false);
<add> // All non-CID fonts use a format 0 charset.
<add> expect(out).toEqual([
<add> 0, // format
<add> 0, // sid of 'space' (high)
<add> 1, // sid of 'space' (low)
<add> 0, // sid of 'exclam' (high)
<add> 2, // sid of 'exclam' (low)
<add> ]);
<add> });
<add>
<ide> // TODO a lot more compiler tests
<ide> }); | 3 |
Javascript | Javascript | fix lint in local-cli | 8dbb025959a560302761e02bad807a3991f9a271 | <ide><path>local-cli/bundle/output/unbundle/as-assets.js
<ide> const writeSourceMap = require('./write-sourcemap');
<ide>
<ide> const {joinModules} = require('./util');
<ide>
<del>const MAGIC_UNBUNDLE_FILENAME = 'UNBUNDLE'; // must not start with a dot, as that won't go into the apk
<add>// must not start with a dot, as that won't go into the apk
<add>const MAGIC_UNBUNDLE_FILENAME = 'UNBUNDLE';
<ide> const MODULES_DIR = 'js-modules';
<ide>
<ide> /**
<ide><path>local-cli/library/library.js
<ide> function library(argv, config, args) {
<ide> console.log('Created library in', libraryDest);
<ide> console.log('Next Steps:');
<ide> console.log(' Link your library in Xcode:');
<del> console.log(' https://facebook.github.io/react-native/docs/linking-libraries-ios.html#content\n');
<add> console.log(
<add> ' https://facebook.github.io/react-native/docs/' +
<add> 'linking-libraries-ios.html#content\n'
<add> );
<ide> }
<ide>
<ide> module.exports = {
<ide><path>local-cli/runAndroid/runAndroid.js
<ide> function buildAndRun(args) {
<ide> gradleArgs.push('install');
<ide> }
<ide>
<del> // Append the build type to the current gradle install configuration. By default it will generate `installDebug`.
<del> gradleArgs[0] = gradleArgs[0] + args.configuration[0].toUpperCase() + args.configuration.slice(1);
<add> // Append the build type to the current gradle install configuration.
<add> // By default it will generate `installDebug`.
<add> gradleArgs[0] =
<add> gradleArgs[0] + args.configuration[0].toUpperCase() + args.configuration.slice(1);
<ide>
<ide> // Get the Android project directory.
<ide> const androidProjectDir = path.join(args.root, 'android');
<ide> function buildAndRun(args) {
<ide> 'Generating the bundle for the release build...'
<ide> ));
<ide>
<del> child_process.execSync(`react-native bundle --platform android --dev false --entry-file index.android.js --bundle-output ${androidProjectDir}/app/src/main/assets/index.android.bundle --assets-dest ${androidProjectDir}/app/src/main/res/`, {
<del> stdio: [process.stdin, process.stdout, process.stderr]
<del> });
<add> child_process.execSync(
<add> 'react-native bundle ' +
<add> '--platform android ' +
<add> '--dev false ' +
<add> '--entry-file index.android.js ' +
<add> `--bundle-output ${androidProjectDir}/app/src/main/assets/index.android.bundle ` +
<add> `--assets-dest ${androidProjectDir}/app/src/main/res/`,
<add> {
<add> stdio: [process.stdin, process.stdout, process.stderr],
<add> }
<add> );
<ide> }
<ide>
<ide> // Change to the Android directory.
<ide> function buildAndRun(args) {
<ide> : './gradlew';
<ide>
<ide> console.log(chalk.bold(
<del> `Building and installing the app on the device (cd android && ${cmd} ${gradleArgs.join(' ')})...`
<add> 'Building and installing the app on the device ' +
<add> `(cd android && ${cmd} ${gradleArgs.join(' ')})...`
<ide> ));
<ide>
<ide> child_process.execFileSync(cmd, gradleArgs, {
<ide> function buildAndRun(args) {
<ide> if (devices && devices.length > 0) {
<ide> devices.forEach((device) => {
<ide>
<del> const adbArgs = ['-s', device, 'shell', 'am', 'start', '-n', packageName + '/.MainActivity'];
<add> const adbArgs =
<add> ['-s', device, 'shell', 'am', 'start', '-n', packageName + '/.MainActivity'];
<ide>
<ide> console.log(chalk.bold(
<ide> `Starting the app on ${device} (${adbPath} ${adbArgs.join(' ')})...`
<ide> function startServerInNewWindow() {
<ide>
<ide> if (process.platform === 'darwin') {
<ide> if (yargV.open) {
<del> return child_process.spawnSync('open', ['-a', yargV.open, launchPackagerScript], procConfig);
<add> return (
<add> child_process.spawnSync('open', ['-a', yargV.open, launchPackagerScript], procConfig)
<add> );
<ide> }
<ide> return child_process.spawnSync('open', [launchPackagerScript], procConfig);
<ide>
<ide> module.exports = {
<ide> func: runAndroid,
<ide> options: [{
<ide> command: '--root [string]',
<del> description: 'Override the root directory for the android build (which contains the android directory)',
<add> description:
<add> 'Override the root directory for the android build ' +
<add> '(which contains the android directory)',
<ide> default: '',
<ide> }, {
<ide> command: '--flavor [string]',
<ide> description: '--flavor has been deprecated. Use --variant instead',
<ide> }, {
<ide> command: '--configuration [string]',
<del> description: 'You can use `Release` or `Debug`. This creates a build based on the selected configuration. If you want to use the `Release` configuration make sure you have the `signingConfig` configured at `app/build.gradle`.',
<add> description:
<add> 'You can use `Release` or `Debug`. ' +
<add> 'This creates a build based on the selected configuration. ' +
<add> 'If you want to use the `Release` configuration make sure you have the ' +
<add> '`signingConfig` configured at `app/build.gradle`.',
<ide> default: 'Debug'
<ide> }, {
<ide> command: '--variant [string]',
<ide><path>local-cli/upgrade/upgrade.js
<ide> function validateAndUpgrade() {
<ide> const projectName = packageJSON.name;
<ide> if (!projectName) {
<ide> warn(
<del> "Your project needs to have a name, declared in package.json, " +
<del> "such as \"name\": \"AwesomeApp\". Please add a project name. Aborting."
<add> 'Your project needs to have a name, declared in package.json, ' +
<add> 'such as "name": "AwesomeApp". Please add a project name. Aborting.'
<ide> );
<ide> return;
<ide> }
<ide>
<ide> const version = packageJSON.dependencies['react-native'];
<ide> if (!version) {
<ide> warn(
<del> "Your 'package.json' file doesn't seem to declare 'react-native' as " +
<del> "a dependency. Nothing to upgrade. Aborting."
<add> 'Your "package.json" file doesn\'t seem to declare "react-native" as ' +
<add> 'a dependency. Nothing to upgrade. Aborting.'
<ide> );
<ide> return;
<ide> }
<ide>
<ide> if (version === 'latest' || version === '*') {
<ide> warn(
<del> "Some major releases introduce breaking changes.\n" +
<del> "Please use a caret version number in your 'package.json' file \n" +
<del> "to avoid breakage. Use e.g. react-native: ^0.38.0. Aborting."
<add> 'Some major releases introduce breaking changes.\n' +
<add> 'Please use a caret version number in your "package.json" file \n' +
<add> 'to avoid breakage. Use e.g. react-native: ^0.38.0. Aborting.'
<ide> );
<ide> return;
<ide> }
<ide> function validateAndUpgrade() {
<ide>
<ide> if (!semver.satisfies(installed.version, version)) {
<ide> warn(
<del> "react-native version in 'package.json' doesn't match the installed version in 'node_modules'.\n" +
<del> "Try running 'npm install' to fix this. Aborting."
<add> 'react-native version in "package.json" doesn\'t match ' +
<add> 'the installed version in "node_modules".\n' +
<add> 'Try running "npm install" to fix this. Aborting.'
<ide> );
<ide> return;
<ide> }
<ide> function validateAndUpgrade() {
<ide> console.log(
<ide> 'Upgrading project to react-native v' + installed.version + '\n' +
<ide> 'Check out the release notes and breaking changes: ' +
<del> 'https://github.com/facebook/react-native/releases/tag/v' + semver.major(v) + '.' + semver.minor(v) + '.0'
<add> 'https://github.com/facebook/react-native/releases/tag/v' +
<add> semver.major(v) + '.' + semver.minor(v) + '.0'
<ide> );
<ide>
<ide> // >= v0.21.0, we require react to be a peer dependency
<ide> if (semver.gte(v, '0.21.0') && !packageJSON.dependencies.react) {
<ide> warn(
<del> "Your 'package.json' file doesn't seem to have 'react' as a dependency.\n" +
<del> "'react' was changed from a dependency to a peer dependency in react-native v0.21.0.\n" +
<del> "Therefore, it's necessary to include 'react' in your project's dependencies.\n" +
<del> "Please run 'npm install --save react', then re-run 'react-native upgrade'.\n"
<add> 'Your "package.json" file doesn\'t seem to have "react" as a dependency.\n' +
<add> '"react" was changed from a dependency to a peer dependency in react-native v0.21.0.\n' +
<add> 'Therefore, it\'s necessary to include "react" in your project\'s dependencies.\n' +
<add> 'Please run "npm install --save react", then re-run "react-native upgrade".\n'
<ide> );
<ide> return;
<ide> }
<ide>
<ide> if (semver.satisfies(v, '~0.26.0')) {
<ide> warn(
<del> "React Native 0.26 introduced some breaking changes to the native files on iOS. You can\n" +
<del> "perform them manually by checking the release notes or use \'rnpm\' to do it automatically.\n" +
<del> "Just run:\n" +
<del> "\'npm install -g rnpm && npm install [email protected] --save-dev\', then run \'rnpm upgrade\'"
<add> 'React Native 0.26 introduced some breaking changes to the native files on iOS. You can\n' +
<add> 'perform them manually by checking the release notes or use "rnpm" ' +
<add> 'to do it automatically.\n' +
<add> 'Just run:\n' +
<add> '"npm install -g rnpm && npm install [email protected] --save-dev", ' +
<add> 'then run "rnpm upgrade".'
<ide> );
<ide> }
<ide> | 4 |
Python | Python | remove colorambient from blender exporter | d31d47352a0febb4540a26e916c26324a0189382 | <ide><path>utils/exporters/blender/addons/io_three/constants.py
<ide> TIME = 'time'
<ide> KEYS = 'keys'
<ide>
<del>AMBIENT = 'ambient'
<ide> COLOR = 'color'
<ide> EMISSIVE = 'emissive'
<ide> SPECULAR = 'specular'
<ide>
<ide> SHADING = 'shading'
<ide> COLOR_DIFFUSE = 'colorDiffuse'
<del>COLOR_AMBIENT = 'colorAmbient'
<ide> COLOR_EMISSIVE = 'colorEmissive'
<ide> COLOR_SPECULAR = 'colorSpecular'
<ide> DBG_NAME = 'DbgName'
<ide><path>utils/exporters/blender/addons/io_three/exporter/api/material.py
<ide> def inner(name, *args, **kwargs):
<ide> return inner
<ide>
<ide>
<del>@_material
<del>def ambient_color(material):
<del> """
<del>
<del> :param material:
<del> :return: rgb value
<del> :rtype: tuple
<del>
<del> """
<del> logger.debug("material.ambient_color(%s)", material)
<del> diffuse = diffuse_color(material)
<del> return (material.ambient * diffuse[0],
<del> material.ambient * diffuse[1],
<del> material.ambient * diffuse[2])
<del>
<del>
<ide> @_material
<ide> def blending(material):
<ide> """
<ide><path>utils/exporters/blender/addons/io_three/exporter/api/mesh.py
<ide> def materials(mesh, options):
<ide>
<ide> logger.info("Compiling attributes for %s", mat.name)
<ide> attributes = {
<del> constants.COLOR_AMBIENT: material.ambient_color(mat),
<ide> constants.COLOR_EMISSIVE: material.emissive_color(mat),
<ide> constants.SHADING: material.shading(mat),
<ide> constants.OPACITY: material.opacity(mat),
<ide><path>utils/exporters/blender/addons/io_three/exporter/material.py
<ide> def _common_attributes(self):
<ide> self[constants.COLOR] = utilities.rgb2int(diffuse)
<ide>
<ide> if self[constants.TYPE] != constants.THREE_BASIC:
<del> ambient = api.material.ambient_color(self.node)
<del> self[constants.AMBIENT] = utilities.rgb2int(ambient)
<del>
<ide> emissive = api.material.emissive_color(self.node)
<ide> self[constants.EMISSIVE] = utilities.rgb2int(emissive)
<ide> | 4 |
Text | Text | add docs for commit --change | 5767548fa79342cee94dbb66b9c41671d4a1879f | <ide><path>docs/man/docker-commit.1.md
<ide> docker-commit - Create a new image from a container's changes
<ide> **docker commit**
<ide> [**-a**|**--author**[=*AUTHOR*]]
<ide> [**--help**]
<add>[**-c**|**--change**[=[]]]
<ide> [**-m**|**--message**[=*MESSAGE*]]
<ide> [**-p**|**--pause**[=*true*]]
<ide> CONTAINER [REPOSITORY[:TAG]]
<ide> Using an existing container's name or ID you can create a new image.
<ide> **-a**, **--author**=""
<ide> Author (e.g., "John Hannibal Smith <[email protected]>")
<ide>
<add>**-c**, **--change**=[]
<add> Apply a modification in Dockerfile format before committing the image.
<add>
<ide> **--help**
<ide> Print usage statement
<ide>
<ide> create a new image run docker ps to find the container's ID and then run:
<ide> # docker commit -m="Added Apache to Fedora base image" \
<ide> -a="A D Ministrator" 98bd7fc99854 fedora/fedora_httpd:20
<ide>
<add>## Modify configuration settings before committing the image
<add>Suppose an existing container was created without the necessary environment
<add>variable DEBUG set to "true". To create a new image based on that container
<add>with the correct DEBUG environment variable, run docker ps to find the
<add>container's ID and then run:
<add>
<add> # docker commit -c="ENV DEBUG true" 98bd7fc99854 debug-image
<add>
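<add>You can then confirm that the new image carries the setting by inspecting it:
<add>
<add>    # docker inspect -f "{{ .Config.Env }}" debug-image
<add>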
<ide> # HISTORY
<ide> April 2014, Originally compiled by William Henry (whenry at redhat dot com)
<ide> based on docker.com source material and in
<ide> June 2014, updated by Sven Dowideit <[email protected]>
<ide> July 2014, updated by Sven Dowideit <[email protected]>
<add>Oct 2014, updated by Daniel, Dao Quang Minh <daniel at nitrous dot io>
<ide><path>docs/sources/reference/commandline/cli.md
<ide> you refer to it on the command line.
<ide> Create a new image from a container's changes
<ide>
<ide> -a, --author="" Author (e.g., "John Hannibal Smith <[email protected]>")
<add> -c, --change=[] Apply a modification in Dockerfile format before committing the image
<ide> -m, --message="" Commit message
<ide> -p, --pause=true Pause container during commit
<ide>
<ide> If this behavior is undesired, set the 'p' option to false.
<ide> REPOSITORY TAG ID CREATED VIRTUAL SIZE
<ide> SvenDowideit/testimage version3 f5283438590d 16 seconds ago 335.7 MB
<ide>
<add>#### Commit an existing container with new configurations
<add>
<add> $ sudo docker ps
<add> ID IMAGE COMMAND CREATED STATUS PORTS
<add> c3f279d17e0a ubuntu:12.04 /bin/bash 7 days ago Up 25 hours
<add> 197387f1b436 ubuntu:12.04 /bin/bash 7 days ago Up 25 hours
<add> $ sudo docker inspect -f "{{ .Config.Env }}" c3f279d17e0a
<add> [HOME=/ PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin]
<add> $ sudo docker commit --change "ENV DEBUG true" c3f279d17e0a SvenDowideit/testimage:version3
<add> f5283438590d
<add> $ sudo docker inspect -f "{{ .Config.Env }}" f5283438590d
<add> [HOME=/ PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin DEBUG=true]
<add>
<ide> ## cp
<ide>
<ide> Copy files/folders from a container's filesystem to the host | 2 |
Javascript | Javascript | fix wrong variable name | 9ecb2847374ba334ad6e42a1752065569abe90f1 | <ide><path>script/vsts/get-release-version.js
<ide> async function getReleaseVersion () {
<ide> buildBranch.startsWith('electron-') ||
<ide> buildBranch === 'master' && !process.env.SYSTEM_PULLREQUEST_PULLREQUESTNUMBER
<ide> console.log(`##vso[task.setvariable variable=IsReleaseBranch;isOutput=true]${isReleaseBranch}`)
<del> console.log(`##vso[task.setvariable variable=IsSignedBuildBranch;isOutput=true]${isSignedBuildBranch}`)
<add> console.log(`##vso[task.setvariable variable=IsSignedBuildBranch;isOutput=true]${isSignedZipBranch}`)
<ide> }
<ide>
<ide> getReleaseVersion() | 1 |
Ruby | Ruby | change query strategy based on adapter | e7f5317ff9696e7e38c56d403ec822ee94aa8e24 | <ide><path>activerecord/lib/active_record/connection_adapters/abstract/database_statements.rb
<ide> def to_sql(arel, binds = [])
<ide> end
<ide> end
<ide>
<add> # This is used in the StatementCache object. It returns an object that
<add> # can be used to query the database repeatedly.
<add> def cacheable_query(arel) # :nodoc:
<add> ActiveRecord::StatementCache.query self, visitor, arel.ast
<add> end
<add>
<ide> # Returns an ActiveRecord::Result instance.
<ide> def select_all(arel, name = nil, binds = [])
<ide> select(to_sql(arel, binds), name, binds)
<ide><path>activerecord/lib/active_record/connection_adapters/mysql2_adapter.rb
<ide> def initialize(connection, logger, connection_options, config)
<ide> configure_connection
<ide> end
<ide>
<add> def cacheable_query(arel)
<add> ActiveRecord::StatementCache.partial_query self, visitor, arel.ast
<add> end
<add>
<ide> MAX_INDEX_LENGTH_FOR_UTF8MB4 = 191
<ide> def initialize_schema_migrations_table
<ide> if @config[:encoding] == 'utf8mb4'
<ide><path>activerecord/lib/active_record/statement_cache.rb
<ide> module ActiveRecord
<ide> class StatementCache
<ide> Substitute = Struct.new :name
<ide>
<add> class Query
<add> def initialize(connection, sql)
<add> @connection = connection
<add> @sql = sql
<add> end
<add>
<add> def sql_for(binds)
<add> @sql
<add> end
<add> end
<add>
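<add>    # Like Query, but the cached SQL contains "?" placeholders that are
<add>    # replaced with freshly quoted bind values each time the statement runs.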
<add> class PartialQuery < Query
<add> def sql_for(binds)
<add> @sql.gsub(/\?/) { @connection.quote(*binds.shift.reverse) }
<add> end
<add> end
<add>
<add> def self.query(connection, visitor, ast)
<add> Query.new connection, visitor.accept(ast)
<add> end
<add>
<add> def self.partial_query(connection, visitor, ast)
<add> sql = visitor.accept(ast) { "?" }
<add> PartialQuery.new connection, sql
<add> end
<add>
<ide> class Params
<ide> def [](name); Substitute.new name; end
<ide> end
<ide> def initialize(block = Proc.new)
<ide> @sql = nil
<ide> @binds = nil
<ide> @block = block
<add> @query_builder = nil
<ide> @params = Params.new
<ide> end
<ide>
<ide> def execute(params)
<ide> rel = relation @params
<ide>
<del> arel = rel.arel
<del> klass = rel.klass
<del> bv = binds rel
<add> arel = rel.arel
<add> klass = rel.klass
<add> bind_map = binds rel
<add> bind_values = bind_map.bind params
<ide>
<del> klass.find_by_sql sql(klass, arel, bv), bv.bind(params)
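<add>      # The builder is adapter-specific: the generic Query reuses the cached
<add>      # SQL as-is, while mysql2's PartialQuery inlines quoted binds per call.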
<add> builder = query_builder klass.connection, arel
<add> sql = builder.sql_for bind_values
<add>
<add> klass.find_by_sql sql, bind_values
<ide> end
<ide> alias :call :execute
<ide>
<ide> def binds(rel)
<ide> @binds || @mutex.synchronize { @binds ||= BindMap.new rel.bind_values }
<ide> end
<ide>
<add> def query_builder(connection, arel)
<add> @query_builder || @mutex.synchronize {
<add> @query_builder ||= connection.cacheable_query(arel)
<add> }
<add> end
<add>
<ide> def sql(klass, arel, bv)
<ide> @sql || @mutex.synchronize {
<ide> @sql ||= klass.connection.to_sql arel, bv
<ide><path>activerecord/test/cases/statement_cache_test.rb
<ide> module ActiveRecord
<ide> class StatementCacheTest < ActiveRecord::TestCase
<ide> def setup
<del> skip if current_adapter?(:Mysql2Adapter)
<ide> @connection = ActiveRecord::Base.connection
<ide> end
<ide> | 4 |
Ruby | Ruby | check mirror urls | 8848857cead43e54a0d2fde85d902096160d95bb | <ide><path>Library/Homebrew/cmd/audit.rb
<ide> def audit_formula_urls f
<ide>
<ide> urls = [(f.url rescue nil), (f.head rescue nil)].reject {|p| p.nil?}
<ide>
<add> f.mirrors.each do |m|
<add> mirror = m.values_at :url
<add> urls << (mirror.to_s rescue nil)
<add> end
<add>
<ide> # Check SourceForge urls
<ide> urls.each do |p|
<ide> # Is it a filedownload (instead of svnroot) | 1 |
Java | Java | add client http request and response mocks | c6081031400ddaf1424751ca75c17d572ff808aa | <ide><path>spring-web/src/test/java/org/springframework/web/client/reactive/test/MockClientHttpRequest.java
<add>/*
<add> * Copyright 2002-2016 the original author or authors.
<add> *
<add> * Licensed under the Apache License, Version 2.0 (the "License");
<add> * you may not use this file except in compliance with the License.
<add> * You may obtain a copy of the License at
<add> *
<add> * http://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing, software
<add> * distributed under the License is distributed on an "AS IS" BASIS,
<add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<add> * See the License for the specific language governing permissions and
<add> * limitations under the License.
<add> */
<add>
<add>package org.springframework.web.client.reactive.test;
<add>
<add>import java.net.URI;
<add>
<add>import org.reactivestreams.Publisher;
<add>import reactor.core.publisher.Flux;
<add>import reactor.core.publisher.Mono;
<add>
<add>import org.springframework.core.io.buffer.DataBuffer;
<add>import org.springframework.core.io.buffer.DataBufferFactory;
<add>import org.springframework.core.io.buffer.DefaultDataBufferFactory;
<add>import org.springframework.http.HttpMethod;
<add>import org.springframework.http.client.reactive.AbstractClientHttpRequest;
<add>import org.springframework.http.client.reactive.ClientHttpRequest;
<add>
<add>/**
<add> * Mock implementation of {@link ClientHttpRequest}.
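<add> * <p>Captures the body {@code Publisher} handed to {@code writeWith} or
<add> * {@code writeAndFlushWith}, so tests can make assertions against it.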
<add> * @author Brian Clozel
<add> */
<add>public class MockClientHttpRequest extends AbstractClientHttpRequest {
<add>
<add> private HttpMethod httpMethod;
<add>
<add> private URI uri;
<add>
<add> private final DataBufferFactory bufferFactory = new DefaultDataBufferFactory();
<add>
<add> private Publisher<DataBuffer> body;
<add>
<add> private Publisher<Publisher<DataBuffer>> bodyWithFlushes;
<add>
<add>
<add> public MockClientHttpRequest() {
<add> }
<add>
<add> public MockClientHttpRequest(HttpMethod httpMethod, String uri) {
<add> this(httpMethod, (uri != null ? URI.create(uri) : null));
<add> }
<add>
<add> public MockClientHttpRequest(HttpMethod httpMethod, URI uri) {
<add> super();
<add> this.httpMethod = httpMethod;
<add> this.uri = uri;
<add> }
<add>
<add> @Override
<add> public HttpMethod getMethod() {
<add> return this.httpMethod;
<add> }
<add>
<add> public MockClientHttpRequest setMethod(HttpMethod httpMethod) {
<add> this.httpMethod = httpMethod;
<add> return this;
<add> }
<add>
<add> @Override
<add> public URI getURI() {
<add> return this.uri;
<add> }
<add>
<add> public MockClientHttpRequest setUri(String uri) {
<add> this.uri = URI.create(uri);
<add> return this;
<add> }
<add>
<add> public MockClientHttpRequest setUri(URI uri) {
<add> this.uri = uri;
<add> return this;
<add> }
<add>
<add> @Override
<add> public DataBufferFactory bufferFactory() {
<add> return this.bufferFactory;
<add> }
<add>
<add> @Override
<add> public Mono<Void> writeWith(Publisher<DataBuffer> body) {
<add> this.body = body;
<add> return applyBeforeCommit().then(Flux.from(this.body).then());
<add> }
<add>
<add> @Override
<add> public Mono<Void> writeAndFlushWith(Publisher<Publisher<DataBuffer>> body) {
<add> this.bodyWithFlushes = body;
<add> return applyBeforeCommit().then(Flux.from(this.bodyWithFlushes).then());
<add> }
<add>
<add> public Publisher<DataBuffer> getBody() {
<add> return body;
<add> }
<add>
<add> public Publisher<Publisher<DataBuffer>> getBodyWithFlush() {
<add> return bodyWithFlushes;
<add> }
<add>
<add> @Override
<add> public Mono<Void> setComplete() {
<add> return applyBeforeCommit().then();
<add> }
<add>
<add> @Override
<add> protected void writeHeaders() { }
<add>
<add> @Override
<add> protected void writeCookies() { }
<add>}
<ide><path>spring-web/src/test/java/org/springframework/web/client/reactive/test/MockClientHttpResponse.java
<add>/*
<add> * Copyright 2002-2016 the original author or authors.
<add> *
<add> * Licensed under the Apache License, Version 2.0 (the "License");
<add> * you may not use this file except in compliance with the License.
<add> * You may obtain a copy of the License at
<add> *
<add> * http://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing, software
<add> * distributed under the License is distributed on an "AS IS" BASIS,
<add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<add> * See the License for the specific language governing permissions and
<add> * limitations under the License.
<add> */
<add>
<add>package org.springframework.web.client.reactive.test;
<add>
<add>import java.nio.ByteBuffer;
<add>import java.nio.charset.Charset;
<add>import java.nio.charset.StandardCharsets;
<add>
<add>import org.reactivestreams.Publisher;
<add>import reactor.core.publisher.Flux;
<add>
<add>import org.springframework.core.io.buffer.DataBuffer;
<add>import org.springframework.core.io.buffer.DefaultDataBufferFactory;
<add>import org.springframework.http.HttpHeaders;
<add>import org.springframework.http.HttpStatus;
<add>import org.springframework.http.ResponseCookie;
<add>import org.springframework.http.client.reactive.ClientHttpResponse;
<add>import org.springframework.util.LinkedMultiValueMap;
<add>import org.springframework.util.MultiValueMap;
<add>
<add>/**
<add> * Mock implementation of {@link ClientHttpResponse}.
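<add> * <p>Status, headers, cookies and body are all settable, so tests can stub
<add> * a response without opening a real connection.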
<add> * @author Brian Clozel
<add> */
<add>public class MockClientHttpResponse implements ClientHttpResponse {
<add>
<add> private HttpStatus status;
<add>
<add> private final HttpHeaders headers = new HttpHeaders();
<add>
<add> private final MultiValueMap<String, ResponseCookie> cookies = new LinkedMultiValueMap<>();
<add>
<add> private Flux<DataBuffer> body = Flux.empty();
<add>
<add> @Override
<add> public HttpHeaders getHeaders() {
<add> return headers;
<add> }
<add>
<add> public MockClientHttpResponse addHeader(String name, String value) {
<add> getHeaders().add(name, value);
<add> return this;
<add> }
<add>
<add> public MockClientHttpResponse setHeader(String name, String value) {
<add> getHeaders().set(name, value);
<add> return this;
<add> }
<add>
<add> @Override
<add> public HttpStatus getStatusCode() {
<add> return this.status;
<add> }
<add>
<add> public void setStatus(HttpStatus status) {
<add> this.status = status;
<add> }
<add>
<add> @Override
<add> public Flux<DataBuffer> getBody() {
<add> return this.body;
<add> }
<add>
<add> public MockClientHttpResponse setBody(Publisher<DataBuffer> body) {
<add> this.body = Flux.from(body);
<add> return this;
<add> }
<add>
<add> public MockClientHttpResponse setBody(String body) {
<add> DataBuffer buffer = toDataBuffer(body, StandardCharsets.UTF_8);
<add> this.body = Flux.just(buffer);
<add> return this;
<add> }
<add>
<add> public MockClientHttpResponse setBody(String body, Charset charset) {
<add> DataBuffer buffer = toDataBuffer(body, charset);
<add> this.body = Flux.just(buffer);
<add> return this;
<add> }
<add>
<add> private DataBuffer toDataBuffer(String body, Charset charset) {
<add> byte[] bytes = body.getBytes(charset);
<add> ByteBuffer byteBuffer = ByteBuffer.wrap(bytes);
<add> return new DefaultDataBufferFactory().wrap(byteBuffer);
<add> }
<add>
<add> @Override
<add> public MultiValueMap<String, ResponseCookie> getCookies() {
<add> return this.cookies;
<add> }
<add>} | 2 |
Python | Python | improve padding side documentation | 9aeb0b9b8ad23edf4aa89ea5ad4139754fa6e49f | <ide><path>src/transformers/tokenization_utils.py
<ide> def encode(
<ide> - 'do_not_truncate': Does not truncate (raise an error if the input sequence is longer than max_length)
<ide> pad_to_max_length: if set to True, the returned sequences will be padded according to the model's padding side and
<ide> padding index, up to their max length. If no max length is specified, the padding is done up to the model's max length.
<del> The tokenizer padding sides are handled by the following strings:
<add>                The tokenizer padding sides are handled by the class attribute `padding_side`, which can be set to the following strings:
<ide> - 'left': pads on the left of the sequences
<ide> - 'right': pads on the right of the sequences
<ide> Defaults to False: no padding. | 1 |
Ruby | Ruby | make `postgresql_version` public | 8ad1eeafc2d4b0d96f0218b5272ea734564bd875 | <ide><path>activerecord/lib/active_record/connection_adapters/postgresql_adapter.rb
<ide> def column_name_for_operation(operation, node) # :nodoc:
<ide> "average" => "avg",
<ide> }
<ide>
<del> protected
<add> # Returns the version of the connected PostgreSQL server.
<add> def postgresql_version
<add> @connection.server_version
<add> end
<ide>
<del> # Returns the version of the connected PostgreSQL server.
<del> def postgresql_version
<del> @connection.server_version
<del> end
<add> protected
<ide>
<ide> # See http://www.postgresql.org/docs/current/static/errcodes-appendix.html
<ide> FOREIGN_KEY_VIOLATION = "23503" | 1 |
PHP | PHP | add new saveorfail and deleteorfail method | c7e09a483aaf00054addee6b1518ae24d9f1e04b | <ide><path>src/ORM/Exception/PersistenceFailedException.php
<add><?php
<add>/**
<add> * CakePHP(tm) : Rapid Development Framework (http://cakephp.org)
<add> * Copyright (c) Cake Software Foundation, Inc. (http://cakefoundation.org)
<add> *
<add> * Licensed under The MIT License
<add> * Redistributions of files must retain the above copyright notice.
<add> *
<add> * @copyright Copyright (c) Cake Software Foundation, Inc. (http://cakefoundation.org)
<add> * @since 3.4.0
<add> * @license http://www.opensource.org/licenses/mit-license.php MIT License
<add> */
<add>namespace Cake\ORM\Exception;
<add>
<add>use Cake\Core\Exception\Exception;
<add>
<add>/**
<add> * Used when a strict save or delete fails
<add> */
<add>class PersistenceFailedException extends Exception
<add>{
<add>
<add> protected $_messageTemplate = 'Entity %s failure.';
<add>}
<ide><path>src/ORM/Table.php
<ide> use Cake\ORM\Association\HasMany;
<ide> use Cake\ORM\Association\HasOne;
<ide> use Cake\ORM\Exception\MissingEntityException;
<add>use Cake\ORM\Exception\PersistenceFailedException;
<ide> use Cake\ORM\Exception\RolledbackTransactionException;
<ide> use Cake\ORM\Rule\IsUnique;
<ide> use Cake\Utility\Inflector;
<ide> public function save(EntityInterface $entity, $options = [])
<ide> return $success;
<ide> }
<ide>
<add> /**
<add>     * Try to save an entity or throw a PersistenceFailedException if the application rules check failed,
<add>     * the entity contains errors, or the save was aborted by a callback.
<add> *
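<add>     * A minimal usage sketch (the `$articles` table and `$data` are illustrative):
<add>     *
<add>     * ```
<add>     * $article = $articles->newEntity($data);
<add>     * $article = $articles->saveOrFail($article);
<add>     * ```
<add>     *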
<add> * @param \Cake\Datasource\EntityInterface $entity the entity to be saved
<add> * @param array|\ArrayAccess $options The options to use when saving.
<add> * @return \Cake\Datasource\EntityInterface
<add> * @throws \Cake\ORM\Exception\PersistenceFailedException When the entity couldn't be saved
<add> * @see \Cake\ORM\Table::save()
<add> */
<add> public function saveOrFail(EntityInterface $entity, $options = [])
<add> {
<add> $saved = $this->save($entity, $options);
<add> if ($saved === false) {
<add> throw new PersistenceFailedException(['save', $entity]);
<add> }
<add>
<add> return $saved;
<add> }
<add>
<ide> /**
<ide> * Performs the actual saving of an entity based on the passed options.
<ide> *
<ide> public function delete(EntityInterface $entity, $options = [])
<ide> return $success;
<ide> }
<ide>
<add> /**
<add> * Try to delete an entity or throw a PersistenceFailedException if the entity is new,
<add>     * has no primary key value, the application rules check failed, or the delete was aborted by a callback.
<add> *
<add> * @param \Cake\Datasource\EntityInterface $entity The entity to remove.
<add> * @param array|\ArrayAccess $options The options for the delete.
<add> * @return bool success
<add> * @throws \Cake\ORM\Exception\PersistenceFailedException
<add> * @see \Cake\ORM\Table::delete()
<add> */
<add> public function deleteOrFail(EntityInterface $entity, $options = [])
<add> {
<add> $deleted = $this->delete($entity, $options);
<add> if ($deleted === false) {
<add> throw new PersistenceFailedException(['delete', $entity]);
<add> }
<add>
<add> return $deleted;
<add> }
<add>
<ide> /**
<ide> * Perform the delete operation.
<ide> *
<ide><path>tests/TestCase/ORM/TableTest.php
<ide> public function testLoadIntoMany()
<ide> $this->assertEquals($expected, $result);
<ide> }
<ide>
<add> /**
<add>     * Tests that saveOrFail triggers an exception on an unsuccessful save
<add> *
<add> * @return void
<add> * @expectedException \Cake\ORM\Exception\PersistenceFailedException
<add> * @expectedExceptionMessage Entity save failure.
<add> */
<add> public function testSaveOrFail()
<add> {
<add> $entity = new Entity([
<add> 'foo' => 'bar'
<add> ]);
<add> $table = TableRegistry::get('users');
<add>
<add> $table->saveOrFail($entity);
<add>
<add> $row = $table->find('all')->where(['foo' => 'bar'])->toArray();
<add>        $this->assertSame([], $row);
<add> }
<add>
<add> /**
<add>     * Tests that deleteOrFail triggers an exception on an unsuccessful delete
<add> *
<add> * @return void
<add> * @expectedException \Cake\ORM\Exception\PersistenceFailedException
<add> * @expectedExceptionMessage Entity delete failure.
<add> */
<add> public function testDeleteOrFail()
<add> {
<add> $entity = new Entity([
<add> 'id' => 999
<add> ]);
<add> $table = TableRegistry::get('users');
<add>
<add>        $table->deleteOrFail($entity);
<add> }
<add>
<ide> /**
<ide> * Test getting the save options builder.
<ide> * | 3 |
Javascript | Javascript | fix pixel ratio | 3a619a826ce2427b824e062acd5da461e44eef7e | <ide><path>examples/js/nodes/utils/ResolutionNode.js
<ide> THREE.ResolutionNode.prototype.constructor = THREE.ResolutionNode;
<ide>
<ide> THREE.ResolutionNode.prototype.updateFrame = function( delta ) {
<ide>
<del> var size = this.renderer.getSize();
<add> var size = this.renderer.getSize(),
<add> pixelRatio = this.renderer.getPixelRatio();
<ide>
<del> this.x = size.width;
<del> this.y = size.height;
<add> this.x = size.width * pixelRatio;
<add> this.y = size.height * pixelRatio;
<ide>
<ide> }; | 1 |
Javascript | Javascript | add fixer for prefer-assert-iferror.js | d797775fb82caae5f721140625dfb0f984ce929e | <ide><path>test/parallel/test-eslint-prefer-assert-iferror.js
<ide> new RuleTester().run('prefer-assert-iferror', rule, {
<ide> ],
<ide> invalid: [
<ide> {
<del> code: 'if (err) throw err;',
<del> errors: [{ message: 'Use assert.ifError(err) instead.' }]
<add> code: 'require("assert");\n' +
<add> 'if (err) throw err;',
<add> errors: [{ message: 'Use assert.ifError(err) instead.' }],
<add> output: 'require("assert");\n' +
<add> 'assert.ifError(err);'
<ide> },
<ide> {
<del> code: 'if (error) { throw error; }',
<del> errors: [{ message: 'Use assert.ifError(error) instead.' }]
<add> code: 'require("assert");\n' +
<add> 'if (error) { throw error; }',
<add> errors: [{ message: 'Use assert.ifError(error) instead.' }],
<add> output: 'require("assert");\n' +
<add> 'assert.ifError(error);'
<ide> }
<ide> ]
<ide> });
<ide><path>tools/eslint-rules/prefer-assert-iferror.js
<ide>
<ide> 'use strict';
<ide>
<add>const utils = require('./rules-utils.js');
<add>
<ide> module.exports = {
<ide> create(context) {
<ide> const sourceCode = context.getSourceCode();
<add> var assertImported = false;
<ide>
<ide> function hasSameTokens(nodeA, nodeB) {
<ide> const aTokens = sourceCode.getTokens(nodeA);
<ide> module.exports = {
<ide> });
<ide> }
<ide>
<add> function checkAssertNode(node) {
<add> if (utils.isRequired(node, ['assert'])) {
<add> assertImported = true;
<add> }
<add> }
<add>
<ide> return {
<del> IfStatement(node) {
<add> 'CallExpression': (node) => checkAssertNode(node),
<add> 'IfStatement': (node) => {
<ide> const firstStatement = node.consequent.type === 'BlockStatement' ?
<ide> node.consequent.body[0] :
<ide> node.consequent;
<ide> module.exports = {
<ide> firstStatement.type === 'ThrowStatement' &&
<ide> hasSameTokens(node.test, firstStatement.argument)
<ide> ) {
<add> const argument = sourceCode.getText(node.test);
<ide> context.report({
<ide> node: firstStatement,
<ide> message: 'Use assert.ifError({{argument}}) instead.',
<del> data: { argument: sourceCode.getText(node.test) }
<add> data: { argument },
<add> fix: (fixer) => {
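<add>              // Only autofix when the file already requires `assert`;
<add>              // otherwise report the problem without a fix.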
<add> if (assertImported) {
<add> return fixer.replaceText(
<add> node,
<add> `assert.ifError(${argument});`
<add> );
<add> }
<add> }
<ide> });
<ide> }
<ide> } | 2 |
Javascript | Javascript | reuse memoized result from first pass | 5450dd409863b31fa7ef4dfcf8aeb06ac16c4c10 | <ide><path>packages/react-reconciler/src/ReactFiberBeginWork.new.js
<ide> function updateForwardRef(
<ide> renderLanes,
<ide> );
<ide> hasId = checkDidRenderIdHook();
<del> if (
<del> debugRenderPhaseSideEffectsForStrictMode &&
<del> workInProgress.mode & StrictLegacyMode
<del> ) {
<del> setIsStrictModeForDevtools(true);
<del> try {
<del> nextChildren = renderWithHooks(
<del> current,
<del> workInProgress,
<del> render,
<del> nextProps,
<del> ref,
<del> renderLanes,
<del> );
<del> hasId = checkDidRenderIdHook();
<del> } finally {
<del> setIsStrictModeForDevtools(false);
<del> }
<del> }
<ide> setIsRendering(false);
<ide> } else {
<ide> nextChildren = renderWithHooks(
<ide> function updateFunctionComponent(
<ide> renderLanes,
<ide> );
<ide> hasId = checkDidRenderIdHook();
<del> if (
<del> debugRenderPhaseSideEffectsForStrictMode &&
<del> workInProgress.mode & StrictLegacyMode
<del> ) {
<del> setIsStrictModeForDevtools(true);
<del> try {
<del> nextChildren = renderWithHooks(
<del> current,
<del> workInProgress,
<del> Component,
<del> nextProps,
<del> context,
<del> renderLanes,
<del> );
<del> hasId = checkDidRenderIdHook();
<del> } finally {
<del> setIsStrictModeForDevtools(false);
<del> }
<del> }
<ide> setIsRendering(false);
<ide> } else {
<ide> nextChildren = renderWithHooks(
<ide> function mountIndeterminateComponent(
<ide> getComponentNameFromType(Component) || 'Unknown',
<ide> );
<ide> }
<del>
<del> if (
<del> debugRenderPhaseSideEffectsForStrictMode &&
<del> workInProgress.mode & StrictLegacyMode
<del> ) {
<del> setIsStrictModeForDevtools(true);
<del> try {
<del> value = renderWithHooks(
<del> null,
<del> workInProgress,
<del> Component,
<del> props,
<del> context,
<del> renderLanes,
<del> );
<del> hasId = checkDidRenderIdHook();
<del> } finally {
<del> setIsStrictModeForDevtools(false);
<del> }
<del> }
<ide> }
<ide>
<ide> if (getIsHydrating() && hasId) {
<ide><path>packages/react-reconciler/src/ReactFiberBeginWork.old.js
<ide> function updateForwardRef(
<ide> renderLanes,
<ide> );
<ide> hasId = checkDidRenderIdHook();
<del> if (
<del> debugRenderPhaseSideEffectsForStrictMode &&
<del> workInProgress.mode & StrictLegacyMode
<del> ) {
<del> setIsStrictModeForDevtools(true);
<del> try {
<del> nextChildren = renderWithHooks(
<del> current,
<del> workInProgress,
<del> render,
<del> nextProps,
<del> ref,
<del> renderLanes,
<del> );
<del> hasId = checkDidRenderIdHook();
<del> } finally {
<del> setIsStrictModeForDevtools(false);
<del> }
<del> }
<ide> setIsRendering(false);
<ide> } else {
<ide> nextChildren = renderWithHooks(
<ide> function updateFunctionComponent(
<ide> renderLanes,
<ide> );
<ide> hasId = checkDidRenderIdHook();
<del> if (
<del> debugRenderPhaseSideEffectsForStrictMode &&
<del> workInProgress.mode & StrictLegacyMode
<del> ) {
<del> setIsStrictModeForDevtools(true);
<del> try {
<del> nextChildren = renderWithHooks(
<del> current,
<del> workInProgress,
<del> Component,
<del> nextProps,
<del> context,
<del> renderLanes,
<del> );
<del> hasId = checkDidRenderIdHook();
<del> } finally {
<del> setIsStrictModeForDevtools(false);
<del> }
<del> }
<ide> setIsRendering(false);
<ide> } else {
<ide> nextChildren = renderWithHooks(
<ide> function mountIndeterminateComponent(
<ide> getComponentNameFromType(Component) || 'Unknown',
<ide> );
<ide> }
<del>
<del> if (
<del> debugRenderPhaseSideEffectsForStrictMode &&
<del> workInProgress.mode & StrictLegacyMode
<del> ) {
<del> setIsStrictModeForDevtools(true);
<del> try {
<del> value = renderWithHooks(
<del> null,
<del> workInProgress,
<del> Component,
<del> props,
<del> context,
<del> renderLanes,
<del> );
<del> hasId = checkDidRenderIdHook();
<del> } finally {
<del> setIsStrictModeForDevtools(false);
<del> }
<del> }
<ide> }
<ide>
<ide> if (getIsHydrating() && hasId) {
<ide><path>packages/react-reconciler/src/ReactFiberHooks.new.js
<ide> import {
<ide> enableUseMemoCacheHook,
<ide> enableUseEventHook,
<ide> enableLegacyCache,
<add> debugRenderPhaseSideEffectsForStrictMode,
<ide> } from 'shared/ReactFeatureFlags';
<ide> import {
<ide> REACT_CONTEXT_TYPE,
<ide> import {
<ide> ConcurrentMode,
<ide> DebugTracingMode,
<ide> StrictEffectsMode,
<add> StrictLegacyMode,
<ide> } from './ReactTypeOfMode';
<ide> import {
<ide> NoLane,
<ide> import {
<ide> warnAboutMultipleRenderersDEV,
<ide> } from './ReactMutableSource.new';
<ide> import {logStateUpdateScheduled} from './DebugTracing';
<del>import {markStateUpdateScheduled} from './ReactFiberDevToolsHook.new';
<add>import {
<add> markStateUpdateScheduled,
<add> setIsStrictModeForDevtools,
<add>} from './ReactFiberDevToolsHook.new';
<ide> import {createCache} from './ReactFiberCacheComponent.new';
<ide> import {
<ide> createUpdate as createLegacyQueueUpdate,
<ide> import {
<ide> trackUsedThenable,
<ide> checkIfUseWrappedInTryCatch,
<ide> } from './ReactFiberThenable.new';
<add>import type {ThenableState} from './ReactFiberThenable.new';
<ide>
<ide> const {ReactCurrentDispatcher, ReactCurrentBatchConfig} = ReactSharedInternals;
<ide>
<ide> let didScheduleRenderPhaseUpdate: boolean = false;
<ide> // TODO: Maybe there's some way to consolidate this with
<ide> // `didScheduleRenderPhaseUpdate`. Or with `numberOfReRenders`.
<ide> let didScheduleRenderPhaseUpdateDuringThisPass: boolean = false;
<add>let shouldDoubleInvokeUserFnsInHooksDEV: boolean = false;
<ide> // Counts the number of useId hooks in this component.
<ide> let localIdCounter: number = 0;
<ide> // Counts number of `use`-d thenables
<ide> export function renderWithHooks<Props, SecondArg>(
<ide> // If this is a replay, restore the thenable state from the previous attempt.
<ide> const prevThenableState = getSuspendedThenableState();
<ide> prepareThenableState(prevThenableState);
<add>
<add> // In Strict Mode, during development, user functions are double invoked to
<add> // help detect side effects. The logic for how this is implemented in
<add> // hook components is a bit complex, so let's break it down.
<add> //
<add> // We will invoke the entire component function twice. However, during the
<add> // second invocation of the component, the hook state from the first
<add> // invocation will be reused. That means things like `useMemo` functions won't
<add> // run again, because the deps will match and the memoized result will
<add> // be reused.
<add> //
<add> // We want memoized functions to run twice, too, so to account for this, user
<add> // functions are double invoked during the *first* invocation of the component
<add> // function, and are *not* double invoked during the second invocation:
<add> //
<add> // - First execution of component function: user functions are double invoked
<add> // - Second execution of component function (in Strict Mode, during
<add> // development): user functions are not double invoked.
<add> //
<add> // This is intentional for a few reasons; most importantly, it's because of
<add> // how `use` works when something suspends: it reuses the promise that was
<add> // passed during the first attempt. This is itself a form of memoization.
<add> // We need to be able to memoize the reactive inputs to the `use` call using
<add> // a hook (i.e. `useMemo`), which means, the reactive inputs to `use` must
<add> // come from the same component invocation as the output.
<add> //
<add> // There are plenty of tests to ensure this behavior is correct.
<add> const shouldDoubleRenderDEV =
<add> __DEV__ &&
<add> debugRenderPhaseSideEffectsForStrictMode &&
<add> (workInProgress.mode & StrictLegacyMode) !== NoMode;
<add>
<add> shouldDoubleInvokeUserFnsInHooksDEV = shouldDoubleRenderDEV;
<ide> let children = Component(props, secondArg);
<add> shouldDoubleInvokeUserFnsInHooksDEV = false;
<ide>
<ide> // Check if there was a render phase update
<ide> if (didScheduleRenderPhaseUpdateDuringThisPass) {
<del> // Keep rendering in a loop for as long as render phase updates continue to
<del> // be scheduled. Use a counter to prevent infinite loops.
<del> let numberOfReRenders: number = 0;
<del> do {
<del> didScheduleRenderPhaseUpdateDuringThisPass = false;
<del> localIdCounter = 0;
<del> thenableIndexCounter = 0;
<del>
<del> if (numberOfReRenders >= RE_RENDER_LIMIT) {
<del> throw new Error(
<del> 'Too many re-renders. React limits the number of renders to prevent ' +
<del> 'an infinite loop.',
<del> );
<del> }
<del>
<del> numberOfReRenders += 1;
<del> if (__DEV__) {
<del> // Even when hot reloading, allow dependencies to stabilize
<del> // after first render to prevent infinite render phase updates.
<del> ignorePreviousDependencies = false;
<del> }
<del>
<del> // Start over from the beginning of the list
<del> currentHook = null;
<del> workInProgressHook = null;
<del>
<del> workInProgress.updateQueue = null;
<del>
<del> if (__DEV__) {
<del> // Also validate hook order for cascading updates.
<del> hookTypesUpdateIndexDev = -1;
<del> }
<del>
<del> ReactCurrentDispatcher.current = __DEV__
<del> ? HooksDispatcherOnRerenderInDEV
<del> : HooksDispatcherOnRerender;
<add> // Keep rendering until the component stabilizes (there are no more render
<add> // phase updates).
<add> children = renderWithHooksAgain(
<add> workInProgress,
<add> Component,
<add> props,
<add> secondArg,
<add> prevThenableState,
<add> );
<add> }
<ide>
<del> prepareThenableState(prevThenableState);
<del> children = Component(props, secondArg);
<del> } while (didScheduleRenderPhaseUpdateDuringThisPass);
<add> if (shouldDoubleRenderDEV) {
<add> // In development, components are invoked twice to help detect side effects.
<add> setIsStrictModeForDevtools(true);
<add> try {
<add> children = renderWithHooksAgain(
<add> workInProgress,
<add> Component,
<add> props,
<add> secondArg,
<add> prevThenableState,
<add> );
<add> } finally {
<add> setIsStrictModeForDevtools(false);
<add> }
<ide> }
<ide>
<ide> // We can assume the previous dispatcher is always this one, since we set it
<ide> export function renderWithHooks<Props, SecondArg>(
<ide> return children;
<ide> }
<ide>
<add>function renderWithHooksAgain<Props, SecondArg>(
<add> workInProgress: Fiber,
<add> Component: (p: Props, arg: SecondArg) => any,
<add> props: Props,
<add> secondArg: SecondArg,
<add> prevThenableState: ThenableState | null,
<add>) {
<add> // This is used to perform another render pass. It's used when setState is
<add> // called during render, and for double invoking components in Strict Mode
<add> // during development.
<add> //
<add> // The state from the previous pass is reused whenever possible. So, state
<add> // updates that were already processed are not processed again, and memoized
<add> // functions (`useMemo`) are not invoked again.
<add> //
<add> // Keep rendering in a loop for as long as render phase updates continue to
<add> // be scheduled. Use a counter to prevent infinite loops.
<add> let numberOfReRenders: number = 0;
<add> let children;
<add> do {
<add> didScheduleRenderPhaseUpdateDuringThisPass = false;
<add> localIdCounter = 0;
<add> thenableIndexCounter = 0;
<add>
<add> if (numberOfReRenders >= RE_RENDER_LIMIT) {
<add> throw new Error(
<add> 'Too many re-renders. React limits the number of renders to prevent ' +
<add> 'an infinite loop.',
<add> );
<add> }
<add>
<add> numberOfReRenders += 1;
<add> if (__DEV__) {
<add> // Even when hot reloading, allow dependencies to stabilize
<add> // after first render to prevent infinite render phase updates.
<add> ignorePreviousDependencies = false;
<add> }
<add>
<add> // Start over from the beginning of the list
<add> currentHook = null;
<add> workInProgressHook = null;
<add>
<add> workInProgress.updateQueue = null;
<add>
<add> if (__DEV__) {
<add> // Also validate hook order for cascading updates.
<add> hookTypesUpdateIndexDev = -1;
<add> }
<add>
<add> ReactCurrentDispatcher.current = __DEV__
<add> ? HooksDispatcherOnRerenderInDEV
<add> : HooksDispatcherOnRerender;
<add>
<add> prepareThenableState(prevThenableState);
<add> children = Component(props, secondArg);
<add> } while (didScheduleRenderPhaseUpdateDuringThisPass);
<add> return children;
<add>}
<add>
<ide> export function checkDidRenderIdHook(): boolean {
<ide> // This should be called immediately after every renderWithHooks call.
<ide> // Conceptually, it's part of the return value of renderWithHooks; it's only a
<ide> function updateReducer<S, I, A>(
<ide> }
<ide>
<ide> // Process this update.
<add> const action = update.action;
<add> if (shouldDoubleInvokeUserFnsInHooksDEV) {
<add> reducer(newState, action);
<add> }
<ide> if (update.hasEagerState) {
<ide> // If this update is a state update (not a reducer) and was processed eagerly,
<ide> // we can use the eagerly computed state
<ide> newState = ((update.eagerState: any): S);
<ide> } else {
<del> const action = update.action;
<ide> newState = reducer(newState, action);
<ide> }
<ide> }
<ide> function mountMemo<T>(
<ide> ): T {
<ide> const hook = mountWorkInProgressHook();
<ide> const nextDeps = deps === undefined ? null : deps;
<add> if (shouldDoubleInvokeUserFnsInHooksDEV) {
<add> nextCreate();
<add> }
<ide> const nextValue = nextCreate();
<ide> hook.memoizedState = [nextValue, nextDeps];
<ide> return nextValue;
<ide> function updateMemo<T>(
<ide> }
<ide> }
<ide> }
<add> if (shouldDoubleInvokeUserFnsInHooksDEV) {
<add> nextCreate();
<add> }
<ide> const nextValue = nextCreate();
<ide> hook.memoizedState = [nextValue, nextDeps];
<ide> return nextValue;
<ide><path>packages/react-reconciler/src/ReactFiberHooks.old.js
<ide> import {
<ide> enableUseMemoCacheHook,
<ide> enableUseEventHook,
<ide> enableLegacyCache,
<add> debugRenderPhaseSideEffectsForStrictMode,
<ide> } from 'shared/ReactFeatureFlags';
<ide> import {
<ide> REACT_CONTEXT_TYPE,
<ide> import {
<ide> ConcurrentMode,
<ide> DebugTracingMode,
<ide> StrictEffectsMode,
<add> StrictLegacyMode,
<ide> } from './ReactTypeOfMode';
<ide> import {
<ide> NoLane,
<ide> import {
<ide> warnAboutMultipleRenderersDEV,
<ide> } from './ReactMutableSource.old';
<ide> import {logStateUpdateScheduled} from './DebugTracing';
<del>import {markStateUpdateScheduled} from './ReactFiberDevToolsHook.old';
<add>import {
<add> markStateUpdateScheduled,
<add> setIsStrictModeForDevtools,
<add>} from './ReactFiberDevToolsHook.old';
<ide> import {createCache} from './ReactFiberCacheComponent.old';
<ide> import {
<ide> createUpdate as createLegacyQueueUpdate,
<ide> import {
<ide> trackUsedThenable,
<ide> checkIfUseWrappedInTryCatch,
<ide> } from './ReactFiberThenable.old';
<add>import type {ThenableState} from './ReactFiberThenable.old';
<ide>
<ide> const {ReactCurrentDispatcher, ReactCurrentBatchConfig} = ReactSharedInternals;
<ide>
<ide> let didScheduleRenderPhaseUpdate: boolean = false;
<ide> // TODO: Maybe there's some way to consolidate this with
<ide> // `didScheduleRenderPhaseUpdate`. Or with `numberOfReRenders`.
<ide> let didScheduleRenderPhaseUpdateDuringThisPass: boolean = false;
<add>let shouldDoubleInvokeUserFnsInHooksDEV: boolean = false;
<ide> // Counts the number of useId hooks in this component.
<ide> let localIdCounter: number = 0;
<ide> // Counts number of `use`-d thenables
<ide> export function renderWithHooks<Props, SecondArg>(
<ide> // If this is a replay, restore the thenable state from the previous attempt.
<ide> const prevThenableState = getSuspendedThenableState();
<ide> prepareThenableState(prevThenableState);
<add>
<add> // In Strict Mode, during development, user functions are double invoked to
<add> // help detect side effects. The logic for how this is implemented in
<add> // hook components is a bit complex, so let's break it down.
<add> //
<add> // We will invoke the entire component function twice. However, during the
<add> // second invocation of the component, the hook state from the first
<add> // invocation will be reused. That means things like `useMemo` functions won't
<add> // run again, because the deps will match and the memoized result will
<add> // be reused.
<add> //
<add> // We want memoized functions to run twice, too, so to account for this, user
<add> // functions are double invoked during the *first* invocation of the component
<add> // function, and are *not* double invoked during the second invocation:
<add> //
<add> // - First execution of component function: user functions are double invoked
<add> // - Second execution of component function (in Strict Mode, during
<add> // development): user functions are not double invoked.
<add> //
<add> // This is intentional for a few reasons; most importantly, it's because of
<add> // how `use` works when something suspends: it reuses the promise that was
<add> // passed during the first attempt. This is itself a form of memoization.
<add> // We need to be able to memoize the reactive inputs to the `use` call using
<add> // a hook (i.e. `useMemo`), which means, the reactive inputs to `use` must
<add> // come from the same component invocation as the output.
<add> //
<add> // There are plenty of tests to ensure this behavior is correct.
<add> const shouldDoubleRenderDEV =
<add> __DEV__ &&
<add> debugRenderPhaseSideEffectsForStrictMode &&
<add> (workInProgress.mode & StrictLegacyMode) !== NoMode;
<add>
<add> shouldDoubleInvokeUserFnsInHooksDEV = shouldDoubleRenderDEV;
<ide> let children = Component(props, secondArg);
<add> shouldDoubleInvokeUserFnsInHooksDEV = false;
<ide>
<ide> // Check if there was a render phase update
<ide> if (didScheduleRenderPhaseUpdateDuringThisPass) {
<del> // Keep rendering in a loop for as long as render phase updates continue to
<del> // be scheduled. Use a counter to prevent infinite loops.
<del> let numberOfReRenders: number = 0;
<del> do {
<del> didScheduleRenderPhaseUpdateDuringThisPass = false;
<del> localIdCounter = 0;
<del> thenableIndexCounter = 0;
<del>
<del> if (numberOfReRenders >= RE_RENDER_LIMIT) {
<del> throw new Error(
<del> 'Too many re-renders. React limits the number of renders to prevent ' +
<del> 'an infinite loop.',
<del> );
<del> }
<del>
<del> numberOfReRenders += 1;
<del> if (__DEV__) {
<del> // Even when hot reloading, allow dependencies to stabilize
<del> // after first render to prevent infinite render phase updates.
<del> ignorePreviousDependencies = false;
<del> }
<del>
<del> // Start over from the beginning of the list
<del> currentHook = null;
<del> workInProgressHook = null;
<del>
<del> workInProgress.updateQueue = null;
<del>
<del> if (__DEV__) {
<del> // Also validate hook order for cascading updates.
<del> hookTypesUpdateIndexDev = -1;
<del> }
<del>
<del> ReactCurrentDispatcher.current = __DEV__
<del> ? HooksDispatcherOnRerenderInDEV
<del> : HooksDispatcherOnRerender;
<add> // Keep rendering until the component stabilizes (there are no more render
<add> // phase updates).
<add> children = renderWithHooksAgain(
<add> workInProgress,
<add> Component,
<add> props,
<add> secondArg,
<add> prevThenableState,
<add> );
<add> }
<ide>
<del> prepareThenableState(prevThenableState);
<del> children = Component(props, secondArg);
<del> } while (didScheduleRenderPhaseUpdateDuringThisPass);
<add> if (shouldDoubleRenderDEV) {
<add> // In development, components are invoked twice to help detect side effects.
<add> setIsStrictModeForDevtools(true);
<add> try {
<add> children = renderWithHooksAgain(
<add> workInProgress,
<add> Component,
<add> props,
<add> secondArg,
<add> prevThenableState,
<add> );
<add> } finally {
<add> setIsStrictModeForDevtools(false);
<add> }
<ide> }
<ide>
<ide> // We can assume the previous dispatcher is always this one, since we set it
<ide> export function renderWithHooks<Props, SecondArg>(
<ide> return children;
<ide> }
<ide>
<add>function renderWithHooksAgain<Props, SecondArg>(
<add> workInProgress: Fiber,
<add> Component: (p: Props, arg: SecondArg) => any,
<add> props: Props,
<add> secondArg: SecondArg,
<add> prevThenableState: ThenableState | null,
<add>) {
<add> // This is used to perform another render pass. It's used when setState is
<add> // called during render, and for double invoking components in Strict Mode
<add> // during development.
<add> //
<add> // The state from the previous pass is reused whenever possible. So, state
<add> // updates that were already processed are not processed again, and memoized
<add> // functions (`useMemo`) are not invoked again.
<add> //
<add> // Keep rendering in a loop for as long as render phase updates continue to
<add> // be scheduled. Use a counter to prevent infinite loops.
<add> let numberOfReRenders: number = 0;
<add> let children;
<add> do {
<add> didScheduleRenderPhaseUpdateDuringThisPass = false;
<add> localIdCounter = 0;
<add> thenableIndexCounter = 0;
<add>
<add> if (numberOfReRenders >= RE_RENDER_LIMIT) {
<add> throw new Error(
<add> 'Too many re-renders. React limits the number of renders to prevent ' +
<add> 'an infinite loop.',
<add> );
<add> }
<add>
<add> numberOfReRenders += 1;
<add> if (__DEV__) {
<add> // Even when hot reloading, allow dependencies to stabilize
<add> // after first render to prevent infinite render phase updates.
<add> ignorePreviousDependencies = false;
<add> }
<add>
<add> // Start over from the beginning of the list
<add> currentHook = null;
<add> workInProgressHook = null;
<add>
<add> workInProgress.updateQueue = null;
<add>
<add> if (__DEV__) {
<add> // Also validate hook order for cascading updates.
<add> hookTypesUpdateIndexDev = -1;
<add> }
<add>
<add> ReactCurrentDispatcher.current = __DEV__
<add> ? HooksDispatcherOnRerenderInDEV
<add> : HooksDispatcherOnRerender;
<add>
<add> prepareThenableState(prevThenableState);
<add> children = Component(props, secondArg);
<add> } while (didScheduleRenderPhaseUpdateDuringThisPass);
<add> return children;
<add>}
<add>
<ide> export function checkDidRenderIdHook(): boolean {
<ide> // This should be called immediately after every renderWithHooks call.
<ide> // Conceptually, it's part of the return value of renderWithHooks; it's only a
<ide> function updateReducer<S, I, A>(
<ide> }
<ide>
<ide> // Process this update.
<add> const action = update.action;
<add> if (shouldDoubleInvokeUserFnsInHooksDEV) {
<add> reducer(newState, action);
<add> }
<ide> if (update.hasEagerState) {
<ide> // If this update is a state update (not a reducer) and was processed eagerly,
<ide> // we can use the eagerly computed state
<ide> newState = ((update.eagerState: any): S);
<ide> } else {
<del> const action = update.action;
<ide> newState = reducer(newState, action);
<ide> }
<ide> }
<ide> function mountMemo<T>(
<ide> ): T {
<ide> const hook = mountWorkInProgressHook();
<ide> const nextDeps = deps === undefined ? null : deps;
<add> if (shouldDoubleInvokeUserFnsInHooksDEV) {
<add> nextCreate();
<add> }
<ide> const nextValue = nextCreate();
<ide> hook.memoizedState = [nextValue, nextDeps];
<ide> return nextValue;
<ide> function updateMemo<T>(
<ide> }
<ide> }
<ide> }
<add> if (shouldDoubleInvokeUserFnsInHooksDEV) {
<add> nextCreate();
<add> }
<ide> const nextValue = nextCreate();
<ide> hook.memoizedState = [nextValue, nextDeps];
<ide> return nextValue;
<ide><path>packages/react/src/__tests__/ReactStrictMode-test.js
<ide> let ReactDOMClient;
<ide> let ReactDOMServer;
<ide> let Scheduler;
<ide> let PropTypes;
<add>let act;
<add>let useMemo;
<add>let useState;
<add>let useReducer;
<ide>
<ide> const ReactFeatureFlags = require('shared/ReactFeatureFlags');
<ide>
<ide> describe('ReactStrictMode', () => {
<ide> ReactDOM = require('react-dom');
<ide> ReactDOMClient = require('react-dom/client');
<ide> ReactDOMServer = require('react-dom/server');
<add> act = require('jest-react').act;
<add> useMemo = React.useMemo;
<add> useState = React.useState;
<add> useReducer = React.useReducer;
<ide> });
<ide>
<ide> it('should appear in the client component stack', () => {
<ide> describe('ReactStrictMode', () => {
<ide> // But each time `state` should be the previous value
<ide> expect(instance.state.count).toBe(2);
<ide> });
<add>
<add> // @gate debugRenderPhaseSideEffectsForStrictMode
<add> it('double invokes useMemo functions', async () => {
<add> let log = [];
<add>
<add> function Uppercased({text}) {
<add> return useMemo(() => {
<add> const uppercased = text.toUpperCase();
<add> log.push('Compute toUpperCase: ' + uppercased);
<add> return uppercased;
<add> }, [text]);
<add> }
<add>
<add> const container = document.createElement('div');
<add> const root = ReactDOMClient.createRoot(container);
<add>
<add> // Mount
<add> await act(() => {
<add> root.render(
<add> <React.StrictMode>
<add> <Uppercased text="hello" />
<add> </React.StrictMode>,
<add> );
<add> });
<add> expect(container.textContent).toBe('HELLO');
<add> expect(log).toEqual([
<add> 'Compute toUpperCase: HELLO',
<add> 'Compute toUpperCase: HELLO',
<add> ]);
<add>
<add> log = [];
<add>
<add> // Update
<add> await act(() => {
<add> root.render(
<add> <React.StrictMode>
<add> <Uppercased text="goodbye" />
<add> </React.StrictMode>,
<add> );
<add> });
<add> expect(container.textContent).toBe('GOODBYE');
<add> expect(log).toEqual([
<add> 'Compute toUpperCase: GOODBYE',
<add> 'Compute toUpperCase: GOODBYE',
<add> ]);
<add> });
<add>
<add> // @gate debugRenderPhaseSideEffectsForStrictMode
<add> it('double invokes useMemo functions and reuses the memoized result', async () => {
<add> let log = [];
<add> function Uppercased({text}) {
<add> const memoizedResult = useMemo(() => {
<add> const uppercased = text.toUpperCase();
<add> log.push('Compute toUpperCase: ' + uppercased);
<add> return {uppercased};
<add> }, [text]);
<add>
<add> // Push this to the log so we can check whether the same memoized result
<add> // is returned during both invocations.
<add> log.push(memoizedResult);
<add>
<add> return memoizedResult.uppercased;
<add> }
<add>
<add> const container = document.createElement('div');
<add> const root = ReactDOMClient.createRoot(container);
<add>
<add> // Mount
<add> await act(() => {
<add> root.render(
<add> <React.StrictMode>
<add> <Uppercased text="hello" />
<add> </React.StrictMode>,
<add> );
<add> });
<add> expect(container.textContent).toBe('HELLO');
<add> expect(log).toEqual([
<add> 'Compute toUpperCase: HELLO',
<add> 'Compute toUpperCase: HELLO',
<add> {uppercased: 'HELLO'},
<add> {uppercased: 'HELLO'},
<add> ]);
<add>
<add> // Even though the memoized function is invoked twice, the same object
<add> // is returned both times.
<add> expect(log[2]).toBe(log[3]);
<add>
<add> log = [];
<add>
<add> // Update
<add> await act(() => {
<add> root.render(
<add> <React.StrictMode>
<add> <Uppercased text="goodbye" />
<add> </React.StrictMode>,
<add> );
<add> });
<add> expect(container.textContent).toBe('GOODBYE');
<add> expect(log).toEqual([
<add> 'Compute toUpperCase: GOODBYE',
<add> 'Compute toUpperCase: GOODBYE',
<add> {uppercased: 'GOODBYE'},
<add> {uppercased: 'GOODBYE'},
<add> ]);
<add>
<add> // Even though the memoized function is invoked twice, the same object
<add> // is returned both times.
<add> expect(log[2]).toBe(log[3]);
<add> });
<add>
<add> // @gate debugRenderPhaseSideEffectsForStrictMode
<add> it('double invokes setState updater functions', async () => {
<add> const log = [];
<add>
<add> let setCount;
<add> function App() {
<add> const [count, _setCount] = useState(0);
<add> setCount = _setCount;
<add> return count;
<add> }
<add>
<add> const container = document.createElement('div');
<add> const root = ReactDOMClient.createRoot(container);
<add>
<add> await act(() => {
<add> root.render(
<add> <React.StrictMode>
<add> <App />
<add> </React.StrictMode>,
<add> );
<add> });
<add> expect(container.textContent).toBe('0');
<add>
<add> await act(() => {
<add> setCount(() => {
<add> log.push('Compute count: 1');
<add> return 1;
<add> });
<add> });
<add> expect(container.textContent).toBe('1');
<add> expect(log).toEqual(['Compute count: 1', 'Compute count: 1']);
<add> });
<add>
<add> // @gate debugRenderPhaseSideEffectsForStrictMode
<add> it('double invokes reducer functions', async () => {
<add> const log = [];
<add>
<add> function reducer(prevState, action) {
<add> log.push('Compute new state: ' + action);
<add> return action;
<add> }
<add>
<add> let dispatch;
<add> function App() {
<add> const [count, _dispatch] = useReducer(reducer, 0);
<add> dispatch = _dispatch;
<add> return count;
<add> }
<add>
<add> const container = document.createElement('div');
<add> const root = ReactDOMClient.createRoot(container);
<add>
<add> await act(() => {
<add> root.render(
<add> <React.StrictMode>
<add> <App />
<add> </React.StrictMode>,
<add> );
<add> });
<add> expect(container.textContent).toBe('0');
<add>
<add> await act(() => {
<add> dispatch(1);
<add> });
<add> expect(container.textContent).toBe('1');
<add> expect(log).toEqual(['Compute new state: 1', 'Compute new state: 1']);
<add> });
<ide> });
<ide>
<ide> describe('Concurrent Mode', () => { | 5 |
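For reference, here is a minimal framework-free sketch of the gating pattern this patch introduces: a module-level dev flag doubles user-function invocations on the first render pass only, while the memoized result from that pass is reused on the second. All names here (`renderWithDoubleInvoke`, `memo`, `devMode`) are illustrative, not React API.

```js
// Dev-only double invocation of user-supplied functions.
let shouldDoubleInvokeDEV = false;
const cache = new Map();

// Toy useMemo: when gated, runs `create` an extra time so side effects
// surface, but caches and returns the result of the final invocation.
function memo(key, create) {
  if (cache.has(key)) return cache.get(key);
  if (shouldDoubleInvokeDEV) create();
  const value = create();
  cache.set(key, value);
  return value;
}

function renderWithDoubleInvoke(componentFn, props, devMode) {
  shouldDoubleInvokeDEV = devMode;
  const children = componentFn(props); // user fns doubled in here
  shouldDoubleInvokeDEV = false;
  // The second pass (dev only) reuses memoized state, so user fns are
  // NOT doubled again -- mirroring renderWithHooksAgain above.
  return devMode ? componentFn(props) : children;
}

const logs = [];
const App = () => memo('upper', () => { logs.push('compute'); return 'HELLO'; });
console.log(renderWithDoubleInvoke(App, {}, true)); // HELLO
console.log(logs); // [ 'compute', 'compute' ] -- doubled exactly once
```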
PHP | PHP | remove unneeded variable | 29c64cb6c1d8cc712463c63fcf60c0a46fe3455e | <ide><path>src/Illuminate/Database/Concerns/ManagesTransactions.php
<ide> public function transaction(Closure $callback, $attempts = 1)
<ide> }
<ide>
<ide> try {
<del> $this->commit();
<add> if ($this->transactions == 1) {
<add> $this->getPdo()->commit();
<add> }
<add>
<add> $this->transactions = max(0, $this->transactions - 1);
<ide> } catch (Throwable $e) {
<ide> $this->handleCommitTransactionException(
<ide> $e, $currentAttempt, $attempts
<ide> public function transaction(Closure $callback, $attempts = 1)
<ide> continue;
<ide> }
<ide>
<add> $this->fireConnectionEvent('committed');
<add>
<ide> return $callbackResult;
<ide> }
<ide> }
<ide> public function commit()
<ide> */
<ide> protected function handleCommitTransactionException(Throwable $e, $currentAttempt, $maxAttempts)
<ide> {
<del> $this->transactions--;
<add> $this->transactions = max(0, $this->transactions - 1);
<ide>
<ide> if ($this->causedByConcurrencyError($e) &&
<ide> $currentAttempt < $maxAttempts) { | 1 |
Java | Java | add more information in ending marker tags | fe97458b03de3272a0fa67e5e123992397a68e4f | <ide><path>ReactAndroid/src/main/java/com/facebook/react/bridge/JavaModuleWrapper.java
<ide> public List<MethodDescriptor> getMethodDescriptors() {
<ide> try {
<ide> return Arguments.makeNativeMap(map);
<ide> } finally {
<del> ReactMarker.logMarker(CONVERT_CONSTANTS_END);
<add> ReactMarker.logMarker(CONVERT_CONSTANTS_END, moduleName);
<ide> Systrace.endSection(TRACE_TAG_REACT_JAVA_BRIDGE);
<ide>
<del> ReactMarker.logMarker(GET_CONSTANTS_END);
<add> ReactMarker.logMarker(GET_CONSTANTS_END, moduleName);
<ide> SystraceMessage.endSection(TRACE_TAG_REACT_JAVA_BRIDGE).flush();
<ide> }
<ide> }
<ide><path>ReactAndroid/src/main/java/com/facebook/react/bridge/ModuleHolder.java
<ide> private NativeModule create() {
<ide> doInitialize(module);
<ide> }
<ide> } finally {
<del> ReactMarker.logMarker(CREATE_MODULE_END, mInstanceKey);
<add> ReactMarker.logMarker(CREATE_MODULE_END, mName, mInstanceKey);
<ide> SystraceMessage.endSection(TRACE_TAG_REACT_JAVA_BRIDGE).flush();
<ide> }
<ide> return module;
<ide> private void doInitialize(NativeModule module) {
<ide> }
<ide> }
<ide> } finally {
<del> ReactMarker.logMarker(ReactMarkerConstants.INITIALIZE_MODULE_END, mInstanceKey);
<add> ReactMarker.logMarker(ReactMarkerConstants.INITIALIZE_MODULE_END, mName, mInstanceKey);
<ide> SystraceMessage.endSection(TRACE_TAG_REACT_JAVA_BRIDGE).flush();
<ide> }
<ide> } | 2 |
Python | Python | keep lowercase configuration | 24b0bb388ae4c85b7e745927a052d0d84f7bc718 | <ide><path>celery/loaders/default.py
<ide> def read_configuration(self):
<ide> return self.setup_settings(usercfg)
<ide>
<ide> def wanted_module_item(self, item):
<del> return item[0].isupper() and not item.startswith('_')
<add> return not item.startswith('_') | 1 |
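The one-line change above relaxes which attributes of the user's config module are picked up: any name not starting with an underscore, instead of uppercase-only names. A quick sketch of the before/after filters (the setting names are illustrative):

```js
const wantedBefore = (k) => k[0] === k[0].toUpperCase() && !k.startsWith('_');
const wantedAfter = (k) => !k.startsWith('_');

const settings = ['broker_url', 'CELERY_RESULT_BACKEND', '_private'];
console.log(settings.filter(wantedBefore)); // [ 'CELERY_RESULT_BACKEND' ]
console.log(settings.filter(wantedAfter));  // [ 'broker_url', 'CELERY_RESULT_BACKEND' ]
```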
Javascript | Javascript | fix failing test in ie 10 | 79049b9fee5690b8370c51e0fbb82d7a16b787fc | <ide><path>test/ng/directive/formSpec.js
<ide> describe('form', function() {
<ide> // the issue in the wild, I'm not going to bother to do it
<ide> // now. (i)
<ide>
<del> // IE9 is special and it doesn't fire submit event when form was destroyed
<del> if (msie != 9) {
<add> // IE9 and IE10 are special and don't fire submit event when form was destroyed
<add> if (msie < 9) {
<ide> expect(reloadPrevented).toBe(true);
<ide> $timeout.flush();
<ide> } | 1 |
Text | Text | fix naming convention for sdist provider packages | e36e061c19d24aeaf80674dd40be13e7049cec53 | <ide><path>dev/README_RELEASE_PROVIDER_PACKAGES.md
<ide> This can be done with the Apache RAT tool.
<ide>
<ide> * Download the latest jar from https://creadur.apache.org/rat/download_rat.cgi (unpack the binary,
<ide> the jar is inside)
<del>* Unpack the release source archive (the `<package + version>-source.tar.gz` file) to a folder
<add>* Unpack the release source archive (the `<package + version>.tar.gz` file) to a folder
<ide> * Enter the sources folder run the check
<ide>
<ide> ```shell script | 1 |
Ruby | Ruby | remove unnecessary qualified constant lookups | 803f4385c6c30217e3d2cf81cbaba92c7bc58476 | <ide><path>activejob/lib/active_job/serializers/array_serializer.rb
<ide> class << self
<ide> alias_method :deserialize?, :serialize?
<ide>
<ide> def serialize(array)
<del> array.map { |arg| ::ActiveJob::Serializers.serialize(arg) }
<add> array.map { |arg| Serializers.serialize(arg) }
<ide> end
<ide>
<ide> def deserialize(array)
<del> array.map { |arg| ::ActiveJob::Serializers.deserialize(arg) }
<add> array.map { |arg| Serializers.deserialize(arg) }
<ide> end
<ide>
<ide> private
<ide>
<ide> def klass
<del> ::Array
<add> Array
<ide> end
<ide> end
<ide> end
<ide><path>activejob/lib/active_job/serializers/global_id_serializer.rb
<ide> def key
<ide> private
<ide>
<ide> def klass
<del> ::GlobalID::Identification
<add> GlobalID::Identification
<ide> end
<ide> end
<ide> end
<ide><path>activejob/lib/active_job/serializers/hash_serializer.rb
<ide> def deserialize?(argument)
<ide> end
<ide>
<ide> def deserialize(hash)
<del> result = hash.transform_values { |v| ::ActiveJob::Serializers::deserialize(v) }
<add> result = hash.transform_values { |v| Serializers::deserialize(v) }
<ide> symbol_keys = result.delete(key)
<ide> transform_symbol_keys(result, symbol_keys)
<ide> end
<ide> def key
<ide>
<ide> def serialize_hash(hash)
<ide> hash.each_with_object({}) do |(key, value), result|
<del> result[serialize_hash_key(key)] = ::ActiveJob::Serializers.serialize(value)
<add> result[serialize_hash_key(key)] = Serializers.serialize(value)
<ide> end
<ide> end
<ide>
<ide> def transform_symbol_keys(hash, symbol_keys)
<ide> end
<ide>
<ide> def klass
<del> ::Hash
<add> Hash
<ide> end
<ide> end
<ide> end
<ide><path>activejob/lib/active_job/serializers/hash_with_indifferent_access_serializer.rb
<ide> class HashWithIndifferentAccessSerializer < HashSerializer
<ide> class << self
<ide> def serialize(hash)
<ide> result = serialize_hash(hash)
<del> result[key] = ::ActiveJob::Serializers.serialize(true)
<add> result[key] = Serializers.serialize(true)
<ide> result
<ide> end
<ide>
<ide> def deserialize?(argument)
<ide> end
<ide>
<ide> def deserialize(hash)
<del> result = hash.transform_values { |v| ::ActiveJob::Serializers.deserialize(v) }
<add> result = hash.transform_values { |v| Serializers.deserialize(v) }
<ide> result.delete(key)
<ide> result.with_indifferent_access
<ide> end
<ide> def key
<ide> private
<ide>
<ide> def klass
<del> ::ActiveSupport::HashWithIndifferentAccess
<add> ActiveSupport::HashWithIndifferentAccess
<ide> end
<ide> end
<ide> end
<ide><path>activejob/lib/active_job/serializers/standard_type_serializer.rb
<ide> module Serializers
<ide> class StandardTypeSerializer < BaseSerializer
<ide> class << self
<ide> def serialize?(argument)
<del> ::ActiveJob::Arguments::TYPE_WHITELIST.include? argument.class
<add> Arguments::TYPE_WHITELIST.include? argument.class
<ide> end
<ide>
<ide> def serialize(argument) | 5 |
Javascript | Javascript | simplify trim fn now that ie9 has string#trim | e6a9f9e1302c1bb8ac75b2e2b5fa9fc617862f7f | <ide><path>src/Angular.js
<ide> function isPromiseLike(obj) {
<ide> }
<ide>
<ide>
<del>var trim = (function() {
<del> // native trim is way faster: http://jsperf.com/angular-trim-test
<del> // but IE doesn't have it... :-(
<del> // TODO: we should move this into IE/ES5 polyfill
<del> if (!String.prototype.trim) {
<del> return function(value) {
<del> return isString(value) ? value.replace(/^\s\s*/, '').replace(/\s\s*$/, '') : value;
<del> };
<del> }
<del> return function(value) {
<del> return isString(value) ? value.trim() : value;
<del> };
<del>})();
<add>var trim = function(value) {
<add> return isString(value) ? value.trim() : value;
<add>};
<ide>
<ide>
<ide> /** | 1 |
Python | Python | remove unused code from gannt view | 54d3c9a55c974ecb1ea2b4524294ffbc42905ac2 | <ide><path>airflow/www/views.py
<ide> def gantt(self, session=None):
<ide> d['try_number'] = try_count
<ide> tasks.append(d)
<ide>
<del> task_types = {}
<del> extra_links = {}
<del> for t in dag.tasks:
<del> task_types[t.task_id] = t.task_type
<del> extra_links[t.task_id] = t.extra_links
<del>
<ide> data = {
<ide> 'taskNames': [ti.task_id for ti in tis],
<ide> 'tasks': tasks, | 1 |
Text | Text | add some missing changelog entries [ci skip] | 313ce30dfaea576f126dd02a067864756f06567b | <ide><path>CHANGELOG.md
<ide>
<ide> - [#17130](https://github.com/emberjs/ember.js/pull/17130) [BUGFIX] Ensure that timers scheduled after a system sleep are fired properly.
<ide> - [#17137](https://github.com/emberjs/ember.js/pull/17137) [BUGFIX] Assert when local variables shadow modifier invocations
<add>- [#17132](https://github.com/emberjs/ember.js/pull/17132) [BUGFIX] Assert when local variables shadow helper invocations
<add>- [#17135](https://github.com/emberjs/ember.js/pull/17135) [BUGFIX] Ensure local variables win over helper invocations
<ide> - [#16923](https://github.com/emberjs/ember.js/pull/16923) [BUGFIX] ES6 classes on/removeListener and observes/removeObserver interop v2
<ide> - [#17128](https://github.com/emberjs/ember.js/pull/17128) [BUGFIX] Fix sourcemaping issues due to multiple sourcemap directives.
<ide> - [#17115](https://github.com/emberjs/ember.js/pull/17115) [BUGFIX] Pass the event parameter to sendAction
<ide>
<ide> - [#16956](https://github.com/emberjs/ember.js/pull/16956) [DEPRECATION] Deprecate Ember.merge
<ide> - [#16795](https://github.com/emberjs/ember.js/pull/16795) [FEATURE] Native Class Constructor Update (see [emberjs/rfcs#337](https://github.com/emberjs/rfcs/blob/master/text/0337-native-class-constructor-update.md)
<del>- [#16865](https://github.com/emberjs/ember.js/pull/16865) / [#16899](https://github.com/emberjs/ember.js/pull/16899) / [#16914](https://github.com/emberjs/ember.js/pull/16914) / [#16897](https://github.com/emberjs/ember.js/pull/16897) / [#16913](https://github.com/emberjs/ember.js/pull/16913) / [#16894](https://github.com/emberjs/ember.js/pull/16894) / [#16896](https://github.com/emberjs/ember.js/pull/16896) [BUGFIX] Support RFC 232 and RFC 268 style tests with Mocha blueprints
<add>- [#16865](https://github.com/emberjs/ember.js/pull/16865) / [#16899](https://github.com/emberjs/ember.js/pull/16899) / [#16914](https://github.com/emberjs/ember.js/pull/16914) / [#16897](https://github.com/emberjs/ember.js/pull/16897) / [#16913](https://github.com/emberjs/ember.js/pull/16913) / [#16894](https://github.com/emberjs/ember.js/pull/16894) / [#16896](https://github.com/emberjs/ember.js/pull/16896) [BUGFIX] Support RFC 232 and RFC 268 style tests with Mocha blueprints
<ide> - [#17025](https://github.com/emberjs/ember.js/pull/17025) / [#17034](https://github.com/emberjs/ember.js/pull/17034) / [#17036](https://github.com/emberjs/ember.js/pull/17036) / [#17038](https://github.com/emberjs/ember.js/pull/17038) / [#17040](https://github.com/emberjs/ember.js/pull/17040) / [#17041](https://github.com/emberjs/ember.js/pull/17041) / [#17061](https://github.com/emberjs/ember.js/pull/17061) [FEATURE] Final stage of the router service RFC (see [emberjs/rfcs#95](https://github.com/emberjs/rfcs/blob/master/text/0095-router-service.md)
<ide> - [#17051](https://github.com/emberjs/ember.js/pull/17051) Update glimmer-vm packages to 0.36.4
<ide> | 1 |
Text | Text | add description for inspector-only console methods | 982c67419b062412a7e8a7040cfd34239c6be7b4 | <ide><path>doc/api/console.md
<ide> undefined
<ide> ### console.debug(data[, ...args])
<ide> <!-- YAML
<ide> added: v8.0.0
<add>changes:
<add> - version: REPLACEME
<add> pr-url: https://github.com/nodejs/node/pull/17033
<add> description: "`console.debug` is now an alias for `console.log`."
<ide> -->
<ide> * `data` {any}
<ide> * `...args` {any}
<ide> added: v0.1.100
<ide>
<ide> The `console.warn()` function is an alias for [`console.error()`][].
<ide>
<add>## Inspector only methods
<add>The following methods are exposed by the V8 engine in the general API but do
<add>not display anything unless used in conjunction with the [inspector][]
<add>(`--inspect` flag).
<add>
<add>### console.dirxml(object)
<add><!-- YAML
<add>added: v8.0.0
<add>-->
<add>* `object` {string}
<add>
<add>This method does not display anything unless used in the inspector. The
<add>`console.dirxml()` method displays in `stdout` an XML interactive tree
<add>representation of the descendants of the specified `object` if possible, or the
<add>JavaScript representation if not. Calling `console.dirxml()` on an HTML or XML
<add>element is equivalent to calling `console.log()`.
<add>
<add>### console.markTimeline(label)
<add><!-- YAML
<add>added: v8.0.0
<add>-->
<add>* `label` {string} Defaults to `'default'`.
<add>
<add>This method does not display anything unless used in the inspector. The
<add>`console.markTimeline()` method is the deprecated form of [`console.timeStamp()`][].
<add>
<add>### console.profile([label])
<add><!-- YAML
<add>added: v8.0.0
<add>-->
<add>* `label` {string}
<add>
<add>This method does not display anything unless used in the inspector. The
<add>`console.profile()` method starts a JavaScript CPU profile with an optional
<add>label until [`console.profileEnd()`][] is called. The profile is then added to
<add>the **Profile** panel of the inspector.
<add>```js
<add>console.profile('MyLabel');
<add>// Some code
<add>console.profileEnd();
<add>// Adds the profile 'MyLabel' to the Profiles panel of the inspector.
<add>```
<add>
<add>### console.profileEnd()
<add><!-- YAML
<add>added: v8.0.0
<add>-->
<add>
<add>This method does not display anything unless used in the inspector. Stops the
<add>current JavaScript CPU profiling session if one has been started and prints
<add>the report to the **Profiles** panel of the inspector. See
<add>[`console.profile()`][] for an example.
<add>
<add>### console.table(array[, columns])
<add><!-- YAML
<add>added: v8.0.0
<add>-->
<add>* `array` {Array|Object}
<add>* `columns` {Array}
<add>
<add>This method does not display anything unless used in the inspector. Prints to
<add>`stdout` the array `array` formatted as a table.
<add>
<add>### console.timeStamp([label])
<add><!-- YAML
<add>added: v8.0.0
<add>-->
<add>* `label` {string}
<add>
<add>This method does not display anything unless used in the inspector. The
<add>`console.timeStamp()` method adds an event with the label `label` to the
<add>**Timeline** panel of the inspector.
<add>
<add>### console.timeline([label])
<add><!-- YAML
<add>added: v8.0.0
<add>-->
<add>* `label` {string} Defaults to `'default'`.
<add>
<add>This method does not display anything unless used in the inspector. The
<add>`console.timeline()` method is the deprecated form of [`console.time()`][].
<add>
<add>### console.timelineEnd([label])
<add><!-- YAML
<add>added: v8.0.0
<add>-->
<add>* `label` {string} Defaults to `'default'`.
<add>
<add>This method does not display anything unless used in the inspector. The
<add>`console.timelineEnd()` method is the deprecated form of [`console.timeEnd()`][].
<add>
<ide> [`console.error()`]: #console_console_error_data_args
<ide> [`console.group()`]: #console_console_group_label
<ide> [`console.log()`]: #console_console_log_data_args
<add>[`console.profile()`]: #console_console_profile_label
<add>[`console.profileEnd()`]: #console_console_profileend
<ide> [`console.time()`]: #console_console_time_label
<ide> [`console.timeEnd()`]: #console_console_timeend_label
<add>[`console.timeStamp()`]: #console_console_timestamp_label
<ide> [`process.stderr`]: process.html#process_process_stderr
<ide> [`process.stdout`]: process.html#process_process_stdout
<ide> [`util.format()`]: util.html#util_util_format_format_args
<ide> [`util.inspect()`]: util.html#util_util_inspect_object_options
<ide> [customizing `util.inspect()` colors]: util.html#util_customizing_util_inspect_colors
<add>[inspector]: debugger.html
<ide> [note on process I/O]: process.html#process_a_note_on_process_i_o
<ide> [web-api-assert]: https://developer.mozilla.org/en-US/docs/Web/API/console/assert | 1 |
Text | Text | add a subsystems header in pull-requests.md | 8f1e4d37bf33b8a4d3e09bffc1bcbb04c869eba0 | <ide><path>doc/guides/contributing/pull-requests.md
<ide> so that you can make the actual changes. This is where we will start.
<ide> * [CI Testing](#ci-testing)
<ide> * [Waiting Until the Pull Request Gets Landed](#waiting-until-the-pull-request-gets-landed)
<ide> * [Check Out the Collaborator Guide](#check-out-the-collaborator-guide)
<add> * [Appendix: Subsystems](#appendix-subsystems)
<ide>
<ide> ## Dependencies
<ide>
<ide> A good commit message should describe what changed and why.
<ide> less, and no more than 72 characters)
<ide> * be entirely in lowercase with the exception of proper nouns, acronyms, and
<ide> the words that refer to code, like function/variable names
<del> * be prefixed with the name of the changed subsystem and start with an
<del> imperative verb. Check the output of `git log --oneline files/you/changed` to
<del> find out what subsystems your changes touch.
<add> * be prefixed with the name of the changed [subsystem](#appendix-subsystems)
<add> and start with an imperative verb. Check the output of `git log --oneline
<add> files/you/changed` to find out what subsystems your changes touch.
<ide>
<ide> Examples:
<ide> * `net: add localAddress and localPort to Socket`
<ide> widely used, so don't be discouraged!
<ide> If you want to know more about the code review and the landing process, see the
<ide> [Collaborator Guide][].
<ide>
<add>### Appendix: Subsystems
<add>
<add>* `lib/*.js` (`assert`, `buffer`, etc.)
<add>* `build`
<add>* `doc`
<add>* `lib / src`
<add>* `test`
<add>* `tools`
<add>
<add>More than one subsystem may be valid for any particular issue or pull request.
<add>
<ide> [Building guide]: ../../../BUILDING.md
<ide> [CI (Continuous Integration) test run]: #ci-testing
<ide> [Code of Conduct]: https://github.com/nodejs/admin/blob/master/CODE_OF_CONDUCT.md
<ide><path>doc/guides/onboarding-extras.md
<ide>
<ide> ## Labels
<ide>
<del>### Subsystems
<del>
<del>* `lib/*.js` (`assert`, `buffer`, etc.)
<del>* `build`
<del>* `doc`
<del>* `lib / src`
<del>* `test`
<del>* `tools`
<del>
<del>More than one subsystem may be valid for any particular issue or pull request.
<del>
<ide> ### General
<ide>
<ide> * `confirmed-bug`: Bugs you have verified | 2 |
Go | Go | fix random errors in dockersuite.testnetworknat | 769df832a39e2507fb46cb8896666a1c3197ff0b | <ide><path>integration-cli/docker_cli_nat_test.go
<ide> import (
<ide> "fmt"
<ide> "net"
<ide> "os/exec"
<del> "strconv"
<ide> "strings"
<ide>
<ide> "github.com/go-check/check"
<ide> )
<ide>
<ide> func startServerContainer(c *check.C, proto string, port int) string {
<del> cmd := []string{"-d", "-p", fmt.Sprintf("%d:%d", port, port), "busybox", "nc", "-lp", strconv.Itoa(port)}
<add> pStr := fmt.Sprintf("%d:%d", port, port)
<add> bCmd := fmt.Sprintf("nc -lp %d && echo bye", port)
<add> cmd := []string{"-d", "-p", pStr, "busybox", "sh", "-c", bCmd}
<ide> if proto == "udp" {
<ide> cmd = append(cmd, "-u")
<ide> }
<ide> func (s *DockerSuite) TestNetworkNat(c *check.C) {
<ide> }
<ide>
<ide> result := getContainerLogs(c, srv)
<del> if expected := "hello world"; result != expected {
<add>
<add> // Ideally we'd like to check for "hello world" but sometimes
<add> // nc doesn't show the data it received so instead let's look for
<add> // the output of the 'echo bye' that should be printed once
<add> // the nc command gets a connection
<add> expected := "bye"
<add> if !strings.Contains(result, expected) {
<ide> c.Fatalf("Unexpected output. Expected: %q, received: %q", expected, result)
<ide> }
<ide> }
<ide> func (s *DockerSuite) TestNetworkLocalhostTCPNat(c *check.C) {
<ide> conn.Close()
<ide>
<ide> result := getContainerLogs(c, srv)
<del> if expected := "hello world"; result != expected {
<add>
<add> // Ideally we'd like to check for "hello world" but sometimes
<add> // nc doesn't show the data it received so instead let's look for
<add> // the output of the 'echo bye' that should be printed once
<add> // the nc command gets a connection
<add> expected := "bye"
<add> if !strings.Contains(result, expected) {
<ide> c.Fatalf("Unexpected output. Expected: %q, received: %q", expected, result)
<ide> }
<ide> } | 1 |
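The fix above asserts on a sentinel (`bye`) that the shell echoes after `nc` exits, rather than on the payload that `nc` sometimes swallows. The same trick in a generic Node test helper (the docker CLI and a running container are assumed):

```js
const { execSync } = require('child_process');

function assertContainerSawConnection(containerId) {
  const logs = execSync(`docker logs ${containerId}`).toString();
  // "bye" is only printed once nc's connection has closed, so it is a
  // more reliable signal than the payload itself.
  if (!logs.includes('bye')) {
    throw new Error(`Unexpected output, expected "bye", got: ${logs}`);
  }
}
```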
Javascript | Javascript | remove unused code | 34afcc6fd91a522ce9d02737c80d43088ccb6b6a | <ide><path>lib/internal/priority_queue.js
<ide>
<ide> const {
<ide> Array,
<del> ArrayPrototypeIndexOf,
<ide> Symbol,
<ide> } = primordials;
<ide>
<ide> module.exports = class PriorityQueue {
<ide> }
<ide> }
<ide>
<del> remove(value) {
<del> const heap = this[kHeap];
<del> const pos = ArrayPrototypeIndexOf(heap, value);
<del> if (pos < 1)
<del> return false;
<del>
<del> this.removeAt(pos);
<del>
<del> return true;
<del> }
<del>
<ide> shift() {
<ide> const heap = this[kHeap];
<ide> const value = heap[1];
<ide><path>test/parallel/test-priority-queue.js
<ide> const PriorityQueue = require('internal/priority_queue');
<ide> assert.strictEqual(queue.shift(), undefined);
<ide> }
<ide>
<del>{
<del> // Checks that remove works as expected.
<del> const queue = new PriorityQueue();
<del> for (let i = 16; i > 0; i--)
<del> queue.insert(i);
<del>
<del> const removed = [5, 10, 15];
<del> for (const id of removed)
<del> assert(queue.remove(id));
<del>
<del> assert(!queue.remove(100));
<del> assert(!queue.remove(-100));
<del>
<del> for (let i = 1; i < 17; i++) {
<del> if (removed.indexOf(i) < 0)
<del> assert.strictEqual(queue.shift(), i);
<del> }
<del>
<del> assert.strictEqual(queue.shift(), undefined);
<del>}
<del>
<ide> {
<ide> // Make a max heap with a custom sort function.
<ide> const queue = new PriorityQueue((a, b) => b - a); | 2 |
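For context on what `removeAt(pos)` and `shift()` operate over: the queue is a 1-based binary heap stored in a plain array (index 0 unused, so `parent(i) = i >> 1` and the children of `i` are `2i` and `2i + 1`). A minimal standalone min-heap sketch:

```js
class MinHeap {
  constructor() { this.heap = [undefined]; this.size = 0; }

  insert(value) {
    this.heap[++this.size] = value;
    let i = this.size;
    // Bubble up while the parent is larger.
    while (i > 1 && this.heap[i >> 1] > this.heap[i]) {
      [this.heap[i >> 1], this.heap[i]] = [this.heap[i], this.heap[i >> 1]];
      i >>= 1;
    }
  }

  shift() {
    if (this.size === 0) return undefined;
    const top = this.heap[1];
    this.heap[1] = this.heap[this.size];
    this.heap.length = this.size--;
    // Percolate the new root down to restore the heap property.
    let i = 1;
    for (;;) {
      let min = i;
      const l = 2 * i, r = 2 * i + 1;
      if (l <= this.size && this.heap[l] < this.heap[min]) min = l;
      if (r <= this.size && this.heap[r] < this.heap[min]) min = r;
      if (min === i) break;
      [this.heap[i], this.heap[min]] = [this.heap[min], this.heap[i]];
      i = min;
    }
    return top;
  }
}
```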
Ruby | Ruby | use default_cipher constant in messageencryptor | 4c484a69e222ddbc3226311e0dc3551481aa4bbb | <ide><path>activesupport/lib/active_support/message_encryptor.rb
<ide> def initialize(secret, *signature_key_or_options)
<ide> sign_secret = signature_key_or_options.first
<ide> @secret = secret
<ide> @sign_secret = sign_secret
<del> @cipher = options[:cipher] || "aes-256-cbc"
<add> @cipher = options[:cipher] || DEFAULT_CIPHER
<ide> @digest = options[:digest] || "SHA1" unless aead_mode?
<ide> @verifier = resolve_verifier
<ide> @serializer = options[:serializer] || Marshal | 1 |
Ruby | Ruby | fix rubocop style | cbae10a732f18bd8b96edd114a5aa7904cdbb433 | <ide><path>Library/Homebrew/test/test_custom_command.rb
<ide> def test_custom_command
<ide> cmd = "int-test-#{rand}"
<ide> file = "#{path}/brew-#{cmd}"
<ide>
<del> File.open(file, "w") { |f| f.write "#!/bin/sh\necho 'I am #{cmd}'\n" }
<add> File.open(file, "w") do |f|
<add> f.write "#!/bin/sh\necho 'I am #{cmd}'\n"
<add> end
<ide> FileUtils.chmod 0777, file
<ide>
<ide> assert_match "I am #{cmd}", | 1 |
Text | Text | revert incorrect change on readable._read | 2f1ae9eebb65db3ae2bffd9e5dde6ff0a8b66219 | <ide><path>doc/api/stream.md
<ide> const myReadable = new Readable({
<ide> #### readable.\_read(size)
<ide> <!-- YAML
<ide> added: v0.9.4
<del>changes:
<del> - version: v10.0.0
<del> pr-url: https://github.com/nodejs/node/pull/17979
<del> description: Call `_read()` only once per microtick.
<ide> -->
<ide>
<ide> * `size` {number} Number of bytes to read asynchronously
<ide> when `_read()` is called again after it has stopped should it resume pushing
<ide> additional data onto the queue.
<ide>
<ide> Once the `readable._read()` method has been called, it will not be called again
<del>until the [`readable.push()`][stream-push] method is called. `readable._read()`
<del>is guaranteed to be called only once within a synchronous execution, i.e. a
<del>microtick.
<add>until the [`readable.push()`][stream-push] method is called.
<ide>
<ide> The `size` argument is advisory. For implementations where a "read" is a
<ide> single operation that returns data can use the `size` argument to determine how | 1 |
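For completeness, a small `Readable` implementation matching the contract described above — `_read()` is not called again until `push()` has been called:

```js
const { Readable } = require('stream');

// Emits the strings '1' through '5', then ends.
class Counter extends Readable {
  constructor(options) {
    super(options);
    this._n = 0;
  }

  _read() {
    this._n += 1;
    if (this._n > 5) {
      this.push(null);            // signal end-of-stream
    } else {
      this.push(String(this._n)); // push() re-arms the _read() cycle
    }
  }
}

new Counter().on('data', (chunk) => console.log(chunk.toString()));
```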
Python | Python | implement threasholds classes | a2f97d56d3824145043c577c2423c93f650f0873 | <ide><path>glances/plugins/glances_plugin.py
<ide> def get_alert(self,
<ide> # Manage action
<ide> self.manage_action(stat_name, ret.lower(), header, action_key)
<ide>
<del> # Default is ok
<add> # Default is 'OK'
<ide> return ret + log_str
<ide>
<ide> def manage_action(self,
<ide><path>glances/thresholds.py
<add># -*- coding: utf-8 -*-
<add>#
<add># This file is part of Glances.
<add>#
<add># Copyright (C) 2017 Nicolargo <[email protected]>
<add>#
<add># Glances is free software; you can redistribute it and/or modify
<add># it under the terms of the GNU Lesser General Public License as published by
<add># the Free Software Foundation, either version 3 of the License, or
<add># (at your option) any later version.
<add>#
<add># Glances is distributed in the hope that it will be useful,
<add># but WITHOUT ANY WARRANTY; without even the implied warranty of
<add># MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
<add># GNU Lesser General Public License for more details.
<add>#
<add># You should have received a copy of the GNU Lesser General Public License
<add># along with this program. If not, see <http://www.gnu.org/licenses/>.
<add>
<add>"""
<add>Thresholds classes: OK, CAREFUL, WARNING, CRITICAL
<add>"""
<add>
<add>
<add>class _GlancesThreshold(object):
<add>
<add> """Father class for all other Thresholds"""
<add>
<add> def description(self):
<add> return self._threshold['description']
<add>
<add> def value(self):
<add> return self._threshold['value']
<add>
<add> def __repr__(self):
<add> return self._threshold
<add>
<add> def __str__(self):
<add> return self.description()
<add>
<add> def __cmp__(self, other):
<add> """Override the default comparaison behavior"""
<add> return self.value().__cmp__(other.value())
<add>
<add>
<add>class GlancesThresholdOk(_GlancesThreshold):
<add>
<add> """Ok Threshold class"""
<add>
<add> _threshold = {'description': 'OK',
<add> 'value': 0}
<add>
<add>
<add>class GlancesThresholdCareful(_GlancesThreshold):
<add>
<add> """Careful Threshold class"""
<add>
<add> _threshold = {'description': 'CAREFUL',
<add> 'value': 1}
<add>
<add>
<add>class GlancesThresholdWarning(_GlancesThreshold):
<add>
<add> """Warning Threshold class"""
<add>
<add> _threshold = {'description': 'WARNING',
<add> 'value': 2}
<add>
<add>
<add>class GlancesThresholdCritical(_GlancesThreshold):
<add>
<add> """Warning Threshold class"""
<add>
<add> _threshold = {'description': 'CRITICAL',
<add> 'value': 3}
<ide><path>unitest.py
<ide> from glances import __version__
<ide> from glances.globals import WINDOWS, LINUX
<ide> from glances.outputs.glances_bars import Bar
<add>from glances.thresholds import GlancesThresholdOk
<add>from glances.thresholds import GlancesThresholdCareful
<add>from glances.thresholds import GlancesThresholdWarning
<add>from glances.thresholds import GlancesThresholdCritical
<ide>
<ide> # Global variables
<ide> # =================
<ide> def test_013_gpu(self):
<ide> self.assertTrue(type(stats_grab) is list, msg='GPU stats is not a list')
<ide> print('INFO: GPU stats: %s' % stats_grab)
<ide>
<add> def test_094_thresholds(self):
<add> """Test thresholds classes"""
<add> print('INFO: [TEST_094] Thresholds classes')
<add> ok = GlancesThresholdOk()
<add> careful = GlancesThresholdCareful()
<add> warning = GlancesThresholdWarning()
<add> critical = GlancesThresholdCritical()
<add> self.assertTrue(ok < careful)
<add> self.assertTrue(careful < warning)
<add> self.assertTrue(warning < critical)
<add> self.assertFalse(ok > careful)
<add> self.assertTrue(ok == ok)
<add> self.assertTrue(str(ok) == 'OK')
<add>
<ide> def test_095_methods(self):
<ide> """Test mandatories methods"""
<ide> print('INFO: [TEST_095] Mandatories methods') | 3 |
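Two notes on the Python classes above: `__cmp__` is the Python-2-era comparison hook (Python 3 ignores it in favor of rich comparisons such as `__lt__`), and ordered singletons like these translate naturally to other languages. A JavaScript analogue using `valueOf()` for the ordering:

```js
class Threshold {
  constructor(description, value) {
    this.description = description;
    this.value = value;
  }
  valueOf() { return this.value; }       // lets <, >, <= compare numerically
  toString() { return this.description; }
}

const OK = new Threshold('OK', 0);
const CAREFUL = new Threshold('CAREFUL', 1);
const WARNING = new Threshold('WARNING', 2);
const CRITICAL = new Threshold('CRITICAL', 3);

console.assert(OK < CAREFUL && CAREFUL < WARNING && WARNING < CRITICAL);
console.assert(String(OK) === 'OK');
```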
Python | Python | remove unrelated ec2 references in ecsoperator | becedd5af8df01a0210e0a3fa78e619785f39908 | <ide><path>airflow/providers/amazon/aws/operators/ecs.py
<ide> def stop_task(self, cluster, task, reason: str) -> Dict:
<ide>
<ide> class ECSOperator(BaseOperator): # pylint: disable=too-many-instance-attributes
<ide> """
<del> Execute a task on AWS EC2 Container Service
<add> Execute a task on AWS ECS (Elastic Container Service)
<ide>
<del> :param task_definition: the task definition name on EC2 Container Service
<add> :param task_definition: the task definition name on Elastic Container Service
<ide> :type task_definition: str
<del> :param cluster: the cluster name on EC2 Container Service
<add> :param cluster: the cluster name on Elastic Container Service
<ide> :type cluster: str
<ide> :param overrides: the same parameter that boto3 will receive (templated):
<ide> http://boto3.readthedocs.org/en/latest/reference/services/ecs.html#ECS.Client.run_task | 1 |
Go | Go | remove unnecessary check for number of commands | dd7e59a40a4c93070f71adb3ec74021241586c21 | <ide><path>integration-cli/docker_cli_help_test.go
<ide> func (s *DockerSuite) TestHelpTextVerify(c *check.C) {
<ide> c.Fatal(err)
<ide> }
<ide> }
<del>
<del> // Number of commands for standard release and experimental release
<del> standard := 41
<del> experimental := 1
<del> expected := standard + experimental
<del> if isLocalDaemon {
<del> expected++ // for the daemon command
<del> }
<del> c.Assert(len(cmds), checker.LessOrEqualThan, expected, check.Commentf("Wrong # of cmds, it should be: %d\nThe list:\n%q", expected, cmds))
<ide> }
<del>
<ide> }
<ide>
<ide> func (s *DockerSuite) TestHelpExitCodesHelpOutput(c *check.C) { | 1 |
Ruby | Ruby | fix new year heisenbug | db49b7dc94a744fd53eaf80107995ca55f9415e8 | <ide><path>actionpack/test/controller/cookie_test.rb
<ide> def test_cookies_persist_throughout_request
<ide> def test_permanent_cookie
<ide> get :set_permanent_cookie
<ide> assert_match /Jamie/, @response.headers["Set-Cookie"]
<del> assert_match %r(#{20.years.from_now.year}), @response.headers["Set-Cookie"]
<add> assert_match %r(#{20.years.from_now.utc.year}), @response.headers["Set-Cookie"]
<ide> end
<ide>
<ide> def test_signed_cookie
<ide> def test_signed_cookie
<ide>
<ide> def test_permanent_signed_cookie
<ide> get :set_permanent_signed_cookie
<del> assert_match %r(#{20.years.from_now.year}), @response.headers["Set-Cookie"]
<add> assert_match %r(#{20.years.from_now.utc.year}), @response.headers["Set-Cookie"]
<ide> assert_equal 100, @controller.send(:cookies).signed[:remember_me]
<ide> end
<ide> | 1 |
Javascript | Javascript | add finishassets to progressplugin | 58d6c26197109eaff8dca5ae9a1bdcb179b5317e | <ide><path>lib/ProgressPlugin.js
<ide> class ProgressPlugin {
<ide> afterOptimizeChunkAssets: "after chunk asset optimization",
<ide> optimizeAssets: "asset optimization",
<ide> afterOptimizeAssets: "after asset optimization",
<add> finishAssets: "finish assets",
<add> afterFinishAssets: "after finish assets",
<ide> afterSeal: "after seal"
<ide> };
<ide> const numberOfHooks = Object.keys(hooks).length;
<ide><path>test/ProgressPlugin.test.js
<ide> describe("ProgressPlugin", function() {
<ide> "10% ...ding ...ries ...cies ...ules",
<ide> "remove empty arguments and omit arguments when no space"
<ide> );
<del> expect(logs).toContain("92% after chunk asset optimization");
<add> expect(logs).toContain("91% after chunk asset optimization");
<ide> expect(logs).toContain("100%");
<ide> });
<ide> }); | 2 |
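A sketch of how a hook-name-to-message map like the one above can drive a progress bar — each sealing hook advances the bar by an equal share of an (illustrative) sealing range:

```js
const hooks = {
  optimizeAssets: 'asset optimization',
  afterOptimizeAssets: 'after asset optimization',
  finishAssets: 'finish assets',
  afterFinishAssets: 'after finish assets',
};

const names = Object.keys(hooks);
names.forEach((name, i) => {
  // 0.70-0.95 is a made-up sealing range, not webpack's actual numbers.
  const percent = 0.7 + 0.25 * (i / names.length);
  console.log(`${Math.round(percent * 100)}% ${hooks[name]}`);
});
```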
Text | Text | fix instructions for installing swiftshader. | 1df88eb82214e79c5c3fdd67453f06c433bc49dc | <ide><path>research/cognitive_mapping_and_planning/README.md
<ide> citing the following paper:
<ide> mkdir -p deps
<ide> git clone --recursive https://github.com/google/swiftshader.git deps/swiftshader-src
<ide> cd deps/swiftshader-src && git checkout 91da6b00584afd7dcaed66da88e2b617429b3950
<add> git submodule update
<ide> mkdir build && cd build && cmake .. && make -j 16 libEGL libGLESv2
<ide> cd ../../../
<ide> cp deps/swiftshader-src/build/libEGL* libEGL.so.1 | 1 |
Javascript | Javascript | increase timeout for tests | 911c64ff3a5351424b5f74b1f65c44fe46e1b07b | <ide><path>test/HotTestCases.template.js
<ide> const describeCases = config => {
<ide> done();
<ide> });
<ide> },
<del> 10000
<add> 20000
<ide> );
<ide>
<ide> const { it: _it, getNumberOfTests } = createLazyTestEnv(
<ide> jasmine.getEnv(),
<del> 10000
<add> 20000
<ide> );
<ide> });
<ide> }); | 1 |
Javascript | Javascript | normalize view and detail properties | 5fa001b12c495c7b8f25703c0da2a7348ffcf2a6 | <ide><path>src/browser/syntheticEvents/SyntheticUIEvent.js
<ide>
<ide> var SyntheticEvent = require('SyntheticEvent');
<ide>
<add>var getEventTarget = require('getEventTarget');
<add>
<ide> /**
<ide> * @interface UIEvent
<ide> * @see http://www.w3.org/TR/DOM-Level-3-Events/
<ide> */
<ide> var UIEventInterface = {
<del> view: null,
<del> detail: null
<add> view: function(event) {
<add> if (event.view) {
<add> return event.view;
<add> } else {
<add> var target = getEventTarget(event);
<add> if (target != null && target.window === target) {
<add> // target is a window object
<add> return target;
<add> } else {
<add> var doc = target.ownerDocument;
<add> return doc.defaultView || doc.parentWindow;
<add> }
<add> }
<add> },
<add> detail: function(event) {
<add> return event.detail || 0;
<add> }
<ide> };
<ide>
<ide> /** | 1 |
Text | Text | move sam ruby to emeritus | b31c6c91ec4bb5baaa600f1f9be698051b1b1774 | <ide><path>README.md
<ide> For information about the governance of the Node.js project, see
<ide> **Ricky Zhou** <[email protected]> (he/him)
<ide> * [ronag](https://github.com/ronag) -
<ide> **Robert Nagy** <[email protected]>
<del>* [rubys](https://github.com/rubys) -
<del>**Sam Ruby** <[email protected]>
<ide> * [ruyadorno](https://github.com/ruyadorno) -
<ide> **Ruy Adorno** <[email protected]> (he/him)
<ide> * [rvagg](https://github.com/rvagg) -
<ide> For information about the governance of the Node.js project, see
<ide> **Ron Korving** <[email protected]>
<ide> * [RReverser](https://github.com/RReverser) -
<ide> **Ingvar Stepanyan** <[email protected]>
<add>* [rubys](https://github.com/rubys) -
<add>**Sam Ruby** <[email protected]>
<ide> * [sam-github](https://github.com/sam-github) -
<ide> **Sam Roberts** <[email protected]>
<ide> * [sebdeckers](https://github.com/sebdeckers) - | 1 |
Text | Text | review some changelog entries [ci skip] | 4e363b1c0e31da7b148feb5bbf036ed690afd08e | <ide><path>activerecord/CHANGELOG.md
<ide> ## Rails 4.0.0 (unreleased) ##
<add>
<ide> * After extraction of mass-assignment attributes (which protects [id, type]
<del> by default) we can pass id to update_attributes and it will update
<add> by default) we can pass id to `update_attributes` and it will update
<ide> another record because id will be used in where statement. We never have
<ide> to change id in where statement because we try to set/replace fields for
<ide> already loaded record but we have to try to set new id for that record.
<ide>
<ide> *Ian Young*
<ide>
<del>* If inverse_of is true on an association, then when one calls +find()+ on
<del> the association, ActiveRecord will first look through the in-memory objects
<add>* If ``:inverse_of` is true on an association, then when one calls `find()` on
<add> the association, Active Record will first look through the in-memory objects
<ide> in the association for a particular id. Then, it will go to the DB if it
<del> is not found. This is accomplished by calling +find_by_scan+ in
<del> collection associations whenever +options[:inverse_of]+ is not nil.
<add> is not found. This is accomplished by calling `find_by_scan` in
<add> collection associations whenever `options[:inverse_of]` is not nil.
<ide>
<ide> Fixes #9470.
<ide>
<ide>
<ide> *Yves Senn*
<ide>
<del>* Fix quoting for sqlite migrations using copy_table_contents() with binary
<add>* Fix quoting for sqlite migrations using `copy_table_contents` with binary
<ide> columns.
<ide>
<ide> These would fail with "SQLite3::SQLException: unrecognized token" because
<del> the column was not being passed to quote() so the data was not quoted
<add> the column was not being passed to `quote` so the data was not quoted
<ide> correctly.
<ide>
<ide> *Matthew M. Boedicker* | 1 |
Javascript | Javascript | add a preference to set the sidebarview on load | c53bcb29926ac09b563b7de6d555f964f4abf660 | <ide><path>web/default_preferences.js
<ide> var DEFAULT_PREFERENCES = {
<ide> showPreviousViewOnLoad: true,
<ide> defaultZoomValue: '',
<del> ifAvailableShowOutlineOnLoad: false,
<add> sidebarViewOnLoad: 0,
<ide> enableHandToolOnLoad: false,
<ide> enableWebGL: false
<ide> };
<ide><path>web/preferences.js
<ide>
<ide> //#include default_preferences.js
<ide>
<add>var SidebarView = {
<add> NONE: 0,
<add> THUMBS: 1,
<add> OUTLINE: 2,
<add> ATTACHMENTS: 3
<add>};
<add>
<ide> /**
<ide> * Preferences - Utility for storing persistent settings.
<ide> * Used for settings that should be applied to all opened documents,
<ide><path>web/viewer.js
<ide> /* globals PDFJS, PDFBug, FirefoxCom, Stats, Cache, PDFFindBar, CustomStyle,
<ide> PDFFindController, ProgressBar, TextLayerBuilder, DownloadManager,
<ide> getFileName, scrollIntoView, getPDFFileNameFromURL, PDFHistory,
<del> Preferences, ViewHistory, PageView, ThumbnailView, URL,
<add> Preferences, SidebarView, ViewHistory, PageView, ThumbnailView, URL,
<ide> noContextMenuHandler, SecondaryToolbar, PasswordPrompt,
<ide> PresentationMode, HandTool, Promise, DocumentProperties,
<ide> DocumentOutlineView, DocumentAttachmentsView */
<ide> var PDFView = {
<ide> var initializedPromise = Promise.all([
<ide> Preferences.get('enableWebGL').then(function resolved(value) {
<ide> PDFJS.disableWebGL = !value;
<add> }, function rejected(reason) {}),
<add> Preferences.get('sidebarViewOnLoad').then(function resolved(value) {
<add> self.preferenceSidebarViewOnLoad = value;
<ide> }, function rejected(reason) {})
<ide> // TODO move more preferences and other async stuff here
<ide> ]);
<ide> var PDFView = {
<ide> self.outline = new DocumentOutlineView(outline);
<ide> document.getElementById('viewOutline').disabled = !outline;
<ide>
<del> if (outline) {
<del> Preferences.get('ifAvailableShowOutlineOnLoad').then(
<del> function (prefValue) {
<del> if (prefValue) {
<del> if (!self.sidebarOpen) {
<del> document.getElementById('sidebarToggle').click();
<del> }
<del> self.switchSidebarView('outline');
<del> }
<del> });
<add> if (outline &&
<add> self.preferenceSidebarViewOnLoad === SidebarView.OUTLINE) {
<add> self.switchSidebarView('outline', true);
<ide> }
<ide> });
<ide> pdfDocument.getAttachments().then(function(attachments) {
<ide> self.attachments = new DocumentAttachmentsView(attachments);
<ide> document.getElementById('viewAttachments').disabled = !attachments;
<add>
<add> if (attachments &&
<add> self.preferenceSidebarViewOnLoad === SidebarView.ATTACHMENTS) {
<add> self.switchSidebarView('attachments', true);
<add> }
<ide> });
<ide> });
<ide>
<add> if (self.preferenceSidebarViewOnLoad === SidebarView.THUMBS) {
<add> Promise.all([firstPagePromise, onePageRendered]).then(function () {
<add> self.switchSidebarView('thumbs', true);
<add> });
<add> }
<add>
<ide> pdfDocument.getMetadata().then(function(data) {
<ide> var info = data.info, metadata = data.metadata;
<ide> self.documentInfo = info;
<ide> var PDFView = {
<ide> this.page = pageNumber; // simple page
<ide> }
<ide> if ('pagemode' in params) {
<del> var toggle = document.getElementById('sidebarToggle');
<ide> if (params.pagemode === 'thumbs' || params.pagemode === 'bookmarks' ||
<ide> params.pagemode === 'attachments') {
<del> if (!this.sidebarOpen) {
<del> toggle.click();
<del> }
<del> this.switchSidebarView(params.pagemode === 'bookmarks' ?
<del> 'outline' :
<del> params.pagemode);
<add> this.switchSidebarView((params.pagemode === 'bookmarks' ?
<add> 'outline' : params.pagemode), true);
<ide> } else if (params.pagemode === 'none' && this.sidebarOpen) {
<del> toggle.click();
<add> document.getElementById('sidebarToggle').click();
<ide> }
<ide> }
<ide> } else if (/^\d+$/.test(hash)) { // page number
<ide> var PDFView = {
<ide> }
<ide> },
<ide>
<del> switchSidebarView: function pdfViewSwitchSidebarView(view) {
<add> switchSidebarView: function pdfViewSwitchSidebarView(view, openSidebar) {
<add> if (openSidebar && !this.sidebarOpen) {
<add> document.getElementById('sidebarToggle').click();
<add> }
<ide> var thumbsView = document.getElementById('thumbnailView');
<ide> var outlineView = document.getElementById('outlineView');
<ide> var attachmentsView = document.getElementById('attachmentsView'); | 3 |
PHP | PHP | unskip most of the sqlserver tests | 9d685fef0431330add95a8168b5c3a5e40b28218 | <ide><path>tests/TestCase/Database/Driver/SqlserverTest.php
<ide> class SqlserverTest extends TestCase
<ide> public function setUp()
<ide> {
<ide> parent::setUp();
<del> $this->skipUnless(defined('PDO::SQLSRV_ENCODING_UTF8'), 'SQL Server extension not present');
<add> $this->missingExtension = !defined('PDO::SQLSRV_ENCODING_UTF8');
<ide> }
<ide>
<ide> /**
<ide> public function setUp()
<ide> */
<ide> public function testConnectionConfigCustom()
<ide> {
<add> $this->skipIf($this->missingExtension, 'pdo_sqlsrv is not installed.');
<ide> $config = [
<ide> 'persistent' => false,
<ide> 'host' => 'foo',
<ide><path>tests/TestCase/Database/Schema/SqlserverSchemaTest.php
<ide> */
<ide> class SqlserverSchemaTest extends TestCase
<ide> {
<del>
<del> /**
<del> * Set up
<del> *
<del> * @return void
<del> */
<del> public function setUp()
<del> {
<del> parent::setUp();
<del> $this->skipUnless(defined('PDO::SQLSRV_ENCODING_UTF8'), 'SQL Server extension not present');
<del> }
<del>
<ide> /**
<ide> * Helper method for skipping tests that need a real connection.
<ide> * | 2 |
Python | Python | add support for triggering jobs by name | a1845c68f9a04e61dd99ccc0a23d17a277babf57 | <ide><path>airflow/providers/databricks/hooks/databricks.py
<ide> import sys
<ide> import time
<ide> from time import sleep
<del>from typing import Dict
<add>from typing import Any, Dict, List, Optional
<ide> from urllib.parse import urlparse
<ide>
<ide> import requests
<ide> INSTALL_LIBS_ENDPOINT = ('POST', 'api/2.0/libraries/install')
<ide> UNINSTALL_LIBS_ENDPOINT = ('POST', 'api/2.0/libraries/uninstall')
<ide>
<add>LIST_JOBS_ENDPOINT = ('GET', 'api/2.1/jobs/list')
<add>
<ide> USER_AGENT_HEADER = {'user-agent': f'airflow-{__version__}'}
<ide>
<ide> RUN_LIFE_CYCLE_STATES = ['PENDING', 'RUNNING', 'TERMINATING', 'TERMINATED', 'SKIPPED', 'INTERNAL_ERROR']
<ide> def submit_run(self, json: dict) -> int:
<ide> response = self._do_api_call(SUBMIT_RUN_ENDPOINT, json)
<ide> return response['run_id']
<ide>
<add> def list_jobs(self, limit: int = 25, offset: int = 0, expand_tasks: bool = False) -> List[Dict[str, Any]]:
<add> """
<add> Lists the jobs in the Databricks Job Service.
<add>
<add> :param limit: The limit/batch size used to retrieve jobs.
<add> :param offset: The offset of the first job to return, relative to the most recently created job.
<add> :param expand_tasks: Whether to include task and cluster details in the response.
<add> :return: A list of jobs.
<add> """
<add> has_more = True
<add> jobs = []
<add>
<add> while has_more:
<add> json = {
<add> 'limit': limit,
<add> 'offset': offset,
<add> 'expand_tasks': expand_tasks,
<add> }
<add> response = self._do_api_call(LIST_JOBS_ENDPOINT, json)
<add> jobs += response['jobs'] if 'jobs' in response else []
<add> has_more = response.get('has_more', False)
<add> if has_more:
<add> offset += len(response['jobs'])
<add>
<add> return jobs
<add>
<add> def find_job_id_by_name(self, job_name: str) -> Optional[int]:
<add> """
<add> Finds job id by its name. If there are multiple jobs with the same name, raises AirflowException.
<add>
<add> :param job_name: The name of the job to look up.
<add> :return: The job_id as an int or None if no job was found.
<add> """
<add> all_jobs = self.list_jobs()
<add> matching_jobs = [j for j in all_jobs if j['settings']['name'] == job_name]
<add>
<add> if len(matching_jobs) > 1:
<add> raise AirflowException(
<add> f"There are more than one job with name {job_name}. Please delete duplicated jobs first"
<add> )
<add>
<add> if not matching_jobs:
<add> return None
<add> else:
<add> return matching_jobs[0]['job_id']
<add>
<ide> def get_run_page_url(self, run_id: int) -> str:
<ide> """
<ide> Retrieves run_page_url.
<ide><path>airflow/providers/databricks/operators/databricks.py
<ide> if TYPE_CHECKING:
<ide> from airflow.utils.context import Context
<ide>
<del>
<ide> XCOM_RUN_ID_KEY = 'run_id'
<ide> XCOM_RUN_PAGE_URL_KEY = 'run_page_url'
<ide>
<ide> class DatabricksRunNowOperator(BaseOperator):
<ide>
<ide> Currently the named parameters that ``DatabricksRunNowOperator`` supports are
<ide> - ``job_id``
<add> - ``job_name``
<ide> - ``json``
<ide> - ``notebook_params``
<ide> - ``python_params``
<ide> class DatabricksRunNowOperator(BaseOperator):
<ide>
<ide> .. seealso::
<ide> https://docs.databricks.com/dev-tools/api/latest/jobs.html#operation/JobsRunNow
<add> :param job_name: the name of the existing Databricks job.
<add> It must exist only one job with the specified name.
<add> ``job_id`` and ``job_name`` are mutually exclusive.
<add> This field will be templated.
<ide> :param json: A JSON object containing API parameters which will be passed
<ide> directly to the ``api/2.1/jobs/run-now`` endpoint. The other named parameters
<ide> (i.e. ``notebook_params``, ``spark_submit_params``..) to this operator will
<ide> def __init__(
<ide> self,
<ide> *,
<ide> job_id: Optional[str] = None,
<add> job_name: Optional[str] = None,
<ide> json: Optional[Any] = None,
<ide> notebook_params: Optional[Dict[str, str]] = None,
<ide> python_params: Optional[List[str]] = None,
<ide> def __init__(
<ide>
<ide> if job_id is not None:
<ide> self.json['job_id'] = job_id
<add> if job_name is not None:
<add> self.json['job_name'] = job_name
<add> if 'job_id' in self.json and 'job_name' in self.json:
<add> raise AirflowException("Argument 'job_name' is not allowed with argument 'job_id'")
<ide> if notebook_params is not None:
<ide> self.json['notebook_params'] = notebook_params
<ide> if python_params is not None:
<ide> def _get_hook(self) -> DatabricksHook:
<ide>
<ide> def execute(self, context: 'Context'):
<ide> hook = self._get_hook()
<add> if 'job_name' in self.json:
<add> job_id = hook.find_job_id_by_name(self.json['job_name'])
<add> if job_id is None:
<add> raise AirflowException(f"Job ID for job name {self.json['job_name']} can not be found")
<add> self.json['job_id'] = job_id
<add> del self.json['job_name']
<ide> self.run_id = hook.run_now(self.json)
<ide> _handle_databricks_operator_execution(self, hook, self.log, context)
<ide>
<ide><path>tests/providers/databricks/hooks/test_databricks.py
<ide> CLUSTER_ID = 'cluster_id'
<ide> RUN_ID = 1
<ide> JOB_ID = 42
<add>JOB_NAME = 'job-name'
<ide> HOST = 'xx.cloud.databricks.com'
<ide> HOST_WITH_SCHEME = 'https://xx.cloud.databricks.com'
<ide> LOGIN = 'login'
<ide> {"jar": "dbfs:/mnt/libraries/library.jar"},
<ide> {"maven": {"coordinates": "org.jsoup:jsoup:1.7.2", "exclusions": ["slf4j:slf4j"]}},
<ide> ]
<add>LIST_JOBS_RESPONSE = {
<add> 'jobs': [
<add> {
<add> 'job_id': JOB_ID,
<add> 'settings': {
<add> 'name': JOB_NAME,
<add> },
<add> },
<add> ],
<add> 'has_more': False,
<add>}
<ide>
<ide>
<ide> def run_now_endpoint(host):
<ide> def uninstall_endpoint(host):
<ide> return f'https://{host}/api/2.0/libraries/uninstall'
<ide>
<ide>
<add>def list_jobs_endpoint(host):
<add> """
<add> Utility function to generate the list jobs endpoint giver the host
<add> """
<add> return f'https://{host}/api/2.1/jobs/list'
<add>
<add>
<ide> def create_valid_response_mock(content):
<ide> response = mock.MagicMock()
<ide> response.json.return_value = content
<ide> def test_is_aad_token_valid_returns_false(self):
<ide> aad_token = {'token': 'my_token', 'expires_on': int(time.time())}
<ide> self.assertFalse(self.hook._is_aad_token_valid(aad_token))
<ide>
<add> @mock.patch('airflow.providers.databricks.hooks.databricks.requests')
<add> def test_list_jobs_success_single_page(self, mock_requests):
<add> mock_requests.codes.ok = 200
<add> mock_requests.get.return_value.json.return_value = LIST_JOBS_RESPONSE
<add>
<add> jobs = self.hook.list_jobs()
<add>
<add> mock_requests.get.assert_called_once_with(
<add> list_jobs_endpoint(HOST),
<add> json=None,
<add> params={'limit': 25, 'offset': 0, 'expand_tasks': False},
<add> auth=(LOGIN, PASSWORD),
<add> headers=USER_AGENT_HEADER,
<add> timeout=self.hook.timeout_seconds,
<add> )
<add>
<add> assert jobs == LIST_JOBS_RESPONSE['jobs']
<add>
<add> @mock.patch('airflow.providers.databricks.hooks.databricks.requests')
<add> def test_list_jobs_success_multiple_pages(self, mock_requests):
<add> mock_requests.codes.ok = 200
<add> mock_requests.get.side_effect = [
<add> create_successful_response_mock({**LIST_JOBS_RESPONSE, 'has_more': True}),
<add> create_successful_response_mock(LIST_JOBS_RESPONSE),
<add> ]
<add>
<add> jobs = self.hook.list_jobs()
<add>
<add> assert mock_requests.get.call_count == 2
<add>
<add> first_call_args = mock_requests.method_calls[0]
<add> assert first_call_args[1][0] == list_jobs_endpoint(HOST)
<add> assert first_call_args[2]['params'] == {'limit': 25, 'offset': 0, 'expand_tasks': False}
<add>
<add> second_call_args = mock_requests.method_calls[1]
<add> assert second_call_args[1][0] == list_jobs_endpoint(HOST)
<add> assert second_call_args[2]['params'] == {'limit': 25, 'offset': 1, 'expand_tasks': False}
<add>
<add> assert len(jobs) == 2
<add> assert jobs == LIST_JOBS_RESPONSE['jobs'] * 2
<add>
<add> @mock.patch('airflow.providers.databricks.hooks.databricks.requests')
<add> def test_get_job_id_by_name_success(self, mock_requests):
<add> mock_requests.codes.ok = 200
<add> mock_requests.get.return_value.json.return_value = LIST_JOBS_RESPONSE
<add>
<add> job_id = self.hook.find_job_id_by_name(JOB_NAME)
<add>
<add> mock_requests.get.assert_called_once_with(
<add> list_jobs_endpoint(HOST),
<add> json=None,
<add> params={'limit': 25, 'offset': 0, 'expand_tasks': False},
<add> auth=(LOGIN, PASSWORD),
<add> headers=USER_AGENT_HEADER,
<add> timeout=self.hook.timeout_seconds,
<add> )
<add>
<add> assert job_id == JOB_ID
<add>
<add> @mock.patch('airflow.providers.databricks.hooks.databricks.requests')
<add> def test_get_job_id_by_name_not_found(self, mock_requests):
<add> mock_requests.codes.ok = 200
<add> mock_requests.get.return_value.json.return_value = LIST_JOBS_RESPONSE
<add>
<add> job_id = self.hook.find_job_id_by_name("Non existing job")
<add>
<add> mock_requests.get.assert_called_once_with(
<add> list_jobs_endpoint(HOST),
<add> json=None,
<add> params={'limit': 25, 'offset': 0, 'expand_tasks': False},
<add> auth=(LOGIN, PASSWORD),
<add> headers=USER_AGENT_HEADER,
<add> timeout=self.hook.timeout_seconds,
<add> )
<add>
<add> assert job_id is None
<add>
<add> @mock.patch('airflow.providers.databricks.hooks.databricks.requests')
<add> def test_get_job_id_by_name_raise_exception_with_duplicates(self, mock_requests):
<add> mock_requests.codes.ok = 200
<add> mock_requests.get.return_value.json.return_value = {
<add> **LIST_JOBS_RESPONSE,
<add> 'jobs': LIST_JOBS_RESPONSE['jobs'] * 2,
<add> }
<add>
<add> exception_message = f'There are more than one job with name {JOB_NAME}.'
<add> with pytest.raises(AirflowException, match=exception_message):
<add> self.hook.find_job_id_by_name(JOB_NAME)
<add>
<add> mock_requests.get.assert_called_once_with(
<add> list_jobs_endpoint(HOST),
<add> json=None,
<add> params={'limit': 25, 'offset': 0, 'expand_tasks': False},
<add> auth=(LOGIN, PASSWORD),
<add> headers=USER_AGENT_HEADER,
<add> timeout=self.hook.timeout_seconds,
<add> )
<add>
<ide>
<ide> class TestDatabricksHookToken(unittest.TestCase):
<ide> """
<ide><path>tests/providers/databricks/operators/test_databricks.py
<ide> RUN_NAME = 'run-name'
<ide> RUN_ID = 1
<ide> JOB_ID = "42"
<add>JOB_NAME = "job-name"
<ide> NOTEBOOK_PARAMS = {"dry-run": "true", "oldest-time-to-consider": "1457570074236"}
<ide> JAR_PARAMS = ["param1", "param2"]
<ide> RENDERED_TEMPLATED_JAR_PARAMS = [f'/test-{DATE}']
<ide> def test_no_wait_for_termination(self, db_mock_class):
<ide> db_mock.run_now.assert_called_once_with(expected)
<ide> db_mock.get_run_page_url.assert_called_once_with(RUN_ID)
<ide> db_mock.get_run_state.assert_not_called()
<add>
<add> def test_init_exeption_with_job_name_and_job_id(self):
<add> exception_message = "Argument 'job_name' is not allowed with argument 'job_id'"
<add>
<add> with pytest.raises(AirflowException, match=exception_message):
<add> DatabricksRunNowOperator(task_id=TASK_ID, job_id=JOB_ID, job_name=JOB_NAME)
<add>
<add> with pytest.raises(AirflowException, match=exception_message):
<add> run = {'job_id': JOB_ID, 'job_name': JOB_NAME}
<add> DatabricksRunNowOperator(task_id=TASK_ID, json=run)
<add>
<add> with pytest.raises(AirflowException, match=exception_message):
<add> run = {'job_id': JOB_ID}
<add> DatabricksRunNowOperator(task_id=TASK_ID, json=run, job_name=JOB_NAME)
<add>
<add> @mock.patch('airflow.providers.databricks.operators.databricks.DatabricksHook')
<add> def test_exec_with_job_name(self, db_mock_class):
<add> run = {'notebook_params': NOTEBOOK_PARAMS, 'notebook_task': NOTEBOOK_TASK, 'jar_params': JAR_PARAMS}
<add> op = DatabricksRunNowOperator(task_id=TASK_ID, job_name=JOB_NAME, json=run)
<add> db_mock = db_mock_class.return_value
<add> db_mock.find_job_id_by_name.return_value = JOB_ID
<add> db_mock.run_now.return_value = 1
<add> db_mock.get_run_state.return_value = RunState('TERMINATED', 'SUCCESS', '')
<add>
<add> op.execute(None)
<add>
<add> expected = databricks_operator._deep_string_coerce(
<add> {
<add> 'notebook_params': NOTEBOOK_PARAMS,
<add> 'notebook_task': NOTEBOOK_TASK,
<add> 'jar_params': JAR_PARAMS,
<add> 'job_id': JOB_ID,
<add> }
<add> )
<add>
<add> db_mock_class.assert_called_once_with(
<add> DEFAULT_CONN_ID, retry_limit=op.databricks_retry_limit, retry_delay=op.databricks_retry_delay
<add> )
<add> db_mock.find_job_id_by_name.assert_called_once_with(JOB_NAME)
<add> db_mock.run_now.assert_called_once_with(expected)
<add> db_mock.get_run_page_url.assert_called_once_with(RUN_ID)
<add> db_mock.get_run_state.assert_called_once_with(RUN_ID)
<add> assert RUN_ID == op.run_id
<add>
<add> @mock.patch('airflow.providers.databricks.operators.databricks.DatabricksHook')
<add> def test_exec_failure_if_job_id_not_found(self, db_mock_class):
<add> run = {'notebook_params': NOTEBOOK_PARAMS, 'notebook_task': NOTEBOOK_TASK, 'jar_params': JAR_PARAMS}
<add> op = DatabricksRunNowOperator(task_id=TASK_ID, job_name=JOB_NAME, json=run)
<add> db_mock = db_mock_class.return_value
<add> db_mock.find_job_id_by_name.return_value = None
<add>
<add> exception_message = f"Job ID for job name {JOB_NAME} can not be found"
<add> with pytest.raises(AirflowException, match=exception_message):
<add> op.execute(None)
<add>
<add> db_mock.find_job_id_by_name.assert_called_once_with(JOB_NAME) | 4 |
Javascript | Javascript | remove code-climate complains | 1388cc09b4130ac4266de2e58ef3918b524b4d4b | <ide><path>Brocfile.js
<ide>
<ide> var fs = require('fs');
<ide> var util = require('util');
<del>var path = require('path');
<ide> var pickFiles = require('broccoli-static-compiler');
<ide> var transpileES6 = require('broccoli-es6-module-transpiler');
<ide> var mergeTrees = require('broccoli-merge-trees');
<ide> var inlineTemplatePrecompiler = require('./lib/broccoli-ember-inline-template-pr
<ide> The `...` and if block would be stripped out of final output unless
<ide> `features.json` has `ember-metal-is-present` set to true.
<ide> */
<del>function defeatureifyConfig(options) {
<add>function defeatureifyConfig(opts) {
<ide> var stripDebug = false;
<del> var options = options || {};
<add> var options = opts || {};
<ide> var configJson = JSON.parse(fs.readFileSync("features.json").toString());
<ide> var features = options.features || configJson.features;
<ide>
<ide> function concatES6(sourceTrees, options) {
<ide> /*
<ide> In order to ensure that tree is compliant with older Javascript versions we
<ide> recast these trees here. For example, in ie6 the following would be an
<del>error:
<add> error:
<ide>
<ide> ```
<ide> {default: "something"}.default
<ide> function es6Package(packageName) {
<ide>
<ide> compiledTrees = mergeTrees(compiledTrees);
<ide>
<del> /*
<del> Memoizes trees. Guard above ensures that if this is set will automatically return.
<del> */
<add> // Memoizes trees. Guard above ensures that if this is set will automatically return.
<ide> pkg['trees'] = {lib: libTree, compiledTree: compiledTrees, vendorTrees: vendorTrees};
<ide>
<ide> // tests go boom if you try to pick them and they don't exists
<ide> function htmlbarsPackage(packageName) {
<ide> /*
<ide> Relies on bower to install other Ember micro libs. Assumes that /lib is
<ide> available and contains all the necessary ES6 modules necessary for the library
<del>to be required. And compiles them.
<add> to be required. And compiles them.
<ide> */
<ide> function vendoredEs6Package(packageName) {
<ide> var tree = pickFiles('bower_components/' + packageName + '/lib', { | 1 |
Javascript | Javascript | fix problems with graceful-fs bug | f04882f8e4b5755cc63b0b9a87672627e00ece51 | <ide><path>packager/react-packager/src/DependencyResolver/fastfs.js
<ide> const {EventEmitter} = require('events');
<ide> const fs = require('graceful-fs');
<ide> const path = require('path');
<ide>
<del>const open = Promise.denodeify(fs.open);
<add>// workaround for https://github.com/isaacs/node-graceful-fs/issues/56
<add>// fs.close is patched, whereas graceful-fs.close is not.
<add>const fsClose = require('fs').close;
<add>
<ide> const readFile = Promise.denodeify(fs.readFile);
<ide> const stat = Promise.denodeify(fs.stat);
<ide>
<ide> class File {
<ide> }
<ide>
<ide> readWhile(predicate) {
<del> const CHUNK_SIZE = 512;
<del> let result = '';
<del>
<del> return open(this.path, 'r').then(fd => {
<del> /* global Buffer: true */
<del> const buffer = new Buffer(CHUNK_SIZE);
<del> const p = new Promise((resolve, reject) => {
<del> let counter = 0;
<del> const callback = (error, bytesRead) => {
<del> if (error) {
<del> reject();
<del> return;
<del> }
<del>
<del> const chunk = buffer.toString('utf8', 0, bytesRead);
<del> result += chunk;
<del> if (bytesRead > 0 && predicate(chunk, counter++, result)) {
<del> readChunk(fd, buffer, callback);
<del> } else {
<del> if (bytesRead === 0 && !this._read) { // reached EOF
<del> this._read = Promise.resolve(result);
<del> }
<del> resolve(result);
<del> }
<del> };
<del> readChunk(fd, buffer, callback);
<del> });
<del>
<del> p.catch(() => fs.close(fd));
<del> return p;
<add> return readWhile(this.path, predicate).then(({result, completed}) => {
<add> if (completed && !this._read) {
<add> this._read = Promise.resolve(result);
<add> }
<add> return result;
<ide> });
<del>
<del> function readChunk(fd, buffer, callback) {
<del> fs.read(fd, buffer, 0, CHUNK_SIZE, null, callback);
<del> }
<ide> }
<ide>
<ide> stat() {
<ide> class File {
<ide> }
<ide> }
<ide>
<add>function readWhile(filePath, predicate) {
<add> return new Promise((resolve, reject) => {
<add> fs.open(filePath, 'r', (openError, fd) => {
<add> if (openError) {
<add> reject(openError);
<add> return;
<add> }
<add>
<add> read(
<add> fd,
<add> /*global Buffer: true*/
<add> new Buffer(512),
<add> makeReadCallback(fd, predicate, (readError, result, completed) => {
<add> if (readError) {
<add> reject(readError);
<add> } else {
<add> resolve({result, completed});
<add> }
<add> })
<add> );
<add> });
<add> });
<add>}
<add>
<add>function read(fd, buffer, callback) {
<add> fs.read(fd, buffer, 0, buffer.length, -1, callback);
<add>}
<add>
<add>function close(fd, error, result, complete, callback) {
<add> fsClose(fd, closeError => callback(error || closeError, result, complete));
<add>}
<add>
<add>function makeReadCallback(fd, predicate, callback) {
<add> let result = '';
<add> let index = 0;
<add> return function readCallback(error, bytesRead, buffer) {
<add> if (error) {
<add> close(fd, error, undefined, false, callback);
<add> return;
<add> }
<add>
<add> const completed = bytesRead === 0;
<add> const chunk = completed ? '' : buffer.toString('utf8', 0, bytesRead);
<add> result += chunk;
<add> if (completed || !predicate(chunk, index++, result)) {
<add> close(fd, null, result, completed, callback);
<add> } else {
<add> read(fd, buffer, readCallback);
<add> }
<add> };
<add>}
<add>
<ide> function isDescendant(root, child) {
<ide> return path.relative(root, child).indexOf('..') !== 0;
<ide> } | 1 |
Javascript | Javascript | fix bailout for writehead | b9960eefc2093ea639353171189652b7cf116326 | <ide><path>lib/_http_server.js
<ide> ServerResponse.prototype._implicitHeader = function() {
<ide> this.writeHead(this.statusCode);
<ide> };
<ide>
<del>ServerResponse.prototype.writeHead = function(statusCode) {
<del> var headers, headerIndex;
<add>ServerResponse.prototype.writeHead = function(statusCode, reason, obj) {
<add> var headers;
<ide>
<del> if (util.isString(arguments[1])) {
<del> this.statusMessage = arguments[1];
<del> headerIndex = 2;
<add> if (util.isString(reason)) {
<add> // writeHead(statusCode, reasonPhrase[, headers])
<add> this.statusMessage = reason;
<ide> } else {
<add> // writeHead(statusCode[, headers])
<ide> this.statusMessage =
<ide> this.statusMessage || STATUS_CODES[statusCode] || 'unknown';
<del> headerIndex = 1;
<add> obj = reason;
<ide> }
<ide> this.statusCode = statusCode;
<ide>
<del> var obj = arguments[headerIndex];
<del>
<ide> if (this._headers) {
<ide> // Slow-case: when progressive API and header fields are passed.
<ide> if (obj) { | 1 |
Ruby | Ruby | prevent string allocations | 38251d2e33ccdfeee7ddbeeba5737c2a36911606 | <ide><path>actionpack/lib/action_dispatch/http/request.rb
<ide> class Request < Rack::Request
<ide> ENV_METHODS.each do |env|
<ide> class_eval <<-METHOD, __FILE__, __LINE__ + 1
<ide> def #{env.sub(/^HTTP_/n, '').downcase} # def accept_charset
<del> @env["#{env}"] # @env["HTTP_ACCEPT_CHARSET"]
<add> @env["#{env}".freeze] # @env["HTTP_ACCEPT_CHARSET"]
<ide> end # end
<ide> METHOD
<ide> end | 1 |
Go | Go | move ulimit options to runconfig opts | 5adbea7075b2fda6ea9947d131aaa2a5db0c1295 | <ide><path>api/client/build.go
<ide> import (
<ide> flag "github.com/docker/docker/pkg/mflag"
<ide> "github.com/docker/docker/pkg/progress"
<ide> "github.com/docker/docker/pkg/streamformatter"
<del> "github.com/docker/docker/pkg/ulimit"
<ide> "github.com/docker/docker/pkg/urlutil"
<ide> "github.com/docker/docker/reference"
<add> runconfigopts "github.com/docker/docker/runconfig/opts"
<ide> "github.com/docker/docker/utils"
<ide> "github.com/docker/go-units"
<ide> )
<ide> func (cli *DockerCli) CmdBuild(args ...string) error {
<ide> cmd.Var(&flBuildArg, []string{"-build-arg"}, "Set build-time variables")
<ide> isolation := cmd.String([]string{"-isolation"}, "", "Container isolation level")
<ide>
<del> ulimits := make(map[string]*ulimit.Ulimit)
<del> flUlimits := opts.NewUlimitOpt(&ulimits)
<add> ulimits := make(map[string]*units.Ulimit)
<add> flUlimits := runconfigopts.NewUlimitOpt(&ulimits)
<ide> cmd.Var(flUlimits, []string{"-ulimit"}, "Ulimit options")
<ide>
<ide> cmd.Require(flag.Exact, 1)
<ide><path>daemon/config_unix.go
<ide> import (
<ide>
<ide> "github.com/docker/docker/opts"
<ide> flag "github.com/docker/docker/pkg/mflag"
<del> "github.com/docker/docker/pkg/ulimit"
<add> runconfigopts "github.com/docker/docker/runconfig/opts"
<add> "github.com/docker/go-units"
<ide> )
<ide>
<ide> var (
<ide> type Config struct {
<ide> EnableSelinuxSupport bool
<ide> RemappedRoot string
<ide> SocketGroup string
<del> Ulimits map[string]*ulimit.Ulimit
<add> Ulimits map[string]*units.Ulimit
<ide> }
<ide>
<ide> // bridgeConfig stores all the bridge driver specific
<ide> func (config *Config) InstallFlags(cmd *flag.FlagSet, usageFn func(string) strin
<ide> // Then platform-specific install flags
<ide> cmd.BoolVar(&config.EnableSelinuxSupport, []string{"-selinux-enabled"}, false, usageFn("Enable selinux support"))
<ide> cmd.StringVar(&config.SocketGroup, []string{"G", "-group"}, "docker", usageFn("Group for the unix socket"))
<del> config.Ulimits = make(map[string]*ulimit.Ulimit)
<del> cmd.Var(opts.NewUlimitOpt(&config.Ulimits), []string{"-default-ulimit"}, usageFn("Set default ulimits for containers"))
<add> config.Ulimits = make(map[string]*units.Ulimit)
<add> cmd.Var(runconfigopts.NewUlimitOpt(&config.Ulimits), []string{"-default-ulimit"}, usageFn("Set default ulimits for containers"))
<ide> cmd.BoolVar(&config.Bridge.EnableIPTables, []string{"#iptables", "-iptables"}, true, usageFn("Enable addition of iptables rules"))
<ide> cmd.BoolVar(&config.Bridge.EnableIPForward, []string{"#ip-forward", "-ip-forward"}, true, usageFn("Enable net.ipv4.ip_forward"))
<ide> cmd.BoolVar(&config.Bridge.EnableIPMasq, []string{"-ip-masq"}, true, usageFn("Enable IP masquerading"))
<add><path>runconfig/opts/ulimit.go
<del><path>opts/ulimit.go
<ide> package opts
<ide> import (
<ide> "fmt"
<ide>
<del> "github.com/docker/docker/pkg/ulimit"
<add> "github.com/docker/go-units"
<ide> )
<ide>
<ide> // UlimitOpt defines a map of Ulimits
<ide> type UlimitOpt struct {
<del> values *map[string]*ulimit.Ulimit
<add> values *map[string]*units.Ulimit
<ide> }
<ide>
<ide> // NewUlimitOpt creates a new UlimitOpt
<del>func NewUlimitOpt(ref *map[string]*ulimit.Ulimit) *UlimitOpt {
<add>func NewUlimitOpt(ref *map[string]*units.Ulimit) *UlimitOpt {
<ide> if ref == nil {
<del> ref = &map[string]*ulimit.Ulimit{}
<add> ref = &map[string]*units.Ulimit{}
<ide> }
<ide> return &UlimitOpt{ref}
<ide> }
<ide>
<ide> // Set validates a Ulimit and sets its name as a key in UlimitOpt
<ide> func (o *UlimitOpt) Set(val string) error {
<del> l, err := ulimit.Parse(val)
<add> l, err := units.ParseUlimit(val)
<ide> if err != nil {
<ide> return err
<ide> }
<ide> func (o *UlimitOpt) String() string {
<ide> }
<ide>
<ide> // GetList returns a slice of pointers to Ulimits.
<del>func (o *UlimitOpt) GetList() []*ulimit.Ulimit {
<del> var ulimits []*ulimit.Ulimit
<add>func (o *UlimitOpt) GetList() []*units.Ulimit {
<add> var ulimits []*units.Ulimit
<ide> for _, v := range *o.values {
<ide> ulimits = append(ulimits, v)
<ide> }
<add><path>runconfig/opts/ulimit_test.go
<del><path>opts/ulimit_test.go
<ide> package opts
<ide> import (
<ide> "testing"
<ide>
<del> "github.com/docker/docker/pkg/ulimit"
<add> "github.com/docker/go-units"
<ide> )
<ide>
<ide> func TestUlimitOpt(t *testing.T) {
<del> ulimitMap := map[string]*ulimit.Ulimit{
<add> ulimitMap := map[string]*units.Ulimit{
<ide> "nofile": {"nofile", 1024, 512},
<ide> }
<ide>
<ide><path>runconfig/parse.go
<ide> import (
<ide> flag "github.com/docker/docker/pkg/mflag"
<ide> "github.com/docker/docker/pkg/mount"
<ide> "github.com/docker/docker/pkg/signal"
<add> runconfigopts "github.com/docker/docker/runconfig/opts"
<ide> "github.com/docker/docker/volume"
<ide> "github.com/docker/go-connections/nat"
<ide> "github.com/docker/go-units"
<ide> func Parse(cmd *flag.FlagSet, args []string) (*container.Config, *container.Host
<ide> flLabels = opts.NewListOpts(opts.ValidateEnv)
<ide> flDevices = opts.NewListOpts(ValidateDevice)
<ide>
<del> flUlimits = opts.NewUlimitOpt(nil)
<add> flUlimits = runconfigopts.NewUlimitOpt(nil)
<ide>
<ide> flPublish = opts.NewListOpts(nil)
<ide> flExpose = opts.NewListOpts(nil) | 5 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.