content_type
stringclasses 8
values | main_lang
stringclasses 7
values | message
stringlengths 1
50
| sha
stringlengths 40
40
| patch
stringlengths 52
962k
| file_count
int64 1
300
|
---|---|---|---|---|---|
Javascript | Javascript | fix the calculation of default vertical metrics | 99165fb70272f52a2213986bd00dc0f6963a7150 | <ide><path>src/canvas.js
<ide> var CanvasGraphics = (function CanvasGraphicsClosure() {
<ide> var character = glyph.fontChar;
<ide> var vmetric = glyph.vmetric || defaultVMetrics;
<ide> if (vertical) {
<del> var vx = vmetric[1] * fontSize * current.fontMatrix[0];
<add> var vx = glyph.vmetric ? vmetric[1] : glyph.width * 0.5;
<add> vx = -vx * fontSize * current.fontMatrix[0];
<ide> var vy = vmetric[2] * fontSize * current.fontMatrix[0];
<ide> }
<ide> var width = vmetric ? -vmetric[0] : glyph.width;
<ide> var CanvasGraphics = (function CanvasGraphicsClosure() {
<ide> geom.canvasWidth = canvasWidth;
<ide> if (vertical) {
<ide> var vmetric = font.defaultVMetrics;
<del> geom.x -= vmetric[1] * fontSize * current.fontMatrix[0] /
<add> geom.x += vmetric[1] * fontSize * current.fontMatrix[0] /
<ide> fontSizeScale * geom.hScale;
<ide> geom.y += vmetric[2] * fontSize * current.fontMatrix[0] /
<ide> fontSizeScale * geom.vScale;
<ide> var CanvasGraphics = (function CanvasGraphicsClosure() {
<ide> if (vertical) {
<ide> var fontSizeScale = current.fontSizeScale;
<ide> var vmetric = font.defaultVMetrics;
<del> geom.x -= vmetric[1] * fontSize * current.fontMatrix[0] /
<add> geom.x += vmetric[1] * fontSize * current.fontMatrix[0] /
<ide> fontSizeScale * geom.hScale;
<ide> geom.y += vmetric[2] * fontSize * current.fontMatrix[0] /
<ide> fontSizeScale * geom.vScale;
<ide><path>src/evaluator.js
<ide> var PartialEvaluator = (function PartialEvaluatorClosure() {
<ide>
<ide> if (properties.vertical) {
<ide> var vmetrics = dict.get('DW2') || [880, -1000];
<del> defaultVMetrics = [vmetrics[1], vmetrics[1] / 2, vmetrics[0]];
<add> defaultVMetrics = [vmetrics[1], defaultWidth * 0.5, vmetrics[0]];
<ide> vmetrics = dict.get('W2');
<ide> if (vmetrics) {
<ide> for (var i = 0, ii = vmetrics.length; i < ii; i++) { | 2 |
PHP | PHP | keep chain going on explicit delete. add tests | b880ad19282db768718cfd1629ebbc41054daadc | <ide><path>src/Illuminate/Queue/CallQueuedHandler.php
<ide> public function call(Job $job, array $data)
<ide> $command, $handler = $this->resolveHandler($job, $command)
<ide> );
<ide>
<del> if (! $job->isDeletedOrReleased()) {
<del> $this->ensureNextJobIsChainIsDispatched($command);
<add> if (! $job->hasFailed() && ! $job->isReleased()) {
<add> $this->ensureNextJobInChainIsDispatched($command);
<add> }
<ide>
<add> if (! $job->isDeletedOrReleased()) {
<ide> $job->delete();
<ide> }
<ide> }
<ide> protected function setJobInstanceIfNecessary(Job $job, $instance)
<ide> * @param mixed $command
<ide> * @return void
<ide> */
<del> protected function ensureNextJobIsChainIsDispatched($command)
<add> protected function ensureNextJobInChainIsDispatched($command)
<ide> {
<ide> if (method_exists($command, 'dispatchNextJobInChain')) {
<ide> $command->dispatchNextJobInChain();
<ide><path>tests/Integration/Queue/JobChainingTest.php
<ide> public function test_jobs_can_be_chained_on_success()
<ide> $this->assertTrue(JobChainingTestSecondJob::$ran);
<ide> }
<ide>
<add> public function test_jobs_chained_on_explicit_delete()
<add> {
<add> JobChainingTestDeletingJob::dispatch()->chain([
<add> new JobChainingTestSecondJob,
<add> ]);
<add>
<add> $this->assertTrue(JobChainingTestDeletingJob::$ran);
<add> $this->assertTrue(JobChainingTestSecondJob::$ran);
<add> }
<add>
<ide> public function test_jobs_can_be_chained_on_success_with_several_jobs()
<ide> {
<ide> JobChainingTestFirstJob::dispatch()->chain([
<ide> public function test_jobs_can_be_chained_via_queue()
<ide> $this->assertTrue(JobChainingTestSecondJob::$ran);
<ide> }
<ide>
<del> public function test_second_job_is_not_fired_if_first_was_already_deleted()
<add> public function test_second_job_is_not_fired_if_first_failed()
<ide> {
<ide> Queue::connection('sync')->push((new JobChainingTestFailingJob)->chain([
<ide> new JobChainingTestSecondJob,
<ide> public function test_second_job_is_not_fired_if_first_was_already_deleted()
<ide> $this->assertFalse(JobChainingTestSecondJob::$ran);
<ide> }
<ide>
<add> public function test_second_job_is_not_fired_if_first_released()
<add> {
<add> Queue::connection('sync')->push((new JobChainingTestReleasingJob)->chain([
<add> new JobChainingTestSecondJob,
<add> ]));
<add>
<add> $this->assertFalse(JobChainingTestSecondJob::$ran);
<add> }
<add>
<ide> public function test_third_job_is_not_fired_if_second_fails()
<ide> {
<ide> Queue::connection('sync')->push((new JobChainingTestFirstJob)->chain([
<ide> public function handle()
<ide> }
<ide> }
<ide>
<add>class JobChainingTestDeletingJob implements ShouldQueue
<add>{
<add> use Dispatchable, InteractsWithQueue, Queueable;
<add>
<add> public static $ran = false;
<add>
<add> public function handle()
<add> {
<add> static::$ran = true;
<add> $this->delete();
<add> }
<add>}
<add>
<add>class JobChainingTestReleasingJob implements ShouldQueue
<add>{
<add> use Dispatchable, InteractsWithQueue, Queueable;
<add>
<add> public function handle()
<add> {
<add> $this->release(30);
<add> }
<add>}
<add>
<ide> class JobChainingTestFailingJob implements ShouldQueue
<ide> {
<ide> use Dispatchable, InteractsWithQueue, Queueable; | 2 |
Mixed | Python | update no_trainer examples to use new logger | 35d48db881edec8a5ea60db9cf54cda7dd42506c | <ide><path>examples/pytorch/README.md
<ide> python xla_spawn.py --num_cores 8 \
<ide>
<ide> Most PyTorch example scripts have a version using the [🤗 Accelerate](https://github.com/huggingface/accelerate) library
<ide> that exposes the training loop so it's easy for you to customize or tweak them to your needs. They all require you to
<del>install `accelerate` with
<add>install `accelerate` with the latest development version
<ide>
<ide> ```bash
<del>pip install accelerate
<add>pip install git+https://github.com/huggingface/accelerate
<ide> ```
<ide>
<ide> Then you can easily launch any of the scripts by running
<ide><path>examples/pytorch/image-classification/run_image_classification_no_trainer.py
<ide>
<ide> import transformers
<ide> from accelerate import Accelerator
<add>from accelerate.logging import get_logger
<ide> from accelerate.utils import set_seed
<ide> from huggingface_hub import Repository
<ide> from transformers import (
<ide> from transformers.utils.versions import require_version
<ide>
<ide>
<del>logger = logging.getLogger(__name__)
<add>logger = get_logger(__name__)
<ide>
<ide> require_version("datasets>=2.0.0", "To fix: pip install -r examples/pytorch/image-classification/requirements.txt")
<ide>
<ide> def main():
<ide> datefmt="%m/%d/%Y %H:%M:%S",
<ide> level=logging.INFO,
<ide> )
<del> logger.info(accelerator.state)
<del>
<del> # Setup logging, we only want one process per machine to log things on the screen.
<del> # accelerator.is_local_main_process is only True for one process per machine.
<del> logger.setLevel(logging.INFO if accelerator.is_local_main_process else logging.ERROR)
<add> logger.info(accelerator.state, main_process_only=False)
<ide> if accelerator.is_local_main_process:
<ide> datasets.utils.logging.set_verbosity_warning()
<ide> transformers.utils.logging.set_verbosity_info()
<ide><path>examples/pytorch/language-modeling/run_clm_no_trainer.py
<ide>
<ide> import transformers
<ide> from accelerate import Accelerator, DistributedType
<add>from accelerate.logging import get_logger
<ide> from accelerate.utils import set_seed
<ide> from huggingface_hub import Repository
<ide> from transformers import (
<ide> from transformers.utils.versions import require_version
<ide>
<ide>
<del>logger = logging.getLogger(__name__)
<add>logger = get_logger(__name__)
<ide>
<ide> require_version("datasets>=1.8.0", "To fix: pip install -r examples/pytorch/language-modeling/requirements.txt")
<ide>
<ide> def main():
<ide> datefmt="%m/%d/%Y %H:%M:%S",
<ide> level=logging.INFO,
<ide> )
<del> logger.info(accelerator.state)
<del>
<del> # Setup logging, we only want one process per machine to log things on the screen.
<del> # accelerator.is_local_main_process is only True for one process per machine.
<del> logger.setLevel(logging.INFO if accelerator.is_local_main_process else logging.ERROR)
<add> logger.info(accelerator.state, main_process_only=False)
<ide> if accelerator.is_local_main_process:
<ide> datasets.utils.logging.set_verbosity_warning()
<ide> transformers.utils.logging.set_verbosity_info()
<ide><path>examples/pytorch/language-modeling/run_mlm_no_trainer.py
<ide>
<ide> import transformers
<ide> from accelerate import Accelerator, DistributedType
<add>from accelerate.logging import get_logger
<ide> from accelerate.utils import set_seed
<ide> from huggingface_hub import Repository
<ide> from transformers import (
<ide> from transformers.utils.versions import require_version
<ide>
<ide>
<del>logger = logging.getLogger(__name__)
<add>logger = get_logger(__name__)
<ide> require_version("datasets>=1.8.0", "To fix: pip install -r examples/pytorch/language-modeling/requirements.txt")
<ide> MODEL_CONFIG_CLASSES = list(MODEL_MAPPING.keys())
<ide> MODEL_TYPES = tuple(conf.model_type for conf in MODEL_CONFIG_CLASSES)
<ide> def main():
<ide> datefmt="%m/%d/%Y %H:%M:%S",
<ide> level=logging.INFO,
<ide> )
<del> logger.info(accelerator.state)
<del>
<del> # Setup logging, we only want one process per machine to log things on the screen.
<del> # accelerator.is_local_main_process is only True for one process per machine.
<del> logger.setLevel(logging.INFO if accelerator.is_local_main_process else logging.ERROR)
<add> logger.info(accelerator.state, main_process_only=False)
<ide> if accelerator.is_local_main_process:
<ide> datasets.utils.logging.set_verbosity_warning()
<ide> transformers.utils.logging.set_verbosity_info()
<ide><path>examples/pytorch/multiple-choice/run_swag_no_trainer.py
<ide>
<ide> import transformers
<ide> from accelerate import Accelerator
<add>from accelerate.logging import get_logger
<ide> from accelerate.utils import set_seed
<ide> from huggingface_hub import Repository
<ide> from transformers import (
<ide> from transformers.utils import PaddingStrategy, get_full_repo_name
<ide>
<ide>
<del>logger = logging.getLogger(__name__)
<add>logger = get_logger(__name__)
<ide> # You should update this to your particular problem to have better documentation of `model_type`
<ide> MODEL_CONFIG_CLASSES = list(MODEL_MAPPING.keys())
<ide> MODEL_TYPES = tuple(conf.model_type for conf in MODEL_CONFIG_CLASSES)
<ide> def main():
<ide> datefmt="%m/%d/%Y %H:%M:%S",
<ide> level=logging.INFO,
<ide> )
<del> logger.info(accelerator.state)
<del>
<del> # Setup logging, we only want one process per machine to log things on the screen.
<del> # accelerator.is_local_main_process is only True for one process per machine.
<del> logger.setLevel(logging.INFO if accelerator.is_local_main_process else logging.ERROR)
<add> logger.info(accelerator.state, main_process_only=False)
<ide> if accelerator.is_local_main_process:
<ide> datasets.utils.logging.set_verbosity_warning()
<ide> transformers.utils.logging.set_verbosity_info()
<ide><path>examples/pytorch/question-answering/run_qa_beam_search_no_trainer.py
<ide>
<ide> import transformers
<ide> from accelerate import Accelerator
<add>from accelerate.logging import get_logger
<ide> from accelerate.utils import set_seed
<ide> from huggingface_hub import Repository
<ide> from transformers import (
<ide>
<ide> require_version("datasets>=1.8.0", "To fix: pip install -r examples/pytorch/question-answering/requirements.txt")
<ide>
<del>logger = logging.getLogger(__name__)
<add>logger = get_logger(__name__)
<ide>
<ide>
<ide> def save_prefixed_metrics(results, output_dir, file_name: str = "all_results.json", metric_key_prefix: str = "eval"):
<ide> def main():
<ide> datefmt="%m/%d/%Y %H:%M:%S",
<ide> level=logging.INFO,
<ide> )
<del> logger.info(accelerator.state)
<del>
<del> # Setup logging, we only want one process per machine to log things on the screen.
<del> # accelerator.is_local_main_process is only True for one process per machine.
<del> logger.setLevel(logging.INFO if accelerator.is_local_main_process else logging.ERROR)
<add> logger.info(accelerator.state, main_process_only=False)
<ide> if accelerator.is_local_main_process:
<ide> datasets.utils.logging.set_verbosity_warning()
<ide> transformers.utils.logging.set_verbosity_info()
<ide><path>examples/pytorch/question-answering/run_qa_no_trainer.py
<ide>
<ide> import transformers
<ide> from accelerate import Accelerator
<add>from accelerate.logging import get_logger
<ide> from accelerate.utils import set_seed
<ide> from huggingface_hub import Repository
<ide> from transformers import (
<ide>
<ide> require_version("datasets>=1.8.0", "To fix: pip install -r examples/pytorch/question-answering/requirements.txt")
<ide>
<del>logger = logging.getLogger(__name__)
<add>logger = get_logger(__name__)
<ide> # You should update this to your particular problem to have better documentation of `model_type`
<ide> MODEL_CONFIG_CLASSES = list(MODEL_MAPPING.keys())
<ide> MODEL_TYPES = tuple(conf.model_type for conf in MODEL_CONFIG_CLASSES)
<ide> def main():
<ide> datefmt="%m/%d/%Y %H:%M:%S",
<ide> level=logging.INFO,
<ide> )
<del> logger.info(accelerator.state)
<del>
<del> # Setup logging, we only want one process per machine to log things on the screen.
<del> # accelerator.is_local_main_process is only True for one process per machine.
<del> logger.setLevel(logging.INFO if accelerator.is_local_main_process else logging.ERROR)
<add> logger.info(accelerator.state, main_process_only=False)
<ide> if accelerator.is_local_main_process:
<ide> datasets.utils.logging.set_verbosity_warning()
<ide> transformers.utils.logging.set_verbosity_info()
<ide><path>examples/pytorch/semantic-segmentation/run_semantic_segmentation_no_trainer.py
<ide>
<ide> import argparse
<ide> import json
<del>import logging
<ide> import math
<ide> import os
<ide> import random
<ide>
<ide> import transformers
<ide> from accelerate import Accelerator
<add>from accelerate.logging import get_logger
<ide> from accelerate.utils import set_seed
<ide> from huggingface_hub import Repository, hf_hub_download
<ide> from transformers import (
<ide> from transformers.utils.versions import require_version
<ide>
<ide>
<del>logger = logging.getLogger(__name__)
<add>logger = get_logger(__name__)
<ide>
<ide> require_version("datasets>=2.0.0", "To fix: pip install -r examples/pytorch/semantic-segmentation/requirements.txt")
<ide>
<ide> def main():
<ide> # Initialize the accelerator. We will let the accelerator handle device placement for us in this example.
<ide> # If we're using tracking, we also need to initialize it here and it will pick up all supported trackers in the environment
<ide> accelerator = Accelerator(log_with="all", logging_dir=args.output_dir) if args.with_tracking else Accelerator()
<del> logger.info(accelerator.state)
<del>
<del> # Setup logging, we only want one process per machine to log things on the screen.
<del> # accelerator.is_local_main_process is only True for one process per machine.
<del> logger.setLevel(logging.INFO if accelerator.is_local_main_process else logging.ERROR)
<add> logger.info(accelerator.state, main_process_only=False)
<ide> if accelerator.is_local_main_process:
<ide> datasets.utils.logging.set_verbosity_warning()
<ide> transformers.utils.logging.set_verbosity_info()
<ide><path>examples/pytorch/speech-pretraining/run_wav2vec2_pretraining_no_trainer.py
<ide> """ Pre-Training a 🤗 Wav2Vec2 model on unlabeled audio data """
<ide>
<ide> import argparse
<del>import logging
<ide> import math
<ide> import os
<ide> from dataclasses import dataclass
<ide>
<ide> import transformers
<ide> from accelerate import Accelerator
<add>from accelerate.logging import get_logger
<ide> from huggingface_hub import Repository
<ide> from transformers import (
<ide> AdamW,
<ide> from transformers.utils import get_full_repo_name
<ide>
<ide>
<del>logger = logging.getLogger(__name__)
<add>logger = get_logger(__name__)
<ide>
<ide>
<ide> def parse_args():
<ide> def main():
<ide>
<ide> # Initialize the accelerator. We will let the accelerator handle device placement for us in this example.
<ide> accelerator = Accelerator()
<del> logger.info(accelerator.state)
<del>
<del> # Setup logging, we only want one process per machine to log things on the screen.
<del> # accelerator.is_local_main_process is only True for one process per machine.
<del> logger.setLevel(logging.INFO if accelerator.is_local_main_process else logging.ERROR)
<add> logger.info(accelerator.state, main_process_only=False)
<ide> if accelerator.is_local_main_process:
<ide> datasets.utils.logging.set_verbosity_warning()
<ide> transformers.utils.logging.set_verbosity_info()
<ide><path>examples/pytorch/summarization/run_summarization_no_trainer.py
<ide>
<ide> import transformers
<ide> from accelerate import Accelerator
<add>from accelerate.logging import get_logger
<ide> from accelerate.utils import set_seed
<ide> from filelock import FileLock
<ide> from huggingface_hub import Repository
<ide> from transformers.utils.versions import require_version
<ide>
<ide>
<del>logger = logging.getLogger(__name__)
<add>logger = get_logger(__name__)
<ide> require_version("datasets>=1.8.0", "To fix: pip install -r examples/pytorch/summarization/requirements.txt")
<ide>
<ide> # You should update this to your particular problem to have better documentation of `model_type`
<ide> def main():
<ide> datefmt="%m/%d/%Y %H:%M:%S",
<ide> level=logging.INFO,
<ide> )
<del> logger.info(accelerator.state)
<del>
<del> # Setup logging, we only want one process per machine to log things on the screen.
<del> # accelerator.is_local_main_process is only True for one process per machine.
<del> logger.setLevel(logging.INFO if accelerator.is_local_main_process else logging.ERROR)
<add> logger.info(accelerator.state, main_process_only=False)
<ide> if accelerator.is_local_main_process:
<ide> datasets.utils.logging.set_verbosity_warning()
<ide> transformers.utils.logging.set_verbosity_info()
<ide><path>examples/pytorch/text-classification/run_glue_no_trainer.py
<ide>
<ide> import transformers
<ide> from accelerate import Accelerator
<add>from accelerate.logging import get_logger
<ide> from accelerate.utils import set_seed
<ide> from huggingface_hub import Repository
<ide> from transformers import (
<ide> from transformers.utils.versions import require_version
<ide>
<ide>
<del>logger = logging.getLogger(__name__)
<add>logger = get_logger(__name__)
<ide>
<ide> require_version("datasets>=1.8.0", "To fix: pip install -r examples/pytorch/text-classification/requirements.txt")
<ide>
<ide> def main():
<ide> datefmt="%m/%d/%Y %H:%M:%S",
<ide> level=logging.INFO,
<ide> )
<del> logger.info(accelerator.state)
<del>
<del> # Setup logging, we only want one process per machine to log things on the screen.
<del> # accelerator.is_local_main_process is only True for one process per machine.
<del> logger.setLevel(logging.INFO if accelerator.is_local_main_process else logging.ERROR)
<add> logger.info(accelerator.state, main_process_only=False)
<ide> if accelerator.is_local_main_process:
<ide> datasets.utils.logging.set_verbosity_warning()
<ide> transformers.utils.logging.set_verbosity_info()
<ide><path>examples/pytorch/token-classification/run_ner_no_trainer.py
<ide>
<ide> import transformers
<ide> from accelerate import Accelerator
<add>from accelerate.logging import get_logger
<ide> from accelerate.utils import set_seed
<ide> from huggingface_hub import Repository
<ide> from transformers import (
<ide> from transformers.utils.versions import require_version
<ide>
<ide>
<del>logger = logging.getLogger(__name__)
<add>logger = get_logger(__name__)
<ide> require_version("datasets>=1.8.0", "To fix: pip install -r examples/pytorch/token-classification/requirements.txt")
<ide>
<ide> # You should update this to your particular problem to have better documentation of `model_type`
<ide> def main():
<ide> datefmt="%m/%d/%Y %H:%M:%S",
<ide> level=logging.INFO,
<ide> )
<del> logger.info(accelerator.state)
<del>
<del> # Setup logging, we only want one process per machine to log things on the screen.
<del> # accelerator.is_local_main_process is only True for one process per machine.
<del> logger.setLevel(logging.INFO if accelerator.is_local_main_process else logging.ERROR)
<add> logger.info(accelerator.state, main_process_only=False)
<ide> if accelerator.is_local_main_process:
<ide> datasets.utils.logging.set_verbosity_warning()
<ide> transformers.utils.logging.set_verbosity_info()
<ide><path>examples/pytorch/translation/run_translation_no_trainer.py
<ide>
<ide> import transformers
<ide> from accelerate import Accelerator
<add>from accelerate.logging import get_logger
<ide> from accelerate.utils import set_seed
<ide> from huggingface_hub import Repository
<ide> from transformers import (
<ide> from transformers.utils.versions import require_version
<ide>
<ide>
<del>logger = logging.getLogger(__name__)
<add>logger = get_logger(__name__)
<ide> require_version("datasets>=1.8.0", "To fix: pip install -r examples/pytorch/translation/requirements.txt")
<ide>
<ide> # You should update this to your particular problem to have better documentation of `model_type`
<ide> def main():
<ide> datefmt="%m/%d/%Y %H:%M:%S",
<ide> level=logging.INFO,
<ide> )
<del> logger.info(accelerator.state)
<del>
<del> # Setup logging, we only want one process per machine to log things on the screen.
<del> # accelerator.is_local_main_process is only True for one process per machine.
<del> logger.setLevel(logging.INFO if accelerator.is_local_main_process else logging.ERROR)
<add> logger.info(accelerator.state, main_process_only=False)
<ide> if accelerator.is_local_main_process:
<ide> datasets.utils.logging.set_verbosity_warning()
<ide> transformers.utils.logging.set_verbosity_info() | 13 |
Python | Python | add support for verbose flag to language | 96da86b3e5d3a515f0f8db57ef1704750233ff38 | <ide><path>spacy/language.py
<ide> def begin_training(self, get_gold_tuples=None, **cfg):
<ide> self._optimizer.device = device
<ide> return self._optimizer
<ide>
<del> def evaluate(self, docs_golds):
<add> def evaluate(self, docs_golds, verbose=False):
<ide> scorer = Scorer()
<ide> docs, golds = zip(*docs_golds)
<ide> docs = list(docs)
<ide> def evaluate(self, docs_golds):
<ide> docs = list(pipe.pipe(docs))
<ide> assert len(docs) == len(golds)
<ide> for doc, gold in zip(docs, golds):
<del> scorer.score(doc, gold)
<add> if verbose:
<add> print(doc)
<add> scorer.score(doc, gold, verbose=verbose)
<ide> return scorer
<ide>
<ide> @contextmanager | 1 |
Ruby | Ruby | remove relation#& alias for relation#merge | fbd917f50a6046d02dd6a64ccfb1aed0cbce68d8 | <ide><path>activerecord/lib/active_record/associations/association_proxy.rb
<ide> def send(method, *args)
<ide> end
<ide>
<ide> def scoped
<del> target_scope & @association_scope
<add> target_scope.merge(@association_scope)
<ide> end
<ide>
<ide> protected
<ide><path>activerecord/lib/active_record/associations/through_association.rb
<ide> module ThroughAssociation
<ide> protected
<ide>
<ide> def target_scope
<del> super & @reflection.through_reflection.klass.scoped
<add> super.merge(@reflection.through_reflection.klass.scoped)
<ide> end
<ide>
<ide> def association_scope
<ide><path>activerecord/lib/active_record/relation/spawn_methods.rb
<ide> def merge(r)
<ide> merged_relation
<ide> end
<ide>
<del> alias :& :merge
<del>
<ide> # Removes from the query the condition(s) specified in +skips+.
<ide> #
<ide> # Example:
<ide><path>activerecord/test/cases/method_scoping_test.rb
<ide> def test_scoped_create
<ide> end
<ide>
<ide> def test_scoped_create_with_join_and_merge
<del> (Comment.where(:body => "but Who's Buying?").joins(:post) & Post.where(:body => 'Peace Sells...')).with_scope do
<add> Comment.where(:body => "but Who's Buying?").joins(:post).merge(Post.where(:body => 'Peace Sells...')).with_scope do
<ide> assert_equal({:body => "but Who's Buying?"}, Comment.scoped.scope_for_create)
<ide> end
<ide> end
<ide><path>activerecord/test/cases/relation_scoping_test.rb
<ide> def test_scope_composed_by_limit_and_then_offset_is_equal_to_scope_composed_by_o
<ide> end
<ide>
<ide> def test_create_with_merge
<del> aaron = (PoorDeveloperCalledJamis.create_with(:name => 'foo', :salary => 20) &
<del> PoorDeveloperCalledJamis.create_with(:name => 'Aaron')).new
<add> aaron = PoorDeveloperCalledJamis.create_with(:name => 'foo', :salary => 20).merge(
<add> PoorDeveloperCalledJamis.create_with(:name => 'Aaron')).new
<ide> assert_equal 20, aaron.salary
<ide> assert_equal 'Aaron', aaron.name
<ide>
<ide><path>activerecord/test/cases/relations_test.rb
<ide> def test_select_argument_error
<ide> end
<ide>
<ide> def test_relation_merging
<del> devs = Developer.where("salary >= 80000") & Developer.limit(2) & Developer.order('id ASC').where("id < 3")
<add> devs = Developer.where("salary >= 80000").merge(Developer.limit(2)).merge(Developer.order('id ASC').where("id < 3"))
<ide> assert_equal [developers(:david), developers(:jamis)], devs.to_a
<ide>
<del> dev_with_count = Developer.limit(1) & Developer.order('id DESC') & Developer.select('developers.*')
<add> dev_with_count = Developer.limit(1).merge(Developer.order('id DESC')).merge(Developer.select('developers.*'))
<ide> assert_equal [developers(:poor_jamis)], dev_with_count.to_a
<ide> end
<ide>
<ide> def test_relation_merging_with_eager_load
<ide> relations = []
<del> relations << (Post.order('comments.id DESC') & Post.eager_load(:last_comment) & Post.scoped)
<del> relations << (Post.eager_load(:last_comment) & Post.order('comments.id DESC') & Post.scoped)
<add> relations << Post.order('comments.id DESC').merge(Post.eager_load(:last_comment)).merge(Post.scoped)
<add> relations << Post.eager_load(:last_comment).merge(Post.order('comments.id DESC')).merge(Post.scoped)
<ide>
<ide> relations.each do |posts|
<ide> post = posts.find { |p| p.id == 1 }
<ide> def test_relation_merging_with_eager_load
<ide> end
<ide>
<ide> def test_relation_merging_with_locks
<del> devs = Developer.lock.where("salary >= 80000").order("id DESC") & Developer.limit(2)
<add> devs = Developer.lock.where("salary >= 80000").order("id DESC").merge(Developer.limit(2))
<ide> assert_present devs.locked
<ide> end
<ide>
<ide> def test_relation_merging_with_preload
<del> [Post.scoped & Post.preload(:author), Post.preload(:author) & Post.scoped].each do |posts|
<add> [Post.scoped.merge(Post.preload(:author)), Post.preload(:author).merge(Post.scoped)].each do |posts|
<ide> assert_queries(2) { assert posts.first.author }
<ide> end
<ide> end
<ide>
<ide> def test_relation_merging_with_joins
<del> comments = Comment.joins(:post).where(:body => 'Thank you for the welcome') & Post.where(:body => 'Such a lovely day')
<add> comments = Comment.joins(:post).where(:body => 'Thank you for the welcome').merge(Post.where(:body => 'Such a lovely day'))
<ide> assert_equal 1, comments.count
<ide> end
<ide> | 6 |
Ruby | Ruby | convert `patching` test to spec | af65b07ac944082f5c9a84803690189e403b854a | <ide><path>Library/Homebrew/test/patching_spec.rb
<add>require "formula"
<add>
<add>describe "patching" do
<add> TESTBALL_URL = "file://#{TEST_FIXTURE_DIR}/tarballs/testball-0.1.tbz".freeze
<add> TESTBALL_PATCHES_URL = "file://#{TEST_FIXTURE_DIR}/tarballs/testball-0.1-patches.tgz".freeze
<add> PATCH_URL_A = "file://#{TEST_FIXTURE_DIR}/patches/noop-a.diff".freeze
<add> PATCH_URL_B = "file://#{TEST_FIXTURE_DIR}/patches/noop-b.diff".freeze
<add> PATCH_A_CONTENTS = File.read "#{TEST_FIXTURE_DIR}/patches/noop-a.diff"
<add> PATCH_B_CONTENTS = File.read "#{TEST_FIXTURE_DIR}/patches/noop-b.diff"
<add> APPLY_A = "noop-a.diff".freeze
<add> APPLY_B = "noop-b.diff".freeze
<add> APPLY_C = "noop-c.diff".freeze
<add>
<add> def formula(name = "formula_name", path: Formulary.core_path(name), spec: :stable, alias_path: nil, &block)
<add> Class.new(Formula) {
<add> url TESTBALL_URL
<add> sha256 TESTBALL_SHA256
<add> class_eval(&block)
<add> }.new(name, path, spec, alias_path: alias_path)
<add> end
<add>
<add> matcher :be_patched do
<add> match do |formula|
<add> shutup do
<add> formula.brew do
<add> formula.patch
<add> s = File.read("libexec/NOOP")
<add> expect(s).not_to include("NOOP"), "libexec/NOOP was not patched as expected"
<add> expect(s).to include("ABCD"), "libexec/NOOP was not patched as expected"
<add> end
<add> end
<add> end
<add> end
<add>
<add> matcher :be_sequentially_patched do
<add> match do |formula|
<add> shutup do
<add> formula.brew do
<add> formula.patch
<add> s = File.read("libexec/NOOP")
<add> expect(s).not_to include("NOOP"), "libexec/NOOP was not patched as expected"
<add> expect(s).not_to include("ABCD"), "libexec/NOOP was not patched as expected"
<add> expect(s).to include("1234"), "libexec/NOOP was not patched as expected"
<add> end
<add> end
<add> end
<add> end
<add>
<add> matcher :miss_apply do
<add> match do |formula|
<add> expect {
<add> shutup do
<add> formula.brew do
<add> formula.patch
<add> end
<add> end
<add> }.to raise_error(MissingApplyError)
<add> end
<add> end
<add>
<add> specify "single_patch" do
<add> expect(
<add> formula do
<add> def patches
<add> PATCH_URL_A
<add> end
<add> end,
<add> ).to be_patched
<add> end
<add>
<add> specify "single_patch_dsl" do
<add> expect(
<add> formula do
<add> patch do
<add> url PATCH_URL_A
<add> sha256 PATCH_A_SHA256
<add> end
<add> end,
<add> ).to be_patched
<add> end
<add>
<add> specify "single_patch_dsl_with_apply" do
<add> expect(
<add> formula do
<add> patch do
<add> url TESTBALL_PATCHES_URL
<add> sha256 TESTBALL_PATCHES_SHA256
<add> apply APPLY_A
<add> end
<add> end,
<add> ).to be_patched
<add> end
<add>
<add> specify "single_patch_dsl_with_sequential_apply" do
<add> expect(
<add> formula do
<add> patch do
<add> url TESTBALL_PATCHES_URL
<add> sha256 TESTBALL_PATCHES_SHA256
<add> apply APPLY_A, APPLY_C
<add> end
<add> end,
<add> ).to be_sequentially_patched
<add> end
<add>
<add> specify "single_patch_dsl_with_strip" do
<add> expect(
<add> formula do
<add> patch :p1 do
<add> url PATCH_URL_A
<add> sha256 PATCH_A_SHA256
<add> end
<add> end,
<add> ).to be_patched
<add> end
<add>
<add> specify "single_patch_dsl_with_strip_with_apply" do
<add> expect(
<add> formula do
<add> patch :p1 do
<add> url TESTBALL_PATCHES_URL
<add> sha256 TESTBALL_PATCHES_SHA256
<add> apply APPLY_A
<add> end
<add> end,
<add> ).to be_patched
<add> end
<add>
<add> specify "single_patch_dsl_with_incorrect_strip" do
<add> expect {
<add> shutup do
<add> f = formula do
<add> patch :p0 do
<add> url PATCH_URL_A
<add> sha256 PATCH_A_SHA256
<add> end
<add> end
<add>
<add> f.brew { |formula, _staging| formula.patch }
<add> end
<add> }.to raise_error(ErrorDuringExecution)
<add> end
<add>
<add> specify "single_patch_dsl_with_incorrect_strip_with_apply" do
<add> expect {
<add> shutup do
<add> f = formula do
<add> patch :p0 do
<add> url TESTBALL_PATCHES_URL
<add> sha256 TESTBALL_PATCHES_SHA256
<add> apply APPLY_A
<add> end
<add> end
<add>
<add> f.brew { |formula, _staging| formula.patch }
<add> end
<add> }.to raise_error(ErrorDuringExecution)
<add> end
<add>
<add> specify "patch_p0_dsl" do
<add> expect(
<add> formula do
<add> patch :p0 do
<add> url PATCH_URL_B
<add> sha256 PATCH_B_SHA256
<add> end
<add> end,
<add> ).to be_patched
<add> end
<add>
<add> specify "patch_p0_dsl_with_apply" do
<add> expect(
<add> formula do
<add> patch :p0 do
<add> url TESTBALL_PATCHES_URL
<add> sha256 TESTBALL_PATCHES_SHA256
<add> apply APPLY_B
<add> end
<add> end,
<add> ).to be_patched
<add> end
<add>
<add> specify "patch_p0" do
<add> expect(
<add> formula do
<add> def patches
<add> { p0: PATCH_URL_B }
<add> end
<add> end,
<add> ).to be_patched
<add> end
<add>
<add> specify "patch_array" do
<add> expect(
<add> formula do
<add> def patches
<add> [PATCH_URL_A]
<add> end
<add> end,
<add> ).to be_patched
<add> end
<add>
<add> specify "patch_hash" do
<add> expect(
<add> formula do
<add> def patches
<add> { p1: PATCH_URL_A }
<add> end
<add> end,
<add> ).to be_patched
<add> end
<add>
<add> specify "patch_hash_array" do
<add> expect(
<add> formula do
<add> def patches
<add> { p1: [PATCH_URL_A] }
<add> end
<add> end,
<add> ).to be_patched
<add> end
<add>
<add> specify "patch_string" do
<add> expect(formula { patch PATCH_A_CONTENTS }).to be_patched
<add> end
<add>
<add> specify "patch_string_with_strip" do
<add> expect(formula { patch :p0, PATCH_B_CONTENTS }).to be_patched
<add> end
<add>
<add> specify "patch_data_constant" do
<add> expect(
<add> formula("test", path: Pathname.new(__FILE__).expand_path) do
<add> def patches
<add> :DATA
<add> end
<add> end,
<add> ).to be_patched
<add> end
<add>
<add> specify "single_patch_missing_apply_fail" do
<add> expect(
<add> formula do
<add> def patches
<add> TESTBALL_PATCHES_URL
<add> end
<add> end,
<add> ).to miss_apply
<add> end
<add>
<add> specify "single_patch_dsl_missing_apply_fail" do
<add> expect(
<add> formula do
<add> patch do
<add> url TESTBALL_PATCHES_URL
<add> sha256 TESTBALL_PATCHES_SHA256
<add> end
<add> end,
<add> ).to miss_apply
<add> end
<add>
<add> specify "single_patch_dsl_with_apply_enoent_fail" do
<add> expect {
<add> shutup do
<add> f = formula do
<add> patch do
<add> url TESTBALL_PATCHES_URL
<add> sha256 TESTBALL_PATCHES_SHA256
<add> apply "patches/#{APPLY_A}"
<add> end
<add> end
<add>
<add> f.brew { |formula, _staging| formula.patch }
<add> end
<add> }.to raise_error(ErrorDuringExecution)
<add> end
<add>end
<add>
<add>__END__
<add>diff --git a/libexec/NOOP b/libexec/NOOP
<add>index bfdda4c..e08d8f4 100755
<add>--- a/libexec/NOOP
<add>+++ b/libexec/NOOP
<add>
<add> #!/bin/bash
<add>-echo NOOP
<add>\ No newline at end of file
<add>+echo ABCD
<add>\ No newline at end of file
<ide><path>Library/Homebrew/test/patching_test.rb
<del>require "testing_env"
<del>require "formula"
<del>
<del>class PatchingTests < Homebrew::TestCase
<del> TESTBALL_URL = "file://#{TEST_FIXTURE_DIR}/tarballs/testball-0.1.tbz".freeze
<del> TESTBALL_PATCHES_URL = "file://#{TEST_FIXTURE_DIR}/tarballs/testball-0.1-patches.tgz".freeze
<del> PATCH_URL_A = "file://#{TEST_FIXTURE_DIR}/patches/noop-a.diff".freeze
<del> PATCH_URL_B = "file://#{TEST_FIXTURE_DIR}/patches/noop-b.diff".freeze
<del> PATCH_A_CONTENTS = File.read "#{TEST_FIXTURE_DIR}/patches/noop-a.diff"
<del> PATCH_B_CONTENTS = File.read "#{TEST_FIXTURE_DIR}/patches/noop-b.diff"
<del> APPLY_A = "noop-a.diff".freeze
<del> APPLY_B = "noop-b.diff".freeze
<del> APPLY_C = "noop-c.diff".freeze
<del>
<del> def formula(*args, &block)
<del> super do
<del> url TESTBALL_URL
<del> sha256 TESTBALL_SHA256
<del> class_eval(&block)
<del> end
<del> end
<del>
<del> def assert_patched(formula)
<del> shutup do
<del> formula.brew do
<del> formula.patch
<del> s = File.read("libexec/NOOP")
<del> refute_includes s, "NOOP", "libexec/NOOP was not patched as expected"
<del> assert_includes s, "ABCD", "libexec/NOOP was not patched as expected"
<del> end
<del> end
<del> end
<del>
<del> def assert_sequentially_patched(formula)
<del> shutup do
<del> formula.brew do
<del> formula.patch
<del> s = File.read("libexec/NOOP")
<del> refute_includes s, "NOOP", "libexec/NOOP was not patched as expected"
<del> refute_includes s, "ABCD", "libexec/NOOP was not patched as expected"
<del> assert_includes s, "1234", "libexec/NOOP was not patched as expected"
<del> end
<del> end
<del> end
<del>
<del> def assert_missing_apply_fail(formula)
<del> assert_raises(MissingApplyError) do
<del> shutup do
<del> formula.brew do
<del> formula.patch
<del> end
<del> end
<del> end
<del> end
<del>
<del> def test_single_patch
<del> assert_patched formula {
<del> def patches
<del> PATCH_URL_A
<del> end
<del> }
<del> end
<del>
<del> def test_single_patch_dsl
<del> assert_patched formula {
<del> patch do
<del> url PATCH_URL_A
<del> sha256 PATCH_A_SHA256
<del> end
<del> }
<del> end
<del>
<del> def test_single_patch_dsl_with_apply
<del> assert_patched formula {
<del> patch do
<del> url TESTBALL_PATCHES_URL
<del> sha256 TESTBALL_PATCHES_SHA256
<del> apply APPLY_A
<del> end
<del> }
<del> end
<del>
<del> def test_single_patch_dsl_with_sequential_apply
<del> assert_sequentially_patched formula {
<del> patch do
<del> url TESTBALL_PATCHES_URL
<del> sha256 TESTBALL_PATCHES_SHA256
<del> apply APPLY_A, APPLY_C
<del> end
<del> }
<del> end
<del>
<del> def test_single_patch_dsl_with_strip
<del> assert_patched formula {
<del> patch :p1 do
<del> url PATCH_URL_A
<del> sha256 PATCH_A_SHA256
<del> end
<del> }
<del> end
<del>
<del> def test_single_patch_dsl_with_strip_with_apply
<del> assert_patched formula {
<del> patch :p1 do
<del> url TESTBALL_PATCHES_URL
<del> sha256 TESTBALL_PATCHES_SHA256
<del> apply APPLY_A
<del> end
<del> }
<del> end
<del>
<del> def test_single_patch_dsl_with_incorrect_strip
<del> assert_raises(ErrorDuringExecution) do
<del> shutup do
<del> formula do
<del> patch :p0 do
<del> url PATCH_URL_A
<del> sha256 PATCH_A_SHA256
<del> end
<del> end.brew { |f, _staging| f.patch }
<del> end
<del> end
<del> end
<del>
<del> def test_single_patch_dsl_with_incorrect_strip_with_apply
<del> assert_raises(ErrorDuringExecution) do
<del> shutup do
<del> formula do
<del> patch :p0 do
<del> url TESTBALL_PATCHES_URL
<del> sha256 TESTBALL_PATCHES_SHA256
<del> apply APPLY_A
<del> end
<del> end.brew { |f, _staging| f.patch }
<del> end
<del> end
<del> end
<del>
<del> def test_patch_p0_dsl
<del> assert_patched formula {
<del> patch :p0 do
<del> url PATCH_URL_B
<del> sha256 PATCH_B_SHA256
<del> end
<del> }
<del> end
<del>
<del> def test_patch_p0_dsl_with_apply
<del> assert_patched formula {
<del> patch :p0 do
<del> url TESTBALL_PATCHES_URL
<del> sha256 TESTBALL_PATCHES_SHA256
<del> apply APPLY_B
<del> end
<del> }
<del> end
<del>
<del> def test_patch_p0
<del> assert_patched formula {
<del> def patches
<del> { p0: PATCH_URL_B }
<del> end
<del> }
<del> end
<del>
<del> def test_patch_array
<del> assert_patched formula {
<del> def patches
<del> [PATCH_URL_A]
<del> end
<del> }
<del> end
<del>
<del> def test_patch_hash
<del> assert_patched formula {
<del> def patches
<del> { p1: PATCH_URL_A }
<del> end
<del> }
<del> end
<del>
<del> def test_patch_hash_array
<del> assert_patched formula {
<del> def patches
<del> { p1: [PATCH_URL_A] }
<del> end
<del> }
<del> end
<del>
<del> def test_patch_string
<del> assert_patched formula { patch PATCH_A_CONTENTS }
<del> end
<del>
<del> def test_patch_string_with_strip
<del> assert_patched formula { patch :p0, PATCH_B_CONTENTS }
<del> end
<del>
<del> def test_patch_data_constant
<del> assert_patched formula("test", Pathname.new(__FILE__).expand_path) {
<del> def patches
<del> :DATA
<del> end
<del> }
<del> end
<del>
<del> def test_single_patch_missing_apply_fail
<del> assert_missing_apply_fail formula {
<del> def patches
<del> TESTBALL_PATCHES_URL
<del> end
<del> }
<del> end
<del>
<del> def test_single_patch_dsl_missing_apply_fail
<del> assert_missing_apply_fail formula {
<del> patch do
<del> url TESTBALL_PATCHES_URL
<del> sha256 TESTBALL_PATCHES_SHA256
<del> end
<del> }
<del> end
<del>
<del> def test_single_patch_dsl_with_apply_enoent_fail
<del> assert_raises(ErrorDuringExecution) do
<del> shutup do
<del> formula do
<del> patch do
<del> url TESTBALL_PATCHES_URL
<del> sha256 TESTBALL_PATCHES_SHA256
<del> apply "patches/#{APPLY_A}"
<del> end
<del> end.brew { |f, _staging| f.patch }
<del> end
<del> end
<del> end
<del>end
<del>
<del>__END__
<del>diff --git a/libexec/NOOP b/libexec/NOOP
<del>index bfdda4c..e08d8f4 100755
<del>--- a/libexec/NOOP
<del>+++ b/libexec/NOOP
<del>
<del> #!/bin/bash
<del>-echo NOOP
<del>\ No newline at end of file
<del>+echo ABCD
<del>\ No newline at end of file | 2 |
Ruby | Ruby | fix spelling in compute_class warning | a2fca818c7a9d296cbc193c5abe56aecd226bc4d | <ide><path>activerecord/lib/active_record/reflection.rb
<ide> def compute_class(name)
<ide> msg = <<-MSG.squish
<ide> Rails couldn't find a valid model for #{name} association.
<ide> Please provide the :class_name option on the association declaration.
<del> If :class_name is already provided make sure is an ActiveRecord::Base subclass.
<add> If :class_name is already provided, make sure it's an ActiveRecord::Base subclass.
<ide> MSG
<ide>
<ide> begin | 1 |
Javascript | Javascript | allocate iteration structures lazily | 2b460347e7f0f18a19bc559b028cb5d51820a227 | <ide><path>packages/ember-metal/lib/meta.js
<ide> export class Meta {
<ide>
<ide> _forEachIn(key, subkey, fn) {
<ide> let pointer = this;
<del> let seen = new EmptyObject();
<del> let calls = [];
<add> let seen;
<add> let calls;
<ide> while (pointer !== undefined) {
<ide> let map = pointer[key];
<ide> if (map) {
<add> seen = seen || new EmptyObject();
<ide> let innerMap = map[subkey];
<ide> if (innerMap) {
<ide> for (let innerKey in innerMap) {
<ide> if (!seen[innerKey]) {
<ide> seen[innerKey] = true;
<add> calls = calls || [];
<ide> calls.push([innerKey, innerMap[innerKey]]);
<ide> }
<ide> }
<ide> }
<ide> }
<ide> pointer = pointer.parent;
<ide> }
<del> for (let i = 0; i < calls.length; i++) {
<del> let [innerKey, value] = calls[i];
<del> fn(innerKey, value);
<add> if (calls) {
<add> for (let i = 0; i < calls.length; i++) {
<add> let [innerKey, value] = calls[i];
<add> fn(innerKey, value);
<add> }
<ide> }
<ide> }
<ide> | 1 |
PHP | PHP | fix cs error | 423e6836eb6746d20bb25a7812f99e983a520663 | <ide><path>tests/TestCase/ORM/Locator/TableLocatorTest.php
<ide> use Cake\TestSuite\TestCase;
<ide> use Cake\Validation\Validator;
<ide> use TestApp\Infrastructure\Table\AddressesTable;
<del>use TestApp\Model\Table\ArticlesTable;
<ide> use TestPlugin\Infrastructure\Table\AddressesTable as PluginAddressesTable;
<ide>
<ide> /** | 1 |
Ruby | Ruby | add new osxfuse exceptions | 7572e91a6031dfc2c5d12d07686436aa44de501e | <ide><path>Library/Homebrew/diagnostic.rb
<ide> def check_for_stray_dylibs
<ide> "libmacfuse_i64.2.dylib", # OSXFuse MacFuse compatibility layer
<ide> "libosxfuse_i32.2.dylib", # OSXFuse
<ide> "libosxfuse_i64.2.dylib", # OSXFuse
<add> "libosxfuse.2.dylib", # OSXFuse
<ide> "libTrAPI.dylib", # TrAPI / Endpoint Security VPN
<ide> "libntfs-3g.*.dylib", # NTFS-3G
<ide> "libntfs.*.dylib", # NTFS-3G
<ide> def check_for_stray_las
<ide> "libfuse_ino64.la", # MacFuse
<ide> "libosxfuse_i32.la", # OSXFuse
<ide> "libosxfuse_i64.la", # OSXFuse
<add> "libosxfuse.la", # OSXFuse
<ide> "libntfs-3g.la", # NTFS-3G
<ide> "libntfs.la", # NTFS-3G
<ide> "libublio.la", # NTFS-3G | 1 |
Javascript | Javascript | apply automatic lint fixes for inspect_repl.js | bdb6c597a7d51035d18d0fee27d8a385998c5084 | <ide><path>lib/internal/inspector/inspect_repl.js
<ide> function createRepl(inspector) {
<ide> })
<ide> .join('\n');
<ide> }
<add>
<ide> function listScripts(displayNatives = false) {
<ide> print(formatScripts(displayNatives));
<ide> }
<ide> function createRepl(inspector) {
<ide> const i = start + offset;
<ide> const isCurrent = i === (lineNumber + 1);
<ide>
<del> const markedLine = isCurrent
<del> ? markSourceColumn(lineText, columnNumber, options.colors)
<del> : lineText;
<add> const markedLine = isCurrent ?
<add> markSourceColumn(lineText, columnNumber, options.colors) :
<add> lineText;
<ide>
<ide> let isBreakpoint = false;
<ide> knownBreakpoints.forEach(({ location }) => {
<ide> function createRepl(inspector) {
<ide>
<ide> function prepareControlCode(input) {
<ide> if (input === '\n') return lastCommand;
<del> // exec process.title => exec("process.title");
<add> // Add parentheses: exec process.title => exec("process.title");
<ide> const match = input.match(/^\s*exec\s+([^\n]*)/);
<ide> if (match) {
<ide> lastCommand = `exec(${JSON.stringify(match[1])})`;
<ide> function createRepl(inspector) {
<ide> // setBreakpoint('fn()'): Break when a function is called
<ide> if (script.endsWith('()')) {
<ide> const debugExpr = `debug(${script.slice(0, -2)})`;
<del> const debugCall = selectedFrame
<del> ? Debugger.evaluateOnCallFrame({
<add> const debugCall = selectedFrame ?
<add> Debugger.evaluateOnCallFrame({
<ide> callFrameId: selectedFrame.callFrameId,
<ide> expression: debugExpr,
<ide> includeCommandLineAPI: true,
<del> })
<del> : Runtime.evaluate({
<add> }) :
<add> Runtime.evaluate({
<ide> expression: debugExpr,
<ide> includeCommandLineAPI: true,
<ide> });
<ide> function createRepl(inspector) {
<ide>
<ide> inspector.suspendReplWhile(() =>
<ide> Promise.all([formatWatchers(true), selectedFrame.list(2)])
<del> .then(([watcherList, context]) => {
<add> .then(({ 0: watcherList, 1: context }) => {
<ide> if (watcherList) {
<ide> return `${watcherList}\n${inspect(context)}`;
<ide> }
<ide> function createRepl(inspector) {
<ide> Debugger.on('scriptParsed', (script) => {
<ide> const { scriptId, url } = script;
<ide> if (url) {
<del> knownScripts[scriptId] = Object.assign({
<del> isNative: isNativeUrl(url),
<del> }, script);
<add> knownScripts[scriptId] = { isNative: isNativeUrl(url), ...script };
<ide> }
<ide> });
<ide>
<ide> Profiler.on('consoleProfileFinished', ({ profile }) => {
<ide> Profile.createAndRegister({ profile });
<ide> print([
<ide> 'Captured new CPU profile.',
<del> `Access it with profiles[${profiles.length - 1}]`
<add> `Access it with profiles[${profiles.length - 1}]`,
<ide> ].join('\n'));
<ide> });
<ide>
<ide> function createRepl(inspector) {
<ide> print(`Heap snapshot: ${done}/${total}`, false);
<ide> }
<ide> }
<add>
<ide> function onChunk({ chunk }) {
<ide> sizeWritten += chunk.length;
<ide> writer.write(chunk);
<ide> print(`Writing snapshot: ${sizeWritten}`, false);
<ide> }
<add>
<ide> function onResolve() {
<ide> writer.end(() => {
<ide> teardown();
<ide> print(`Wrote snapshot: ${absoluteFile}`);
<ide> resolve();
<ide> });
<ide> }
<add>
<ide> function onReject(error) {
<ide> teardown();
<ide> reject(error);
<ide> }
<add>
<ide> function teardown() {
<ide> HeapProfiler.removeListener(
<ide> 'reportHeapSnapshotProgress', onProgress);
<ide> function createRepl(inspector) {
<ide> .then(() => Debugger.setBlackboxPatterns({ patterns: [] }))
<ide> .then(() => Debugger.setPauseOnExceptions({ state: pauseOnExceptionState }))
<ide> .then(() => restoreBreakpoints())
<del> .then(() => Runtime.runIfWaitingForDebugger())
<add> .then(() => Runtime.runIfWaitingForDebugger());
<ide> }
<ide>
<ide> return function startRepl() { | 1 |
Javascript | Javascript | fix asset path-traversal outside of roots | 98aea639b235a23f97b435d4803b25bb90cd443e | <ide><path>packager/react-packager/src/AssetServer/index.js
<ide> class AssetServer {
<ide> _findRoot(roots, dir) {
<ide> return Promise.all(
<ide> roots.map(root => {
<del> const absPath = path.join(root, dir);
<add> // important: we want to resolve root + dir
<add> // to ensure the requested path doesn't traverse beyond root
<add> const absPath = path.resolve(root, dir);
<ide> return stat(absPath).then(fstat => {
<del> return {path: absPath, isDirectory: fstat.isDirectory()};
<del> }, err => {
<del> return {path: absPath, isDirectory: false};
<add> // keep asset requests from traversing files
<add> // up from the root (e.g. ../../../etc/hosts)
<add> if (!absPath.startsWith(root)) {
<add> return {path: absPath, isValid: false};
<add> }
<add> return {path: absPath, isValid: fstat.isDirectory()};
<add> }, _ => {
<add> return {path: absPath, isValid: false};
<ide> });
<ide> })
<ide> ).then(stats => {
<ide> for (let i = 0; i < stats.length; i++) {
<del> if (stats[i].isDirectory) {
<add> if (stats[i].isValid) {
<ide> return stats[i].path;
<ide> }
<ide> } | 1 |
Javascript | Javascript | use the correct size for the generated package | b8e8d17b0bf5b37797b9b96f07f1e3fe3fcc7038 | <ide><path>script/lib/create-debian-package.js
<ide> module.exports = function (packagedAppPath) {
<ide> )
<ide>
<ide> console.log(`Writing control file into "${debianPackageConfigPath}"`)
<del> const packageSizeInKilobytes = childProcess.spawnSync('du', ['-sk']).stdout.toString().split(/\s+/)[0]
<add> const packageSizeInKilobytes = childProcess.spawnSync('du', ['-sk', packagedAppPath]).stdout.toString().split(/\s+/)[0]
<ide> const controlFileTemplate = fs.readFileSync(path.join(CONFIG.repositoryRootPath, 'resources', 'linux', 'debian', 'control.in'))
<ide> const controlFileContents = template(controlFileTemplate)({
<ide> appFileName: atomExecutableName, version: CONFIG.appMetadata.version, arch: arch, | 1 |
Javascript | Javascript | add bambara language. | dccd3884d93e328209440b3b0cbaffa99b893d93 | <ide><path>src/locale/bm.js
<add>//! moment.js locale configuration
<add>//! locale : Bambara [bm]
<add>//! author : Estelle Comment : https://github.com/estellecomment
<add>// Language contact person : Abdoufata Kane : https://github.com/abdoufata
<add>
<add>import moment from '../moment';
<add>
<add>export default moment.defineLocale('bm', {
<add> months : 'Zanwuyekalo_Fewuruyekalo_Marisikalo_Awirilikalo_Mɛkalo_Zuwɛnkalo_Zuluyekalo_Utikalo_Sɛtanburukalo_ɔkutɔburukalo_Nowanburukalo_Desanburukalo'.split('_'),
<add> monthsShort : 'Zan_Few_Mar_Awi_Mɛ_Zuw_Zul_Uti_Sɛt_ɔku_Now_Des'.split('_'),
<add> weekdays : 'Kari_Ntɛnɛn_Tarata_Araba_Alamisa_Juma_Sibiri'.split('_'),
<add> weekdaysShort : 'Kar_Ntɛ_Tar_Ara_Ala_Jum_Sib'.split('_'),
<add> weekdaysMin : 'Ka_Nt_Ta_Ar_Al_Ju_Si'.split('_'),
<add> longDateFormat : {
<add> LT : 'HH:mm',
<add> LTS : 'HH:mm:ss',
<add> L : 'DD/MM/YYYY',
<add> LL : 'MMMM [tile] D [san] YYYY',
<add> LLL : 'MMMM [tile] D [san] YYYY [lɛrɛ] HH:mm',
<add> LLLL : 'dddd MMMM [tile] D [san] YYYY [lɛrɛ] HH:mm'
<add> },
<add> calendar : {
<add> sameDay : '[Bi lɛrɛ] LT',
<add> nextDay : '[Sini lɛrɛ] LT',
<add> nextWeek : 'dddd [don lɛrɛ] LT',
<add> lastDay : '[Kunu lɛrɛ] LT',
<add> lastWeek : 'dddd [tɛmɛnen lɛrɛ] LT',
<add> sameElse : 'L'
<add> },
<add> relativeTime : {
<add> future : '%s kɔnɔ',
<add> past : 'a bɛ %s bɔ',
<add> s : 'sanga dama dama',
<add> m : 'miniti kelen',
<add> mm : 'miniti %d',
<add> h : 'lɛrɛ kelen',
<add> hh : 'lɛrɛ %d',
<add> d : 'tile kelen',
<add> dd : 'tile %d',
<add> M : 'kalo kelen',
<add> MM : 'kalo %d',
<add> y : 'san kelen',
<add> yy : 'san %d'
<add> },
<add> week : {
<add> dow : 1, // Monday is the first day of the week.
<add> doy : 4 // The week that contains Jan 4th is the first week of the year.
<add> }
<add>});
<ide><path>src/test/locale/bm.js
<add>import {localeModule, test} from '../qunit';
<add>import moment from '../../moment';
<add>localeModule('bm');
<add>
<add>test('parse', function (assert) {
<add> var i,
<add> tests = 'Zanwuyekalo Zan_Fewuruyekalo Few_Marisikalo Mar_Awirilikalo Awi_Mɛkalo Mɛ_Zuwɛnkalo Zuw_Zuluyekalo Zul_Utikalo Uti_Sɛtanburukalo Sɛt_ɔkutɔburukalo ɔku_Nowanburukalo Now_Desanburukalo Des'.split('_');
<add>
<add> function equalTest(input, mmm, i) {
<add> assert.equal(moment(input, mmm).month(), i, input + ' should be month ' + (i + 1));
<add> }
<add>
<add> for (i = 0; i < 12; i++) {
<add> tests[i] = tests[i].split(' ');
<add> equalTest(tests[i][0], 'MMM', i);
<add> equalTest(tests[i][1], 'MMM', i);
<add> equalTest(tests[i][0], 'MMMM', i);
<add> equalTest(tests[i][1], 'MMMM', i);
<add> equalTest(tests[i][0].toLocaleLowerCase(), 'MMMM', i);
<add> equalTest(tests[i][1].toLocaleLowerCase(), 'MMMM', i);
<add> equalTest(tests[i][0].toLocaleUpperCase(), 'MMMM', i);
<add> equalTest(tests[i][1].toLocaleUpperCase(), 'MMMM', i);
<add> }
<add>});
<add>
<add>test('format', function (assert) {
<add> var a = [
<add> ['dddd, MMMM Do YYYY, h:mm:ss a', 'Kari, Fewuruyekalo 14 2010, 3:25:50 pm'],
<add> ['ddd, hA', 'Kar, 3PM'],
<add> ['M Mo MM MMMM MMM', '2 2 02 Fewuruyekalo Few'],
<add> ['YYYY YY', '2010 10'],
<add> ['D Do DD', '14 14 14'],
<add> ['d do dddd ddd dd', '0 0 Kari Kar Ka'],
<add> ['DDD DDDo DDDD', '45 45 045'],
<add> ['w wo ww', '6 6 06'],
<add> ['h hh', '3 03'],
<add> ['H HH', '15 15'],
<add> ['m mm', '25 25'],
<add> ['s ss', '50 50'],
<add> ['a A', 'pm PM'],
<add> ['[le] Do [jour du mois]', 'le 14 jour du mois'],
<add> ['[le] DDDo [jour de l’année]', 'le 45 jour de l’année'],
<add> ['LTS', '15:25:50'],
<add> ['L', '14/02/2010'],
<add> ['LL', 'Fewuruyekalo tile 14 san 2010'],
<add> ['LLL', 'Fewuruyekalo tile 14 san 2010 lɛrɛ 15:25'],
<add> ['LLLL', 'Kari Fewuruyekalo tile 14 san 2010 lɛrɛ 15:25'],
<add> ['l', '14/2/2010'],
<add> ['ll', 'Few tile 14 san 2010'],
<add> ['lll', 'Few tile 14 san 2010 lɛrɛ 15:25'],
<add> ['llll', 'Kar Few tile 14 san 2010 lɛrɛ 15:25']
<add> ],
<add> b = moment(new Date(2010, 1, 14, 15, 25, 50, 125)),
<add> i;
<add>
<add> for (i = 0; i < a.length; i++) {
<add> assert.equal(b.format(a[i][0]), a[i][1], a[i][0] + ' ---> ' + a[i][1]);
<add> }
<add>});
<add>
<add>test('format month', function (assert) {
<add> var i,
<add> expected = 'Zanwuyekalo Zan_Fewuruyekalo Few_Marisikalo Mar_Awirilikalo Awi_Mɛkalo Mɛ_Zuwɛnkalo Zuw_Zuluyekalo Zul_Utikalo Uti_Sɛtanburukalo Sɛt_ɔkutɔburukalo ɔku_Nowanburukalo Now_Desanburukalo Des'.split('_');
<add>
<add> for (i = 0; i < expected.length; i++) {
<add> assert.equal(moment([2011, i, 1]).format('MMMM MMM'), expected[i], expected[i]);
<add> }
<add>});
<add>
<add>test('format week', function (assert) {
<add> var i,
<add> expected = 'Kari Kar Ka_Ntɛnɛn Ntɛ Nt_Tarata Tar Ta_Araba Ara Ar_Alamisa Ala Al_Juma Jum Ju_Sibiri Sib Si'.split('_');
<add>
<add> for (i = 0; i < expected.length; i++) {
<add> assert.equal(moment([2011, 0, 2 + i]).format('dddd ddd dd'), expected[i], expected[i]);
<add> }
<add>});
<add>
<add>test('from', function (assert) {
<add> var start = moment([2007, 1, 28]);
<add>
<add> assert.equal(start.from(moment([2007, 1, 28]).add({s: 44}), true), 'sanga dama dama', '44 seconds = a few seconds');
<add> assert.equal(start.from(moment([2007, 1, 28]).add({s: 45}), true), 'miniti kelen', '45 seconds = a minute');
<add> assert.equal(start.from(moment([2007, 1, 28]).add({s: 89}), true), 'miniti kelen', '89 seconds = a minute');
<add> assert.equal(start.from(moment([2007, 1, 28]).add({s: 90}), true), 'miniti 2', '90 seconds = 2 minutes');
<add> assert.equal(start.from(moment([2007, 1, 28]).add({m: 44}), true), 'miniti 44', '44 minutes = 44 minutes');
<add> assert.equal(start.from(moment([2007, 1, 28]).add({m: 45}), true), 'lɛrɛ kelen', '45 minutes = an hour');
<add> assert.equal(start.from(moment([2007, 1, 28]).add({m: 89}), true), 'lɛrɛ kelen', '89 minutes = an hour');
<add> assert.equal(start.from(moment([2007, 1, 28]).add({m: 90}), true), 'lɛrɛ 2', '90 minutes = 2 hours');
<add> assert.equal(start.from(moment([2007, 1, 28]).add({h: 5}), true), 'lɛrɛ 5', '5 hours = 5 hours');
<add> assert.equal(start.from(moment([2007, 1, 28]).add({h: 21}), true), 'lɛrɛ 21', '21 hours = 21 hours');
<add> assert.equal(start.from(moment([2007, 1, 28]).add({h: 22}), true), 'tile kelen', '22 hours = a day');
<add> assert.equal(start.from(moment([2007, 1, 28]).add({h: 35}), true), 'tile kelen', '35 hours = a day');
<add> assert.equal(start.from(moment([2007, 1, 28]).add({h: 36}), true), 'tile 2', '36 hours = 2 days');
<add> assert.equal(start.from(moment([2007, 1, 28]).add({d: 1}), true), 'tile kelen', '1 day = a day');
<add> assert.equal(start.from(moment([2007, 1, 28]).add({d: 5}), true), 'tile 5', '5 days = 5 days');
<add> assert.equal(start.from(moment([2007, 1, 28]).add({d: 25}), true), 'tile 25', '25 days = 25 days');
<add> assert.equal(start.from(moment([2007, 1, 28]).add({d: 26}), true), 'kalo kelen', '26 days = a month');
<add> assert.equal(start.from(moment([2007, 1, 28]).add({d: 30}), true), 'kalo kelen', '30 days = a month');
<add> assert.equal(start.from(moment([2007, 1, 28]).add({d: 43}), true), 'kalo kelen', '43 days = a month');
<add> assert.equal(start.from(moment([2007, 1, 28]).add({d: 46}), true), 'kalo 2', '46 days = 2 months');
<add> assert.equal(start.from(moment([2007, 1, 28]).add({d: 74}), true), 'kalo 2', '75 days = 2 months');
<add> assert.equal(start.from(moment([2007, 1, 28]).add({d: 76}), true), 'kalo 3', '76 days = 3 months');
<add> assert.equal(start.from(moment([2007, 1, 28]).add({M: 1}), true), 'kalo kelen', '1 month = a month');
<add> assert.equal(start.from(moment([2007, 1, 28]).add({M: 5}), true), 'kalo 5', '5 months = 5 months');
<add> assert.equal(start.from(moment([2007, 1, 28]).add({d: 345}), true), 'san kelen', '345 days = a year');
<add> assert.equal(start.from(moment([2007, 1, 28]).add({d: 548}), true), 'san 2', '548 days = 2 years');
<add> assert.equal(start.from(moment([2007, 1, 28]).add({y: 1}), true), 'san kelen', '1 year = a year');
<add> assert.equal(start.from(moment([2007, 1, 28]).add({y: 5}), true), 'san 5', '5 years = 5 years');
<add>});
<add>
<add>test('suffix', function (assert) {
<add> assert.equal(moment(30000).from(0), 'sanga dama dama kɔnɔ', 'prefix');
<add> assert.equal(moment(0).from(30000), 'a bɛ sanga dama dama bɔ', 'suffix');
<add>});
<add>
<add>test('fromNow', function (assert) {
<add> assert.equal(moment().add({s: 30}).fromNow(), 'sanga dama dama kɔnɔ', 'in a few seconds');
<add> assert.equal(moment().add({d: 5}).fromNow(), 'tile 5 kɔnɔ', 'in 5 days');
<add>});
<add>
<add>test('same day', function (assert) {
<add> var a = moment().hours(12).minutes(0).seconds(0);
<add>
<add> assert.equal(moment(a).calendar(), 'Bi lɛrɛ 12:00', 'Today at the same time');
<add> assert.equal(moment(a).add({m: 25}).calendar(), 'Bi lɛrɛ 12:25', 'Now plus 25 min');
<add> assert.equal(moment(a).add({h: 1}).calendar(), 'Bi lɛrɛ 13:00', 'Now plus 1 hour');
<add> assert.equal(moment(a).add({d: 1}).calendar(), 'Sini lɛrɛ 12:00', 'Tomorrow at the same time');
<add> assert.equal(moment(a).subtract({h: 1}).calendar(), 'Bi lɛrɛ 11:00', 'Now minus 1 hour');
<add> assert.equal(moment(a).subtract({d: 1}).calendar(), 'Kunu lɛrɛ 12:00', 'Yesterday at the same time');
<add>});
<add>
<add>test('same next week', function (assert) {
<add> var i, m;
<add>
<add> for (i = 2; i < 7; i++) {
<add> m = moment().add({d: i});
<add> assert.equal(m.calendar(), m.format('dddd [don lɛrɛ] LT'), 'Today + ' + i + ' days current time');
<add> m.hours(0).minutes(0).seconds(0).milliseconds(0);
<add> assert.equal(m.calendar(), m.format('dddd [don lɛrɛ] LT'), 'Today + ' + i + ' days beginning of day');
<add> m.hours(23).minutes(59).seconds(59).milliseconds(999);
<add> assert.equal(m.calendar(), m.format('dddd [don lɛrɛ] LT'), 'Today + ' + i + ' days end of day');
<add> }
<add>});
<add>
<add>test('same last week', function (assert) {
<add> var i, m;
<add>
<add> for (i = 2; i < 7; i++) {
<add> m = moment().subtract({d: i});
<add> assert.equal(m.calendar(), m.format('dddd [tɛmɛnen lɛrɛ] LT'), 'Today - ' + i + ' days current time');
<add> m.hours(0).minutes(0).seconds(0).milliseconds(0);
<add> assert.equal(m.calendar(), m.format('dddd [tɛmɛnen lɛrɛ] LT'), 'Today - ' + i + ' days beginning of day');
<add> m.hours(23).minutes(59).seconds(59).milliseconds(999);
<add> assert.equal(m.calendar(), m.format('dddd [tɛmɛnen lɛrɛ] LT'), 'Today - ' + i + ' days end of day');
<add> }
<add>});
<add>
<add>test('same all else', function (assert) {
<add> var weeksAgo = moment().subtract({w: 1}),
<add> weeksFromNow = moment().add({w: 1});
<add>
<add> assert.equal(weeksAgo.calendar(), weeksAgo.format('L'), '1 week ago');
<add> assert.equal(weeksFromNow.calendar(), weeksFromNow.format('L'), 'in 1 week');
<add>
<add> weeksAgo = moment().subtract({w: 2});
<add> weeksFromNow = moment().add({w: 2});
<add>
<add> assert.equal(weeksAgo.calendar(), weeksAgo.format('L'), '2 weeks ago');
<add> assert.equal(weeksFromNow.calendar(), weeksFromNow.format('L'), 'in 2 weeks');
<add>});
<add>
<add>test('weeks year starting sunday formatted', function (assert) {
<add> assert.equal(moment([2012, 0, 1]).format('w ww wo'), '52 52 52', 'Jan 1 2012 should be week 52');
<add> assert.equal(moment([2012, 0, 2]).format('w ww wo'), '1 01 1', 'Jan 2 2012 should be week 1');
<add> assert.equal(moment([2012, 0, 8]).format('w ww wo'), '1 01 1', 'Jan 8 2012 should be week 1');
<add> assert.equal(moment([2012, 0, 9]).format('w ww wo'), '2 02 2', 'Jan 9 2012 should be week 2');
<add> assert.equal(moment([2012, 0, 15]).format('w ww wo'), '2 02 2', 'Jan 15 2012 should be week 2');
<add>});
<add> | 2 |
Javascript | Javascript | reduce traversing module | 65bdfbf07f911d5014e32ab126ef0c568c22bf49 | <ide><path>src/traversing.js
<ide> var runtil = /Until$/,
<ide>
<ide> jQuery.fn.extend({
<ide> find: function( selector ) {
<del> var i, ret, self;
<add> var self, matched, i,
<add> l = this.length;
<ide>
<ide> if ( typeof selector !== "string" ) {
<ide> self = this;
<ide> return this.pushStack( jQuery( selector ).filter(function() {
<del> for ( i = 0; i < self.length; i++ ) {
<add> for ( i = 0; i < l; i++ ) {
<ide> if ( jQuery.contains( self[ i ], this ) ) {
<ide> return true;
<ide> }
<ide> }
<ide> }) );
<ide> }
<ide>
<del> ret = [];
<del> for ( i = 0; i < this.length; i++ ) {
<del> jQuery.find( selector, this[ i ], ret );
<add> matched = [];
<add> for ( i = 0; i < l; i++ ) {
<add> jQuery.find( selector, this[ i ], matched );
<ide> }
<ide>
<ide> // Needed because $( selector, context ) becomes $( context ).find( selector )
<del> ret = this.pushStack( jQuery.unique( ret ) );
<del> ret.selector = ( this.selector ? this.selector + " " : "" ) + selector;
<del> return ret;
<add> matched = this.pushStack( jQuery.unique( matched ) );
<add> matched.selector = ( this.selector ? this.selector + " " : "" ) + selector;
<add> return matched;
<ide> },
<ide>
<ide> has: function( target ) {
<del> var i,
<del> targets = jQuery( target, this ),
<del> len = targets.length;
<add> var targets = jQuery( target, this ),
<add> l = targets.length;
<ide>
<ide> return this.filter(function() {
<del> for ( i = 0; i < len; i++ ) {
<add> var i = 0;
<add> for ( ; i < l; i++ ) {
<ide> if ( jQuery.contains( this, targets[i] ) ) {
<ide> return true;
<ide> }
<ide> jQuery.fn.extend({
<ide> // If this is a positional/relative selector, check membership in the returned set
<ide> // so $("p:first").is("p:last") won't return true for a doc with two "p".
<ide> rneedsContext.test( selector ) ?
<del> jQuery( selector, this.context ).index( this[0] ) >= 0 :
<add> jQuery( selector, this.context ).index( this[ 0 ] ) >= 0 :
<ide> jQuery.filter( selector, this ).length > 0 :
<ide> this.filter( selector ).length > 0 );
<ide> },
<ide>
<ide> closest: function( selectors, context ) {
<del> var cur,
<del> i = 0,
<add> var cur, i = 0,
<ide> l = this.length,
<del> ret = [],
<del> pos = rneedsContext.test( selectors ) || typeof selectors !== "string" ?
<add> matched = [],
<add> pos = ( rneedsContext.test( selectors ) || typeof selectors !== "string" ) ?
<ide> jQuery( selectors, context || this.context ) :
<ide> 0;
<ide>
<ide> for ( ; i < l; i++ ) {
<del> cur = this[i];
<add> cur = this[ i ];
<ide>
<del> while ( cur && cur.ownerDocument && cur !== context && cur.nodeType !== 11 ) {
<del> if ( pos ? pos.index(cur) > -1 : jQuery.find.matchesSelector(cur, selectors) ) {
<del> ret.push( cur );
<add> while ( cur && cur.ownerDocument && cur !== context ) {
<add> if ( pos ? pos.index( cur ) > -1 : jQuery.find.matchesSelector( cur, selectors ) ) {
<add> matched.push( cur );
<ide> break;
<ide> }
<del> cur = cur.parentNode;
<add> cur = cur.parentElement;
<ide> }
<ide> }
<ide>
<del> return this.pushStack( ret.length > 1 ? jQuery.unique( ret ) : ret );
<add> return this.pushStack( matched.length > 1 ? jQuery.unique( matched ) : matched );
<ide> },
<ide>
<ide> // Determine the position of an element within
<ide> jQuery.fn.extend({
<ide>
<ide> // No argument, return index in parent
<ide> if ( !elem ) {
<del> return ( this[0] && this[0].parentNode ) ? this.first().prevAll().length : -1;
<add> return ( this[ 0 ] && this[ 0 ].parentNode ) ? this.first().prevAll().length : -1;
<ide> }
<ide>
<ide> // index in selector
<ide> if ( typeof elem === "string" ) {
<del> return jQuery.inArray( this[0], jQuery( elem ) );
<add> return core_indexOf.call( jQuery( elem ), this[ 0 ] );
<ide> }
<ide>
<ide> // Locate the position of the desired element
<del> return jQuery.inArray(
<add> return core_indexOf.call( this,
<add>
<ide> // If it receives a jQuery object, the first element is used
<del> elem.jquery ? elem[0] : elem, this );
<add> elem.jquery ? elem[ 0 ] : elem
<add> );
<ide> },
<ide>
<ide> add: function( selector, context ) {
<ide> jQuery.fn.extend({
<ide>
<ide> jQuery.fn.andSelf = jQuery.fn.addBack;
<ide>
<del>function sibling( cur, dir ) {
<del> do {
<del> cur = cur[ dir ];
<del> } while ( cur && cur.nodeType !== 1 );
<del>
<del> return cur;
<del>}
<del>
<ide> jQuery.each({
<ide> parent: function( elem ) {
<del> var parent = elem.parentNode;
<del> return parent && parent.nodeType !== 11 ? parent : null;
<add> return elem.parentElement;
<ide> },
<ide> parents: function( elem ) {
<del> return jQuery.dir( elem, "parentNode" );
<add> return jQuery.dir( elem, "parentElement" );
<ide> },
<ide> parentsUntil: function( elem, i, until ) {
<del> return jQuery.dir( elem, "parentNode", until );
<add> return jQuery.dir( elem, "parentElement", until );
<ide> },
<ide> next: function( elem ) {
<del> return sibling( elem, "nextSibling" );
<add> return elem.nextElementSibling;
<ide> },
<ide> prev: function( elem ) {
<del> return sibling( elem, "previousSibling" );
<add> return elem.previousElementSibling;
<ide> },
<ide> nextAll: function( elem ) {
<del> return jQuery.dir( elem, "nextSibling" );
<add> return jQuery.dir( elem, "nextElementSibling" );
<ide> },
<ide> prevAll: function( elem ) {
<del> return jQuery.dir( elem, "previousSibling" );
<add> return jQuery.dir( elem, "previousElementSibling" );
<ide> },
<ide> nextUntil: function( elem, i, until ) {
<del> return jQuery.dir( elem, "nextSibling", until );
<add> return jQuery.dir( elem, "nextElementSibling", until );
<ide> },
<ide> prevUntil: function( elem, i, until ) {
<del> return jQuery.dir( elem, "previousSibling", until );
<add> return jQuery.dir( elem, "previousElementSibling", until );
<ide> },
<ide> siblings: function( elem ) {
<ide> return jQuery.sibling( ( elem.parentNode || {} ).firstChild, elem );
<ide> },
<ide> children: function( elem ) {
<del> return jQuery.sibling( elem.firstChild );
<add> var children = elem.children;
<add>
<add> // documentFragment or document does not have children property
<add> return children ? jQuery.merge( [], children ) : jQuery.sibling( elem.firstChild );
<ide> },
<ide> contents: function( elem ) {
<ide> return jQuery.nodeName( elem, "iframe" ) ?
<ide> jQuery.each({
<ide> }
<ide> }, function( name, fn ) {
<ide> jQuery.fn[ name ] = function( until, selector ) {
<del> var ret = jQuery.map( this, fn, until );
<add> var matched = jQuery.map( this, fn, until );
<ide>
<ide> if ( !runtil.test( name ) ) {
<ide> selector = until;
<ide> }
<ide>
<ide> if ( selector && typeof selector === "string" ) {
<del> ret = jQuery.filter( selector, ret );
<add> matched = jQuery.filter( selector, matched );
<ide> }
<ide>
<del> ret = this.length > 1 && !guaranteedUnique[ name ] ? jQuery.unique( ret ) : ret;
<add> if ( this.length > 1 ) {
<add> if ( !guaranteedUnique[ name ] ) {
<add> jQuery.unique( matched );
<add> }
<ide>
<del> if ( this.length > 1 && rparentsprev.test( name ) ) {
<del> ret = ret.reverse();
<add> if ( rparentsprev.test( name ) ) {
<add> matched.reverse();
<add> }
<ide> }
<ide>
<del> return this.pushStack( ret );
<add> return this.pushStack( matched );
<ide> };
<ide> });
<ide>
<ide> jQuery.extend({
<ide> }
<ide>
<ide> return elems.length === 1 ?
<del> jQuery.find.matchesSelector(elems[0], expr) ? [ elems[0] ] : [] :
<del> jQuery.find.matches(expr, elems);
<add> jQuery.find.matchesSelector( elems[ 0 ], expr ) ? [ elems[ 0 ] ] : [] :
<add> jQuery.find.matches( expr, elems );
<ide> },
<ide>
<ide> dir: function( elem, dir, until ) {
<del> var matched = [],
<del> cur = elem[ dir ];
<add> var cur = elem[ dir ],
<add> matched = [];
<ide>
<del> while ( cur && cur.nodeType !== 9 && (until === undefined || cur.nodeType !== 1 || !jQuery( cur ).is( until )) ) {
<del> if ( cur.nodeType === 1 ) {
<del> matched.push( cur );
<del> }
<del> cur = cur[dir];
<add> while ( cur && ( !until || !jQuery( cur ).is( until ) ) ) {
<add> matched.push( cur );
<add> cur = cur[ dir ];
<ide> }
<add>
<ide> return matched;
<ide> },
<ide>
<ide> sibling: function( n, elem ) {
<del> var r = [];
<add> var matched = [];
<ide>
<ide> for ( ; n; n = n.nextSibling ) {
<ide> if ( n.nodeType === 1 && n !== elem ) {
<del> r.push( n );
<add> matched.push( n );
<ide> }
<ide> }
<ide>
<del> return r;
<add> return matched;
<ide> }
<ide> });
<ide>
<ide> function winnow( elements, qualifier, keep ) {
<ide> // Set to 0 to skip string check
<ide> qualifier = qualifier || 0;
<ide>
<add> var filtered;
<add>
<ide> if ( jQuery.isFunction( qualifier ) ) {
<ide> return jQuery.grep(elements, function( elem, i ) {
<ide> var retVal = !!qualifier.call( elem, i, elem );
<ide> return retVal === keep;
<ide> });
<add> }
<ide>
<del> } else if ( qualifier.nodeType ) {
<add> if ( qualifier.nodeType ) {
<ide> return jQuery.grep(elements, function( elem ) {
<ide> return ( elem === qualifier ) === keep;
<ide> });
<add> }
<ide>
<del> } else if ( typeof qualifier === "string" ) {
<del> var filtered = jQuery.grep(elements, function( elem ) {
<add> if ( typeof qualifier === "string" ) {
<add> filtered = jQuery.grep(elements, function( elem ) {
<ide> return elem.nodeType === 1;
<ide> });
<ide>
<ide> if ( isSimple.test( qualifier ) ) {
<del> return jQuery.filter(qualifier, filtered, !keep);
<del> } else {
<del> qualifier = jQuery.filter( qualifier, filtered );
<add> return jQuery.filter( qualifier, filtered, !keep );
<ide> }
<add>
<add> qualifier = jQuery.filter( qualifier, filtered );
<ide> }
<ide>
<ide> return jQuery.grep(elements, function( elem ) {
<del> return ( jQuery.inArray( elem, qualifier ) >= 0 ) === keep;
<add> return ( core_indexOf.call( qualifier, elem ) >= 0 ) === keep;
<ide> });
<ide> }
<ide><path>test/unit/traversing.js
<ide> test("eq('-1') #10616", function() {
<ide> });
<ide>
<ide> test("index(no arg) #10977", function() {
<del> expect(1);
<del>
<add> expect(2);
<add>
<ide> var $list = jQuery("<ul id='indextest'><li>THIS ONE</li><li class='one'>a</li><li class='two'>b</li><li class='three'>c</li></ul>");
<ide> jQuery("#qunit-fixture").append( $list );
<ide> strictEqual ( jQuery( "#indextest li:not(.one,.two)" ).index() , 0, "No Argument Index Check" );
<ide> $list.remove();
<add>
<add> var fragment = document.createDocumentFragment(),
<add> div = fragment.appendChild( document.createElement("div") );
<add>
<add> equal( jQuery( div ).index(), 0, "If jQuery#index called on element whos parent is fragment, it still should work correctly" );
<ide> }); | 2 |
Ruby | Ruby | fix the undefined method content_tag | ab2ace6bbef1cc98c98e5a9a6b45e734bc5edd9d | <ide><path>actionview/lib/action_view/helpers/translation_helper.rb
<ide> module ActionView
<ide> # = Action View Translation Helpers
<ide> module Helpers
<ide> module TranslationHelper
<add> include TagHelper
<ide> # Delegates to <tt>I18n#translate</tt> but also performs three additional functions.
<ide> #
<ide> # First, it will ensure that any thrown +MissingTranslation+ messages will be turned
<ide><path>actionview/test/template/translation_helper_test.rb
<ide> require 'abstract_unit'
<ide>
<ide> class TranslationHelperTest < ActiveSupport::TestCase
<del> include ActionView::Helpers::TagHelper
<ide> include ActionView::Helpers::TranslationHelper
<ide>
<ide> attr_reader :request, :view | 2 |
Python | Python | parse deepmac meta arch correctly | 8b45de4ffc7eb8d66f0139ee1f62e699ee401072 | <ide><path>research/object_detection/builders/model_builder.py
<ide> def _build_center_net_model(center_net_config, is_training, add_summaries):
<ide> object_detection_params = object_detection_proto_to_params(
<ide> center_net_config.object_detection_task)
<ide>
<add> if center_net_config.HasField('deepmac_mask_estimation'):
<add> logging.warn(('Building experimental DeepMAC meta-arch.'
<add> ' Some features may be omitted.'))
<add> deepmac_params = deepmac_meta_arch.deepmac_proto_to_params(
<add> center_net_config.deepmac_mask_estimation)
<add> return deepmac_meta_arch.DeepMACMetaArch(
<add> is_training=is_training,
<add> add_summaries=add_summaries,
<add> num_classes=center_net_config.num_classes,
<add> feature_extractor=feature_extractor,
<add> image_resizer_fn=image_resizer_fn,
<add> object_center_params=object_center_params,
<add> object_detection_params=object_detection_params,
<add> deepmac_params=deepmac_params)
<add>
<ide> keypoint_params_dict = None
<ide> if center_net_config.keypoint_estimation_task:
<ide> label_map_proto = label_map_util.load_labelmap( | 1 |
Java | Java | fix the usage of deprecated methods | 434f2100cb7f16db8acc52a7a9eb13155a5db57a | <ide><path>rxjava-contrib/rxjava-string/src/main/java/rx/observables/StringObservable.java
<ide> import rx.Observable.OnSubscribe;
<ide> import rx.Observable.Operator;
<ide> import rx.Subscriber;
<del>import rx.Subscription;
<add>import rx.functions.Action1;
<ide> import rx.functions.Func0;
<ide> import rx.functions.Func1;
<ide> import rx.functions.Func2;
<ide> import java.nio.charset.CodingErrorAction;
<ide> import java.util.Arrays;
<ide> import java.util.concurrent.Callable;
<del>import java.util.concurrent.atomic.AtomicBoolean;
<ide> import java.util.regex.Pattern;
<ide>
<ide> public class StringObservable {
<ide> public static Observable<byte[]> from(final InputStream i) {
<ide> return from(i, 8 * 1024);
<ide> }
<ide>
<del> private static class CloseableResource<S extends Closeable> implements Subscription {
<del> private final AtomicBoolean unsubscribed = new AtomicBoolean();
<del> private S closable;
<del>
<del> public CloseableResource(S closeable) {
<del> this.closable = closeable;
<del> }
<del>
<del> @Override
<del> public void unsubscribe() {
<del> if (unsubscribed.compareAndSet(false, true)) {
<del> try {
<del> closable.close();
<del> } catch (Exception e) {
<del> throw new RuntimeException(e);
<del> }
<del> }
<del> }
<del>
<del> @Override
<del> public boolean isUnsubscribed() {
<del> return unsubscribed.get();
<del> }
<del> }
<del>
<ide> /**
<ide> * Func0 that allows throwing an {@link IOException}s commonly thrown during IO operations.
<del> * @see StringObservable#from(UnsafeFunc0, UnsafeFunc1)
<add> * @see StringObservable#using(UnsafeFunc0, Func1)
<ide> *
<ide> * @param <R>
<ide> */
<ide> public boolean isUnsubscribed() {
<ide> * @param observableFactory
<ide> * Converts the {@link Closeable} resource into a {@link Observable} with {@link #from(InputStream)} or {@link #from(Reader)}
<ide> * @return
<add> * An {@link Observable} that automatically closes the resource when done.
<ide> */
<ide> public static <R, S extends Closeable> Observable<R> using(final UnsafeFunc0<S> resourceFactory,
<ide> final Func1<S, Observable<R>> observableFactory) {
<del> return Observable.using(new Func0<CloseableResource<S>>() {
<add> return Observable.using(new Func0<S>() {
<ide> @Override
<del> public CloseableResource<S> call() {
<add> public S call() {
<ide> try {
<del> return new CloseableResource<S>(resourceFactory.call());
<add> return resourceFactory.call();
<ide> } catch (Throwable e) {
<ide> throw new RuntimeException(e);
<ide> }
<ide> }
<del> }, new Func1<CloseableResource<S>, Observable<R>>() {
<add> }, observableFactory, new Action1<S>() {
<ide> @Override
<del> public Observable<R> call(CloseableResource<S> t1) {
<del> return observableFactory.call(t1.closable);
<add> public void call(S resource) {
<add> try {
<add> resource.close();
<add> } catch (IOException e) {
<add> throw new RuntimeException(e);
<add> }
<ide> }
<ide> });
<ide> }
<ide> public StringBuilder call(StringBuilder a, String b) {
<ide> /**
<ide> * Maps {@link Observable}<{@link Object}> to {@link Observable}<{@link String}> by using {@link String#valueOf(Object)}
<ide> * @param src
<del> * @return
<add> * @return An {@link Observable} of only {@link String}s.
<ide> */
<ide> public static Observable<String> toString(Observable<?> src) {
<ide> return src.map(new Func1<Object, String>() {
<ide><path>rxjava-contrib/rxjava-string/src/test/java/rx/observables/StringObservableTest.java
<ide> public class StringObservableTest {
<ide>
<ide> @Test
<ide> public void testMultibyteSpanningTwoBuffers() {
<del> Observable<byte[]> src = Observable.from(new byte[] { (byte) 0xc2 }, new byte[] { (byte) 0xa1 });
<del> String out = StringObservable.decode(src, "UTF-8").toBlockingObservable().single();
<add> Observable<byte[]> src = Observable.just(new byte[] { (byte) 0xc2 }, new byte[] { (byte) 0xa1 });
<add> String out = StringObservable.decode(src, "UTF-8").toBlocking().single();
<ide>
<ide> assertEquals("\u00A1", out);
<ide> }
<ide>
<ide> @Test
<ide> public void testMalformedAtTheEndReplace() {
<del> Observable<byte[]> src = Observable.from(new byte[] { (byte) 0xc2 });
<del> String out = decode(src, "UTF-8").toBlockingObservable().single();
<add> Observable<byte[]> src = Observable.just(new byte[] { (byte) 0xc2 });
<add> String out = decode(src, "UTF-8").toBlocking().single();
<ide>
<ide> // REPLACEMENT CHARACTER
<ide> assertEquals("\uFFFD", out);
<ide> }
<ide>
<ide> @Test
<ide> public void testMalformedInTheMiddleReplace() {
<del> Observable<byte[]> src = Observable.from(new byte[] { (byte) 0xc2, 65 });
<del> String out = decode(src, "UTF-8").toBlockingObservable().single();
<add> Observable<byte[]> src = Observable.just(new byte[] { (byte) 0xc2, 65 });
<add> String out = decode(src, "UTF-8").toBlocking().single();
<ide>
<ide> // REPLACEMENT CHARACTER
<ide> assertEquals("\uFFFDA", out);
<ide> }
<ide>
<ide> @Test(expected = RuntimeException.class)
<ide> public void testMalformedAtTheEndReport() {
<del> Observable<byte[]> src = Observable.from(new byte[] { (byte) 0xc2 });
<add> Observable<byte[]> src = Observable.just(new byte[] { (byte) 0xc2 });
<ide> CharsetDecoder charsetDecoder = Charset.forName("UTF-8").newDecoder();
<del> decode(src, charsetDecoder).toBlockingObservable().single();
<add> decode(src, charsetDecoder).toBlocking().single();
<ide> }
<ide>
<ide> @Test(expected = RuntimeException.class)
<ide> public void testMalformedInTheMiddleReport() {
<del> Observable<byte[]> src = Observable.from(new byte[] { (byte) 0xc2, 65 });
<add> Observable<byte[]> src = Observable.just(new byte[] { (byte) 0xc2, 65 });
<ide> CharsetDecoder charsetDecoder = Charset.forName("UTF-8").newDecoder();
<del> decode(src, charsetDecoder).toBlockingObservable().single();
<add> decode(src, charsetDecoder).toBlocking().single();
<ide> }
<ide>
<ide> @Test
<ide> public void testPropogateError() {
<del> Observable<byte[]> src = Observable.from(new byte[] { 65 });
<add> Observable<byte[]> src = Observable.just(new byte[] { 65 });
<ide> Observable<byte[]> err = Observable.error(new IOException());
<ide> CharsetDecoder charsetDecoder = Charset.forName("UTF-8").newDecoder();
<ide> try {
<del> decode(Observable.concat(src, err), charsetDecoder).toList().toBlockingObservable().single();
<add> decode(Observable.concat(src, err), charsetDecoder).toList().toBlocking().single();
<ide> fail();
<ide> } catch (RuntimeException e) {
<ide> assertEquals(IOException.class, e.getCause().getClass());
<ide> public void testPropogateError() {
<ide>
<ide> @Test
<ide> public void testPropogateErrorInTheMiddleOfMultibyte() {
<del> Observable<byte[]> src = Observable.from(new byte[] { (byte) 0xc2 });
<add> Observable<byte[]> src = Observable.just(new byte[] { (byte) 0xc2 });
<ide> Observable<byte[]> err = Observable.error(new IOException());
<ide> CharsetDecoder charsetDecoder = Charset.forName("UTF-8").newDecoder();
<ide> try {
<del> decode(Observable.concat(src, err), charsetDecoder).toList().toBlockingObservable().single();
<add> decode(Observable.concat(src, err), charsetDecoder).toList().toBlocking().single();
<ide> fail();
<ide> } catch (RuntimeException e) {
<ide> assertEquals(MalformedInputException.class, e.getCause().getClass());
<ide> public void testPropogateErrorInTheMiddleOfMultibyte() {
<ide> public void testEncode() {
<ide> assertArrayEquals(
<ide> new byte[] { (byte) 0xc2, (byte) 0xa1 }, encode(Observable.just("\u00A1"), "UTF-8")
<del> .toBlockingObservable().single());
<add> .toBlocking().single());
<ide> }
<ide>
<ide> @Test
<ide> public void testSplitOnOh() {
<ide> }
<ide>
<ide> public void testSplit(String str, String regex, int limit, String... parts) {
<del> testSplit(str, regex, 0, Observable.from(str), parts);
<add> testSplit(str, regex, 0, Observable.just(str), parts);
<ide> for (int i = 0; i < str.length(); i++) {
<ide> String a = str.substring(0, i);
<ide> String b = str.substring(i, str.length());
<del> testSplit(a + "|" + b, regex, limit, Observable.from(a, b), parts);
<add> testSplit(a + "|" + b, regex, limit, Observable.just(a, b), parts);
<ide> }
<ide> }
<ide>
<ide> public void testJoinMixed() {
<ide>
<ide> @Test
<ide> public void testJoinWithEmptyString() {
<del> Observable<String> source = Observable.from("", "b", "c");
<add> Observable<String> source = Observable.just("", "b", "c");
<ide>
<ide> Observable<String> result = join(source, ", ");
<ide>
<ide> public void testJoinWithEmptyString() {
<ide>
<ide> @Test
<ide> public void testJoinWithNull() {
<del> Observable<String> source = Observable.from("a", null, "c");
<add> Observable<String> source = Observable.just("a", null, "c");
<ide>
<ide> Observable<String> result = join(source, ", ");
<ide>
<ide> public void testJoinWithNull() {
<ide>
<ide> @Test
<ide> public void testJoinSingle() {
<del> Observable<String> source = Observable.from("a");
<add> Observable<String> source = Observable.just("a");
<ide>
<ide> Observable<String> result = join(source, ", ");
<ide>
<ide> public void testJoinThrows() {
<ide> @Test
<ide> public void testFromInputStream() {
<ide> final byte[] inBytes = "test".getBytes();
<del> final byte[] outBytes = from(new ByteArrayInputStream(inBytes)).toBlockingObservable().single();
<add> final byte[] outBytes = from(new ByteArrayInputStream(inBytes)).toBlocking().single();
<ide> assertNotSame(inBytes, outBytes);
<ide> assertArrayEquals(inBytes, outBytes);
<ide> }
<ide> public synchronized int read(byte[] b, int off, int len) {
<ide> return super.read(b, off, len);
<ide> }
<ide> };
<del> StringObservable.from(is).first().toBlockingObservable().single();
<add> StringObservable.from(is).first().toBlocking().single();
<ide> assertEquals(1, numReads.get());
<ide> }
<ide>
<ide> @Test
<ide> public void testFromReader() {
<ide> final String inStr = "test";
<del> final String outStr = from(new StringReader(inStr)).toBlockingObservable().single();
<add> final String outStr = from(new StringReader(inStr)).toBlocking().single();
<ide> assertNotSame(inStr, outStr);
<ide> assertEquals(inStr, outStr);
<ide> }
<ide> public void testByLine() {
<ide> String newLine = System.getProperty("line.separator");
<ide>
<ide> List<Line> lines = byLine(Observable.from(Arrays.asList("qwer", newLine + "asdf" + newLine, "zx", "cv")))
<del> .toList().toBlockingObservable().single();
<add> .toList().toBlocking().single();
<ide>
<ide> assertEquals(Arrays.asList(new Line(0, "qwer"), new Line(1, "asdf"), new Line(2, "zxcv")), lines);
<ide> } | 2 |
Ruby | Ruby | lock connection before checking it in | ce2abffc0e29f562da26f89d7f07aeaf06a9cbcd | <ide><path>activerecord/lib/active_record/connection_adapters/abstract/connection_pool.rb
<ide> def checkout(checkout_timeout = @checkout_timeout)
<ide> # +conn+: an AbstractAdapter object, which was obtained by earlier by
<ide> # calling #checkout on this pool.
<ide> def checkin(conn)
<del> synchronize do
<del> remove_connection_from_thread_cache conn
<add> conn.lock.synchronize do
<add> synchronize do
<add> remove_connection_from_thread_cache conn
<ide>
<del> conn._run_checkin_callbacks do
<del> conn.expire
<del> end
<add> conn._run_checkin_callbacks do
<add> conn.expire
<add> end
<ide>
<del> @available.add conn
<add> @available.add conn
<add> end
<ide> end
<ide> end
<ide> | 1 |
Javascript | Javascript | use symbol instead of sentinel terminology | bcd1732a98a07df6d49cbdb1cb5ea4aedef15aed | <ide><path>src/CollectionImpl.js
<ide> import {
<ide> isIndexed,
<ide> isAssociative,
<ide> isOrdered,
<del> IS_ITERABLE_SENTINEL,
<del> IS_KEYED_SENTINEL,
<del> IS_INDEXED_SENTINEL,
<del> IS_ORDERED_SENTINEL,
<add> IS_COLLECTION_SYMBOL,
<add> IS_KEYED_SYMBOL,
<add> IS_INDEXED_SYMBOL,
<add> IS_ORDERED_SYMBOL,
<ide> } from './Predicates';
<ide>
<ide> import { is } from './is';
<ide> mixin(Collection, {
<ide> });
<ide>
<ide> const CollectionPrototype = Collection.prototype;
<del>CollectionPrototype[IS_ITERABLE_SENTINEL] = true;
<add>CollectionPrototype[IS_COLLECTION_SYMBOL] = true;
<ide> CollectionPrototype[ITERATOR_SYMBOL] = CollectionPrototype.values;
<ide> CollectionPrototype.toJSON = CollectionPrototype.toArray;
<ide> CollectionPrototype.__toStringMapper = quoteString;
<ide> mixin(KeyedCollection, {
<ide> });
<ide>
<ide> const KeyedCollectionPrototype = KeyedCollection.prototype;
<del>KeyedCollectionPrototype[IS_KEYED_SENTINEL] = true;
<add>KeyedCollectionPrototype[IS_KEYED_SYMBOL] = true;
<ide> KeyedCollectionPrototype[ITERATOR_SYMBOL] = CollectionPrototype.entries;
<ide> KeyedCollectionPrototype.toJSON = toObject;
<ide> KeyedCollectionPrototype.__toStringMapper = (v, k) =>
<ide> mixin(IndexedCollection, {
<ide> });
<ide>
<ide> const IndexedCollectionPrototype = IndexedCollection.prototype;
<del>IndexedCollectionPrototype[IS_INDEXED_SENTINEL] = true;
<del>IndexedCollectionPrototype[IS_ORDERED_SENTINEL] = true;
<add>IndexedCollectionPrototype[IS_INDEXED_SYMBOL] = true;
<add>IndexedCollectionPrototype[IS_ORDERED_SYMBOL] = true;
<ide>
<ide> mixin(SetCollection, {
<ide> // ### ES6 Collection methods (ES6 Array and Map)
<ide><path>src/List.js
<ide> export class List extends IndexedCollection {
<ide> }
<ide>
<ide> export function isList(maybeList) {
<del> return !!(maybeList && maybeList[IS_LIST_SENTINEL]);
<add> return !!(maybeList && maybeList[IS_LIST_SYMBOL]);
<ide> }
<ide>
<ide> List.isList = isList;
<ide>
<del>const IS_LIST_SENTINEL = '';
<add>const IS_LIST_SYMBOL = '';
<ide>
<ide> export const ListPrototype = List.prototype;
<del>ListPrototype[IS_LIST_SENTINEL] = true;
<add>ListPrototype[IS_LIST_SYMBOL] = true;
<ide> ListPrototype[DELETE] = ListPrototype.remove;
<ide> ListPrototype.merge = ListPrototype.concat;
<ide> ListPrototype.setIn = setIn;
<ide><path>src/Map.js
<ide> export class Map extends KeyedCollection {
<ide> }
<ide>
<ide> export function isMap(maybeMap) {
<del> return !!(maybeMap && maybeMap[IS_MAP_SENTINEL]);
<add> return !!(maybeMap && maybeMap[IS_MAP_SYMBOL]);
<ide> }
<ide>
<ide> Map.isMap = isMap;
<ide>
<del>const IS_MAP_SENTINEL = '';
<add>const IS_MAP_SYMBOL = '';
<ide>
<ide> export const MapPrototype = Map.prototype;
<del>MapPrototype[IS_MAP_SENTINEL] = true;
<add>MapPrototype[IS_MAP_SYMBOL] = true;
<ide> MapPrototype[DELETE] = MapPrototype.remove;
<ide> MapPrototype.removeAll = MapPrototype.deleteAll;
<ide> MapPrototype.setIn = setIn;
<ide><path>src/Operations.js
<ide> import {
<ide> isKeyed,
<ide> isIndexed,
<ide> isOrdered,
<del> IS_ORDERED_SENTINEL,
<add> IS_ORDERED_SYMBOL,
<ide> } from './Predicates';
<ide> import {
<ide> getIterator,
<ide> export class ToKeyedSequence extends KeyedSeq {
<ide> return this._iter.__iterator(type, reverse);
<ide> }
<ide> }
<del>ToKeyedSequence.prototype[IS_ORDERED_SENTINEL] = true;
<add>ToKeyedSequence.prototype[IS_ORDERED_SYMBOL] = true;
<ide>
<ide> export class ToIndexedSequence extends IndexedSeq {
<ide> constructor(iter) {
<ide><path>src/OrderedMap.js
<ide> */
<ide>
<ide> import { KeyedCollection } from './Collection';
<del>import { IS_ORDERED_SENTINEL, isOrdered } from './Predicates';
<add>import { IS_ORDERED_SYMBOL, isOrdered } from './Predicates';
<ide> import { Map, isMap, emptyMap } from './Map';
<ide> import { emptyList } from './List';
<ide> import { DELETE, NOT_SET, SIZE } from './TrieUtils';
<ide> function isOrderedMap(maybeOrderedMap) {
<ide>
<ide> OrderedMap.isOrderedMap = isOrderedMap;
<ide>
<del>OrderedMap.prototype[IS_ORDERED_SENTINEL] = true;
<add>OrderedMap.prototype[IS_ORDERED_SYMBOL] = true;
<ide> OrderedMap.prototype[DELETE] = OrderedMap.prototype.remove;
<ide>
<ide> function makeOrderedMap(map, list, ownerID, hash) {
<ide><path>src/OrderedSet.js
<ide> */
<ide>
<ide> import { SetCollection, KeyedCollection } from './Collection';
<del>import { IS_ORDERED_SENTINEL, isOrdered } from './Predicates';
<add>import { IS_ORDERED_SYMBOL, isOrdered } from './Predicates';
<ide> import { IndexedCollectionPrototype } from './CollectionImpl';
<ide> import { Set, isSet } from './Set';
<ide> import { emptyOrderedMap } from './OrderedMap';
<ide> function isOrderedSet(maybeOrderedSet) {
<ide> OrderedSet.isOrderedSet = isOrderedSet;
<ide>
<ide> const OrderedSetPrototype = OrderedSet.prototype;
<del>OrderedSetPrototype[IS_ORDERED_SENTINEL] = true;
<add>OrderedSetPrototype[IS_ORDERED_SYMBOL] = true;
<ide> OrderedSetPrototype.zip = IndexedCollectionPrototype.zip;
<ide> OrderedSetPrototype.zipWith = IndexedCollectionPrototype.zipWith;
<ide>
<ide><path>src/Predicates.js
<ide> export function isImmutable(maybeImmutable) {
<ide> }
<ide>
<ide> export function isCollection(maybeCollection) {
<del> return !!(maybeCollection && maybeCollection[IS_ITERABLE_SENTINEL]);
<add> return !!(maybeCollection && maybeCollection[IS_COLLECTION_SYMBOL]);
<ide> }
<ide>
<ide> export function isKeyed(maybeKeyed) {
<del> return !!(maybeKeyed && maybeKeyed[IS_KEYED_SENTINEL]);
<add> return !!(maybeKeyed && maybeKeyed[IS_KEYED_SYMBOL]);
<ide> }
<ide>
<ide> export function isIndexed(maybeIndexed) {
<del> return !!(maybeIndexed && maybeIndexed[IS_INDEXED_SENTINEL]);
<add> return !!(maybeIndexed && maybeIndexed[IS_INDEXED_SYMBOL]);
<ide> }
<ide>
<ide> export function isAssociative(maybeAssociative) {
<ide> return isKeyed(maybeAssociative) || isIndexed(maybeAssociative);
<ide> }
<ide>
<ide> export function isOrdered(maybeOrdered) {
<del> return !!(maybeOrdered && maybeOrdered[IS_ORDERED_SENTINEL]);
<add> return !!(maybeOrdered && maybeOrdered[IS_ORDERED_SYMBOL]);
<ide> }
<ide>
<ide> export function isRecord(maybeRecord) {
<del> return !!(maybeRecord && maybeRecord[IS_RECORD_SENTINEL]);
<add> return !!(maybeRecord && maybeRecord[IS_RECORD_SYMBOL]);
<ide> }
<ide>
<ide> export function isValueObject(maybeValue) {
<ide> export function isValueObject(maybeValue) {
<ide> );
<ide> }
<ide>
<del>export const IS_ITERABLE_SENTINEL = '';
<del>export const IS_KEYED_SENTINEL = '';
<del>export const IS_INDEXED_SENTINEL = '';
<del>export const IS_ORDERED_SENTINEL = '';
<del>export const IS_RECORD_SENTINEL = '';
<add>// Note: values unchanged to preserve immutable-devtools.
<add>export const IS_COLLECTION_SYMBOL = '';
<add>export const IS_KEYED_SYMBOL = '';
<add>export const IS_INDEXED_SYMBOL = '';
<add>export const IS_ORDERED_SYMBOL = '';
<add>export const IS_RECORD_SYMBOL = '';
<ide><path>src/Record.js
<ide> import { KeyedCollection } from './Collection';
<ide> import { keyedSeqFromValue } from './Seq';
<ide> import { List } from './List';
<ide> import { ITERATE_ENTRIES, ITERATOR_SYMBOL } from './Iterator';
<del>import { isRecord, IS_RECORD_SENTINEL } from './Predicates';
<add>import { isRecord, IS_RECORD_SYMBOL } from './Predicates';
<ide> import { CollectionPrototype } from './CollectionImpl';
<ide> import { DELETE } from './TrieUtils';
<ide> import { getIn } from './methods/getIn';
<ide> export class Record {
<ide> Record.isRecord = isRecord;
<ide> Record.getDescriptiveName = recordName;
<ide> const RecordPrototype = Record.prototype;
<del>RecordPrototype[IS_RECORD_SENTINEL] = true;
<add>RecordPrototype[IS_RECORD_SYMBOL] = true;
<ide> RecordPrototype[DELETE] = RecordPrototype.remove;
<ide> RecordPrototype.deleteIn = RecordPrototype.removeIn = deleteIn;
<ide> RecordPrototype.getIn = getIn;
<ide><path>src/Seq.js
<ide> import {
<ide> isKeyed,
<ide> isAssociative,
<ide> isRecord,
<del> IS_ORDERED_SENTINEL,
<add> IS_ORDERED_SYMBOL,
<ide> } from './Predicates';
<ide> import {
<ide> Iterator,
<ide> Seq.Keyed = KeyedSeq;
<ide> Seq.Set = SetSeq;
<ide> Seq.Indexed = IndexedSeq;
<ide>
<del>const IS_SEQ_SENTINEL = '';
<add>const IS_SEQ_SYMBOL = '';
<ide>
<del>Seq.prototype[IS_SEQ_SENTINEL] = true;
<add>Seq.prototype[IS_SEQ_SYMBOL] = true;
<ide>
<ide> // #pragma Root Sequences
<ide>
<ide> class ObjectSeq extends KeyedSeq {
<ide> });
<ide> }
<ide> }
<del>ObjectSeq.prototype[IS_ORDERED_SENTINEL] = true;
<add>ObjectSeq.prototype[IS_ORDERED_SYMBOL] = true;
<ide>
<ide> class CollectionSeq extends IndexedSeq {
<ide> constructor(collection) {
<ide> class CollectionSeq extends IndexedSeq {
<ide> // # pragma Helper functions
<ide>
<ide> export function isSeq(maybeSeq) {
<del> return !!(maybeSeq && maybeSeq[IS_SEQ_SENTINEL]);
<add> return !!(maybeSeq && maybeSeq[IS_SEQ_SYMBOL]);
<ide> }
<ide>
<ide> let EMPTY_SEQ;
<ide><path>src/Set.js
<ide> export class Set extends SetCollection {
<ide> }
<ide>
<ide> export function isSet(maybeSet) {
<del> return !!(maybeSet && maybeSet[IS_SET_SENTINEL]);
<add> return !!(maybeSet && maybeSet[IS_SET_SYMBOL]);
<ide> }
<ide>
<ide> Set.isSet = isSet;
<ide>
<del>const IS_SET_SENTINEL = '';
<add>const IS_SET_SYMBOL = '';
<ide>
<ide> const SetPrototype = Set.prototype;
<del>SetPrototype[IS_SET_SENTINEL] = true;
<add>SetPrototype[IS_SET_SYMBOL] = true;
<ide> SetPrototype[DELETE] = SetPrototype.remove;
<ide> SetPrototype.merge = SetPrototype.concat = SetPrototype.union;
<ide> SetPrototype.withMutations = withMutations;
<ide><path>src/Stack.js
<ide> export class Stack extends IndexedCollection {
<ide> }
<ide>
<ide> function isStack(maybeStack) {
<del> return !!(maybeStack && maybeStack[IS_STACK_SENTINEL]);
<add> return !!(maybeStack && maybeStack[IS_STACK_SYMBOL]);
<ide> }
<ide>
<ide> Stack.isStack = isStack;
<ide>
<del>const IS_STACK_SENTINEL = '';
<add>const IS_STACK_SYMBOL = '';
<ide>
<ide> const StackPrototype = Stack.prototype;
<del>StackPrototype[IS_STACK_SENTINEL] = true;
<add>StackPrototype[IS_STACK_SYMBOL] = true;
<ide> StackPrototype.shift = StackPrototype.pop;
<ide> StackPrototype.unshift = StackPrototype.push;
<ide> StackPrototype.unshiftAll = StackPrototype.pushAll; | 11 |
Text | Text | remove wrong default value in buffer.md | 0cd8359652b39cdb577ac3c67bdea03e6aba9f97 | <ide><path>doc/api/buffer.md
<ide> changes:
<ide> -->
<ide>
<ide> * `value` {integer} Number to be written to `buf`.
<del>* `offset` {integer} Number of bytes to skip before starting to write. Must satisfy: `0 <= offset <= buf.length - byteLength`.
<del>* `byteLength` {integer} Number of bytes to write. Must satisfy: `0 < byteLength <= 6`.
<del> **Default:** `false`.
<add>* `offset` {integer} Number of bytes to skip before starting to write.
<add> Must satisfy: `0 <= offset <= buf.length - byteLength`.
<add>* `byteLength` {integer} Number of bytes to write. Must satisfy:
<add> `0 < byteLength <= 6`.
<ide> * Returns: {integer} `offset` plus the number of bytes written.
<ide>
<ide> Writes `byteLength` bytes of `value` to `buf` at the specified `offset`. | 1 |
Java | Java | update nativemodule specs | 5ffabca0549c78048f35bb726e5e9b12f4cbe8bf | <ide><path>ReactAndroid/src/main/java/com/facebook/fbreact/specs/NativeAccessibilityInfoSpec.java
<ide> public NativeAccessibilityInfoSpec(ReactApplicationContext reactContext) {
<ide> }
<ide>
<ide> @ReactMethod
<del> public abstract void isTouchExplorationEnabled(Callback onSuccess);
<add> public abstract void announceForAccessibility(String announcement);
<ide>
<ide> @ReactMethod
<del> public abstract void setAccessibilityFocus(double reactTag);
<add> public abstract void isReduceMotionEnabled(Callback onSuccess);
<ide>
<ide> @ReactMethod
<del> public abstract void isReduceMotionEnabled(Callback onSuccess);
<add> public abstract void isTouchExplorationEnabled(Callback onSuccess);
<ide>
<ide> @ReactMethod
<del> public abstract void announceForAccessibility(String announcement);
<add> public abstract void setAccessibilityFocus(double reactTag);
<ide> }
<ide><path>ReactAndroid/src/main/java/com/facebook/fbreact/specs/NativeAccessibilityManagerSpec.java
<ide> public NativeAccessibilityManagerSpec(ReactApplicationContext reactContext) {
<ide> }
<ide>
<ide> @ReactMethod
<del> public abstract void getCurrentReduceMotionState(Callback onSuccess, Callback onError);
<del>
<del> @ReactMethod
<del> public abstract void getCurrentReduceTransparencyState(Callback onSuccess, Callback onError);
<add> public abstract void announceForAccessibility(String announcement);
<ide>
<ide> @ReactMethod
<ide> public abstract void getCurrentBoldTextState(Callback onSuccess, Callback onError);
<ide> public NativeAccessibilityManagerSpec(ReactApplicationContext reactContext) {
<ide> public abstract void getCurrentInvertColorsState(Callback onSuccess, Callback onError);
<ide>
<ide> @ReactMethod
<del> public abstract void setAccessibilityFocus(double reactTag);
<add> public abstract void getCurrentReduceMotionState(Callback onSuccess, Callback onError);
<ide>
<ide> @ReactMethod
<del> public abstract void getCurrentVoiceOverState(Callback onSuccess, Callback onError);
<add> public abstract void getCurrentReduceTransparencyState(Callback onSuccess, Callback onError);
<ide>
<ide> @ReactMethod
<del> public abstract void announceForAccessibility(String announcement);
<add> public abstract void getCurrentVoiceOverState(Callback onSuccess, Callback onError);
<ide>
<ide> @ReactMethod
<ide> public abstract void setAccessibilityContentSizeMultipliers(ReadableMap JSMultipliers);
<add>
<add> @ReactMethod
<add> public abstract void setAccessibilityFocus(double reactTag);
<ide> }
<ide><path>ReactAndroid/src/main/java/com/facebook/fbreact/specs/NativeAnimatedModuleSpec.java
<ide> public NativeAnimatedModuleSpec(ReactApplicationContext reactContext) {
<ide> }
<ide>
<ide> @ReactMethod
<del> public abstract void connectAnimatedNodes(double parentTag, double childTag);
<add> public abstract void addAnimatedEventToView(double viewTag, String eventName,
<add> ReadableMap eventMapping);
<ide>
<ide> @ReactMethod
<del> public abstract void dropAnimatedNode(double tag);
<add> public abstract void addListener(String eventName);
<ide>
<ide> @ReactMethod
<del> public abstract void stopAnimation(double animationId);
<add> public abstract void connectAnimatedNodeToView(double nodeTag, double viewTag);
<ide>
<ide> @ReactMethod
<del> public abstract void disconnectAnimatedNodeFromView(double nodeTag, double viewTag);
<add> public abstract void connectAnimatedNodes(double parentTag, double childTag);
<ide>
<ide> @ReactMethod
<del> public abstract void removeListeners(double count);
<add> public abstract void createAnimatedNode(double tag, ReadableMap config);
<ide>
<ide> @ReactMethod
<del> public abstract void flattenAnimatedNodeOffset(double nodeTag);
<add> public abstract void disconnectAnimatedNodeFromView(double nodeTag, double viewTag);
<ide>
<ide> @ReactMethod
<del> public abstract void removeAnimatedEventFromView(double viewTag, String eventName,
<del> double animatedNodeTag);
<add> public abstract void disconnectAnimatedNodes(double parentTag, double childTag);
<ide>
<ide> @ReactMethod
<del> public abstract void disconnectAnimatedNodes(double parentTag, double childTag);
<add> public abstract void dropAnimatedNode(double tag);
<ide>
<ide> @ReactMethod
<ide> public abstract void extractAnimatedNodeOffset(double nodeTag);
<ide>
<ide> @ReactMethod
<del> public abstract void setAnimatedNodeValue(double nodeTag, double value);
<add> public abstract void finishOperationBatch();
<ide>
<ide> @ReactMethod
<del> public abstract void startOperationBatch();
<add> public abstract void flattenAnimatedNodeOffset(double nodeTag);
<ide>
<ide> @ReactMethod
<del> public abstract void setAnimatedNodeOffset(double nodeTag, double offset);
<add> public abstract void getValue(double tag, Callback saveValueCallback);
<ide>
<ide> @ReactMethod
<del> public abstract void startAnimatingNode(double animationId, double nodeTag, ReadableMap config,
<del> Callback endCallback);
<add> public abstract void removeAnimatedEventFromView(double viewTag, String eventName,
<add> double animatedNodeTag);
<ide>
<ide> @ReactMethod
<del> public abstract void restoreDefaultValues(double nodeTag);
<add> public abstract void removeListeners(double count);
<ide>
<ide> @ReactMethod
<del> public abstract void getValue(double tag, Callback saveValueCallback);
<add> public abstract void restoreDefaultValues(double nodeTag);
<ide>
<ide> @ReactMethod
<del> public abstract void stopListeningToAnimatedNodeValue(double tag);
<add> public abstract void setAnimatedNodeOffset(double nodeTag, double offset);
<ide>
<ide> @ReactMethod
<del> public abstract void addAnimatedEventToView(double viewTag, String eventName,
<del> ReadableMap eventMapping);
<add> public abstract void setAnimatedNodeValue(double nodeTag, double value);
<ide>
<ide> @ReactMethod
<del> public abstract void createAnimatedNode(double tag, ReadableMap config);
<add> public abstract void startAnimatingNode(double animationId, double nodeTag, ReadableMap config,
<add> Callback endCallback);
<ide>
<ide> @ReactMethod
<del> public abstract void finishOperationBatch();
<add> public abstract void startListeningToAnimatedNodeValue(double tag);
<ide>
<ide> @ReactMethod
<del> public abstract void startListeningToAnimatedNodeValue(double tag);
<add> public abstract void startOperationBatch();
<ide>
<ide> @ReactMethod
<del> public abstract void connectAnimatedNodeToView(double nodeTag, double viewTag);
<add> public abstract void stopAnimation(double animationId);
<ide>
<ide> @ReactMethod
<del> public abstract void addListener(String eventName);
<add> public abstract void stopListeningToAnimatedNodeValue(double tag);
<ide> }
<ide><path>ReactAndroid/src/main/java/com/facebook/fbreact/specs/NativeAnimatedTurboModuleSpec.java
<ide> public NativeAnimatedTurboModuleSpec(ReactApplicationContext reactContext) {
<ide> }
<ide>
<ide> @ReactMethod
<del> public abstract void connectAnimatedNodes(double parentTag, double childTag);
<add> public abstract void addAnimatedEventToView(double viewTag, String eventName,
<add> ReadableMap eventMapping);
<ide>
<ide> @ReactMethod
<del> public abstract void dropAnimatedNode(double tag);
<add> public abstract void addListener(String eventName);
<ide>
<ide> @ReactMethod
<del> public abstract void stopAnimation(double animationId);
<add> public abstract void connectAnimatedNodeToView(double nodeTag, double viewTag);
<ide>
<ide> @ReactMethod
<del> public abstract void disconnectAnimatedNodeFromView(double nodeTag, double viewTag);
<add> public abstract void connectAnimatedNodes(double parentTag, double childTag);
<ide>
<ide> @ReactMethod
<del> public abstract void removeListeners(double count);
<add> public abstract void createAnimatedNode(double tag, ReadableMap config);
<ide>
<ide> @ReactMethod
<del> public abstract void flattenAnimatedNodeOffset(double nodeTag);
<add> public abstract void disconnectAnimatedNodeFromView(double nodeTag, double viewTag);
<ide>
<ide> @ReactMethod
<del> public abstract void removeAnimatedEventFromView(double viewTag, String eventName,
<del> double animatedNodeTag);
<add> public abstract void disconnectAnimatedNodes(double parentTag, double childTag);
<ide>
<ide> @ReactMethod
<del> public abstract void disconnectAnimatedNodes(double parentTag, double childTag);
<add> public abstract void dropAnimatedNode(double tag);
<ide>
<ide> @ReactMethod
<ide> public abstract void extractAnimatedNodeOffset(double nodeTag);
<ide>
<ide> @ReactMethod
<del> public abstract void setAnimatedNodeValue(double nodeTag, double value);
<add> public abstract void finishOperationBatch();
<ide>
<ide> @ReactMethod
<del> public abstract void startOperationBatch();
<add> public abstract void flattenAnimatedNodeOffset(double nodeTag);
<ide>
<ide> @ReactMethod
<del> public abstract void setAnimatedNodeOffset(double nodeTag, double offset);
<add> public abstract void getValue(double tag, Callback saveValueCallback);
<ide>
<ide> @ReactMethod
<del> public abstract void startAnimatingNode(double animationId, double nodeTag, ReadableMap config,
<del> Callback endCallback);
<add> public abstract void removeAnimatedEventFromView(double viewTag, String eventName,
<add> double animatedNodeTag);
<ide>
<ide> @ReactMethod
<del> public abstract void restoreDefaultValues(double nodeTag);
<add> public abstract void removeListeners(double count);
<ide>
<ide> @ReactMethod
<del> public abstract void getValue(double tag, Callback saveValueCallback);
<add> public abstract void restoreDefaultValues(double nodeTag);
<ide>
<ide> @ReactMethod
<del> public abstract void stopListeningToAnimatedNodeValue(double tag);
<add> public abstract void setAnimatedNodeOffset(double nodeTag, double offset);
<ide>
<ide> @ReactMethod
<del> public abstract void addAnimatedEventToView(double viewTag, String eventName,
<del> ReadableMap eventMapping);
<add> public abstract void setAnimatedNodeValue(double nodeTag, double value);
<ide>
<ide> @ReactMethod
<del> public abstract void createAnimatedNode(double tag, ReadableMap config);
<add> public abstract void startAnimatingNode(double animationId, double nodeTag, ReadableMap config,
<add> Callback endCallback);
<ide>
<ide> @ReactMethod
<del> public abstract void finishOperationBatch();
<add> public abstract void startListeningToAnimatedNodeValue(double tag);
<ide>
<ide> @ReactMethod
<del> public abstract void startListeningToAnimatedNodeValue(double tag);
<add> public abstract void startOperationBatch();
<ide>
<ide> @ReactMethod
<del> public abstract void connectAnimatedNodeToView(double nodeTag, double viewTag);
<add> public abstract void stopAnimation(double animationId);
<ide>
<ide> @ReactMethod
<del> public abstract void addListener(String eventName);
<add> public abstract void stopListeningToAnimatedNodeValue(double tag);
<ide> }
<ide><path>ReactAndroid/src/main/java/com/facebook/fbreact/specs/NativeAppStateSpec.java
<ide> public NativeAppStateSpec(ReactApplicationContext reactContext) {
<ide> }
<ide>
<ide> @ReactMethod
<del> public abstract void removeListeners(double count);
<add> public abstract void addListener(String eventName);
<ide>
<ide> @ReactMethod
<ide> public abstract void getCurrentAppState(Callback success, Callback error);
<ide>
<ide> @ReactMethod
<del> public abstract void addListener(String eventName);
<add> public abstract void removeListeners(double count);
<ide>
<ide> protected abstract Map<String, Object> getTypedExportedConstants();
<ide>
<ide><path>ReactAndroid/src/main/java/com/facebook/fbreact/specs/NativeAppearanceSpec.java
<ide> public NativeAppearanceSpec(ReactApplicationContext reactContext) {
<ide> }
<ide>
<ide> @ReactMethod
<del> public abstract void removeListeners(double count);
<add> public abstract void addListener(String eventName);
<ide>
<ide> @ReactMethod(
<ide> isBlockingSynchronousMethod = true
<ide> )
<ide> public abstract @Nullable String getColorScheme();
<ide>
<ide> @ReactMethod
<del> public abstract void addListener(String eventName);
<add> public abstract void removeListeners(double count);
<ide> }
<ide><path>ReactAndroid/src/main/java/com/facebook/fbreact/specs/NativeAsyncStorageSpec.java
<ide> public NativeAsyncStorageSpec(ReactApplicationContext reactContext) {
<ide> }
<ide>
<ide> @ReactMethod
<del> public abstract void multiSet(ReadableArray kvPairs, Callback callback);
<add> public abstract void clear(Callback callback);
<ide>
<ide> @ReactMethod
<del> public abstract void multiGet(ReadableArray keys, Callback callback);
<add> public abstract void getAllKeys(Callback callback);
<ide>
<ide> @ReactMethod
<del> public abstract void getAllKeys(Callback callback);
<add> public abstract void multiGet(ReadableArray keys, Callback callback);
<ide>
<ide> @ReactMethod
<ide> public abstract void multiMerge(ReadableArray kvPairs, Callback callback);
<ide>
<ide> @ReactMethod
<del> public abstract void clear(Callback callback);
<add> public abstract void multiRemove(ReadableArray keys, Callback callback);
<ide>
<ide> @ReactMethod
<del> public abstract void multiRemove(ReadableArray keys, Callback callback);
<add> public abstract void multiSet(ReadableArray kvPairs, Callback callback);
<ide> }
<ide><path>ReactAndroid/src/main/java/com/facebook/fbreact/specs/NativeBlobModuleSpec.java
<ide> public NativeBlobModuleSpec(ReactApplicationContext reactContext) {
<ide> super(reactContext);
<ide> }
<ide>
<del> @ReactMethod
<del> public abstract void sendOverSocket(ReadableMap blob, double socketID);
<del>
<ide> @ReactMethod
<ide> public abstract void addNetworkingHandler();
<ide>
<ide> public NativeBlobModuleSpec(ReactApplicationContext reactContext) {
<ide> @ReactMethod
<ide> public abstract void removeWebSocketHandler(double id);
<ide>
<add> @ReactMethod
<add> public abstract void sendOverSocket(ReadableMap blob, double socketID);
<add>
<ide> protected abstract Map<String, Object> getTypedExportedConstants();
<ide>
<ide> @Override
<ide><path>ReactAndroid/src/main/java/com/facebook/fbreact/specs/NativeBugReportingSpec.java
<ide> public NativeBugReportingSpec(ReactApplicationContext reactContext) {
<ide> super(reactContext);
<ide> }
<ide>
<del> @ReactMethod
<del> public abstract void startReportAProblemFlow();
<del>
<ide> @ReactMethod
<ide> public abstract void setCategoryID(String categoryID);
<ide>
<ide> @ReactMethod
<ide> public abstract void setExtraData(ReadableMap extraData, ReadableMap extraFiles);
<add>
<add> @ReactMethod
<add> public abstract void startReportAProblemFlow();
<ide> }
<ide><path>ReactAndroid/src/main/java/com/facebook/fbreact/specs/NativeClipboardSpec.java
<ide> public NativeClipboardSpec(ReactApplicationContext reactContext) {
<ide> }
<ide>
<ide> @ReactMethod
<del> public abstract void setString(String content);
<add> public abstract void getString(Promise promise);
<ide>
<ide> @ReactMethod
<del> public abstract void getString(Promise promise);
<add> public abstract void setString(String content);
<ide> }
<ide><path>ReactAndroid/src/main/java/com/facebook/fbreact/specs/NativeDevMenuSpec.java
<ide> public NativeDevMenuSpec(ReactApplicationContext reactContext) {
<ide> super(reactContext);
<ide> }
<ide>
<add> @ReactMethod
<add> public abstract void debugRemotely(boolean enableDebug);
<add>
<ide> @ReactMethod
<ide> public abstract void reload();
<ide>
<ide> @ReactMethod
<del> public abstract void debugRemotely(boolean enableDebug);
<add> public abstract void setHotLoadingEnabled(boolean enabled);
<ide>
<ide> @ReactMethod
<ide> public abstract void setProfilingEnabled(boolean enabled);
<ide>
<ide> @ReactMethod
<ide> public abstract void show();
<del>
<del> @ReactMethod
<del> public abstract void setHotLoadingEnabled(boolean enabled);
<ide> }
<ide><path>ReactAndroid/src/main/java/com/facebook/fbreact/specs/NativeDevSettingsSpec.java
<ide> public NativeDevSettingsSpec(ReactApplicationContext reactContext) {
<ide> }
<ide>
<ide> @ReactMethod
<del> public abstract void setIsDebuggingRemotely(boolean isDebuggingRemotelyEnabled);
<add> public abstract void addListener(String eventName);
<add>
<add> @ReactMethod
<add> public abstract void addMenuItem(String title);
<ide>
<ide> @ReactMethod
<ide> public void onFastRefresh() {
<ide> public void onFastRefresh() {
<ide> public abstract void reload();
<ide>
<ide> @ReactMethod
<del> public abstract void removeListeners(double count);
<del>
<del> @ReactMethod
<del> public abstract void setProfilingEnabled(boolean isProfilingEnabled);
<add> public void reloadWithReason(String reason) {
<add> }
<ide>
<ide> @ReactMethod
<del> public abstract void addMenuItem(String title);
<add> public abstract void removeListeners(double count);
<ide>
<ide> @ReactMethod
<ide> public abstract void setHotLoadingEnabled(boolean isHotLoadingEnabled);
<ide>
<ide> @ReactMethod
<del> public abstract void toggleElementInspector();
<add> public abstract void setIsDebuggingRemotely(boolean isDebuggingRemotelyEnabled);
<ide>
<ide> @ReactMethod
<del> public void reloadWithReason(String reason) {
<del> }
<add> public abstract void setIsShakeToShowDevMenuEnabled(boolean enabled);
<ide>
<ide> @ReactMethod
<del> public abstract void addListener(String eventName);
<add> public abstract void setProfilingEnabled(boolean isProfilingEnabled);
<ide>
<ide> @ReactMethod
<del> public abstract void setIsShakeToShowDevMenuEnabled(boolean enabled);
<add> public abstract void toggleElementInspector();
<ide> }
<ide><path>ReactAndroid/src/main/java/com/facebook/fbreact/specs/NativeExceptionsManagerSpec.java
<ide> public NativeExceptionsManagerSpec(ReactApplicationContext reactContext) {
<ide> public void dismissRedbox() {
<ide> }
<ide>
<del> @ReactMethod
<del> public abstract void reportFatalException(String message, ReadableArray stack,
<del> double exceptionId);
<del>
<ide> @ReactMethod
<ide> public void reportException(ReadableMap data) {
<ide> }
<ide>
<ide> @ReactMethod
<del> public abstract void updateExceptionMessage(String message, ReadableArray stack,
<add> public abstract void reportFatalException(String message, ReadableArray stack,
<ide> double exceptionId);
<ide>
<ide> @ReactMethod
<ide> public abstract void reportSoftException(String message, ReadableArray stack, double exceptionId);
<add>
<add> @ReactMethod
<add> public abstract void updateExceptionMessage(String message, ReadableArray stack,
<add> double exceptionId);
<ide> }
<ide><path>ReactAndroid/src/main/java/com/facebook/fbreact/specs/NativeFrameRateLoggerSpec.java
<ide> public NativeFrameRateLoggerSpec(ReactApplicationContext reactContext) {
<ide> }
<ide>
<ide> @ReactMethod
<del> public abstract void endScroll();
<add> public abstract void beginScroll();
<ide>
<ide> @ReactMethod
<del> public abstract void beginScroll();
<add> public abstract void endScroll();
<ide>
<ide> @ReactMethod
<ide> public abstract void setContext(String context);
<ide><path>ReactAndroid/src/main/java/com/facebook/fbreact/specs/NativeImageLoaderAndroidSpec.java
<ide> public NativeImageLoaderAndroidSpec(ReactApplicationContext reactContext) {
<ide> public abstract void getSize(String uri, Promise promise);
<ide>
<ide> @ReactMethod
<del> public abstract void prefetchImage(String uri, double requestId, Promise promise);
<add> public abstract void getSizeWithHeaders(String uri, ReadableMap headers, Promise promise);
<ide>
<ide> @ReactMethod
<del> public abstract void queryCache(ReadableArray uris, Promise promise);
<add> public abstract void prefetchImage(String uri, double requestId, Promise promise);
<ide>
<ide> @ReactMethod
<del> public abstract void getSizeWithHeaders(String uri, ReadableMap headers, Promise promise);
<add> public abstract void queryCache(ReadableArray uris, Promise promise);
<ide> }
<ide><path>ReactAndroid/src/main/java/com/facebook/fbreact/specs/NativeImageLoaderIOSSpec.java
<ide> public NativeImageLoaderIOSSpec(ReactApplicationContext reactContext) {
<ide> public abstract void getSize(String uri, Promise promise);
<ide>
<ide> @ReactMethod
<del> public abstract void prefetchImage(String uri, Promise promise);
<add> public abstract void getSizeWithHeaders(String uri, ReadableMap headers, Promise promise);
<ide>
<ide> @ReactMethod
<del> public abstract void queryCache(ReadableArray uris, Promise promise);
<add> public abstract void prefetchImage(String uri, Promise promise);
<ide>
<ide> @ReactMethod
<del> public abstract void getSizeWithHeaders(String uri, ReadableMap headers, Promise promise);
<add> public abstract void queryCache(ReadableArray uris, Promise promise);
<ide> }
<ide><path>ReactAndroid/src/main/java/com/facebook/fbreact/specs/NativeImagePickerIOSSpec.java
<ide> public NativeImagePickerIOSSpec(ReactApplicationContext reactContext) {
<ide> }
<ide>
<ide> @ReactMethod
<del> public abstract void openCameraDialog(ReadableMap config, Callback successCallback,
<del> Callback cancelCallback);
<add> public abstract void canRecordVideos(Callback callback);
<ide>
<ide> @ReactMethod
<ide> public abstract void canUseCamera(Callback callback);
<ide>
<ide> @ReactMethod
<del> public abstract void openSelectDialog(ReadableMap config, Callback successCallback,
<del> Callback cancelCallback);
<add> public abstract void clearAllPendingVideos();
<ide>
<ide> @ReactMethod
<del> public abstract void canRecordVideos(Callback callback);
<add> public abstract void openCameraDialog(ReadableMap config, Callback successCallback,
<add> Callback cancelCallback);
<ide>
<ide> @ReactMethod
<del> public abstract void clearAllPendingVideos();
<add> public abstract void openSelectDialog(ReadableMap config, Callback successCallback,
<add> Callback cancelCallback);
<ide>
<ide> @ReactMethod
<ide> public abstract void removePendingVideo(String url);
<ide><path>ReactAndroid/src/main/java/com/facebook/fbreact/specs/NativeImageStoreSpec.java
<ide> public NativeImageStoreSpec(ReactApplicationContext reactContext) {
<ide> }
<ide>
<ide> @ReactMethod
<del> public abstract void getBase64ForTag(String uri, Callback successCallback,
<add> public abstract void addImageFromBase64(String base64ImageData, Callback successCallback,
<ide> Callback errorCallback);
<ide>
<ide> @ReactMethod
<del> public abstract void removeImageForTag(String uri);
<del>
<del> @ReactMethod
<del> public abstract void addImageFromBase64(String base64ImageData, Callback successCallback,
<add> public abstract void getBase64ForTag(String uri, Callback successCallback,
<ide> Callback errorCallback);
<ide>
<ide> @ReactMethod
<ide> public abstract void hasImageForTag(String uri, Callback callback);
<add>
<add> @ReactMethod
<add> public abstract void removeImageForTag(String uri);
<ide> }
<ide><path>ReactAndroid/src/main/java/com/facebook/fbreact/specs/NativeKeyboardObserverSpec.java
<ide> public NativeKeyboardObserverSpec(ReactApplicationContext reactContext) {
<ide> }
<ide>
<ide> @ReactMethod
<del> public abstract void removeListeners(double count);
<add> public abstract void addListener(String eventName);
<ide>
<ide> @ReactMethod
<del> public abstract void addListener(String eventName);
<add> public abstract void removeListeners(double count);
<ide> }
<ide><path>ReactAndroid/src/main/java/com/facebook/fbreact/specs/NativeLinkingSpec.java
<ide> public NativeLinkingSpec(ReactApplicationContext reactContext) {
<ide> }
<ide>
<ide> @ReactMethod
<del> public abstract void openURL(String url, Promise promise);
<add> public abstract void addListener(String eventName);
<ide>
<ide> @ReactMethod
<del> public abstract void sendIntent(String action, ReadableArray extras, Promise promise);
<add> public abstract void canOpenURL(String url, Promise promise);
<ide>
<ide> @ReactMethod
<del> public abstract void removeListeners(double count);
<add> public abstract void getInitialURL(Promise promise);
<ide>
<ide> @ReactMethod
<del> public abstract void canOpenURL(String url, Promise promise);
<add> public abstract void openSettings(Promise promise);
<ide>
<ide> @ReactMethod
<del> public abstract void getInitialURL(Promise promise);
<add> public abstract void openURL(String url, Promise promise);
<ide>
<ide> @ReactMethod
<del> public abstract void openSettings(Promise promise);
<add> public abstract void removeListeners(double count);
<ide>
<ide> @ReactMethod
<del> public abstract void addListener(String eventName);
<add> public abstract void sendIntent(String action, ReadableArray extras, Promise promise);
<ide> }
<ide><path>ReactAndroid/src/main/java/com/facebook/fbreact/specs/NativeModalManagerSpec.java
<ide> public NativeModalManagerSpec(ReactApplicationContext reactContext) {
<ide> }
<ide>
<ide> @ReactMethod
<del> public abstract void removeListeners(double count);
<add> public abstract void addListener(String eventName);
<ide>
<ide> @ReactMethod
<del> public abstract void addListener(String eventName);
<add> public abstract void removeListeners(double count);
<ide> }
<ide><path>ReactAndroid/src/main/java/com/facebook/fbreact/specs/NativeNetworkingAndroidSpec.java
<ide> public NativeNetworkingAndroidSpec(ReactApplicationContext reactContext) {
<ide> public abstract void abortRequest(double requestId);
<ide>
<ide> @ReactMethod
<del> public abstract void removeListeners(double count);
<add> public abstract void addListener(String eventName);
<ide>
<ide> @ReactMethod
<ide> public abstract void clearCookies(Callback callback);
<ide>
<add> @ReactMethod
<add> public abstract void removeListeners(double count);
<add>
<ide> @ReactMethod
<ide> public abstract void sendRequest(String method, String url, double requestId,
<ide> ReadableArray headers, ReadableMap data, String responseType, boolean useIncrementalUpdates,
<ide> double timeout, boolean withCredentials);
<del>
<del> @ReactMethod
<del> public abstract void addListener(String eventName);
<ide> }
<ide><path>ReactAndroid/src/main/java/com/facebook/fbreact/specs/NativeNetworkingIOSSpec.java
<ide> public NativeNetworkingIOSSpec(ReactApplicationContext reactContext) {
<ide> public abstract void abortRequest(double requestId);
<ide>
<ide> @ReactMethod
<del> public abstract void removeListeners(double count);
<add> public abstract void addListener(String eventName);
<ide>
<ide> @ReactMethod
<ide> public abstract void clearCookies(Callback callback);
<ide>
<ide> @ReactMethod
<del> public abstract void sendRequest(ReadableMap query, Callback callback);
<add> public abstract void removeListeners(double count);
<ide>
<ide> @ReactMethod
<del> public abstract void addListener(String eventName);
<add> public abstract void sendRequest(ReadableMap query, Callback callback);
<ide> }
<ide><path>ReactAndroid/src/main/java/com/facebook/fbreact/specs/NativePermissionsAndroidSpec.java
<ide> public NativePermissionsAndroidSpec(ReactApplicationContext reactContext) {
<ide> public abstract void checkPermission(String permission, Promise promise);
<ide>
<ide> @ReactMethod
<del> public abstract void requestPermission(String permission, Promise promise);
<add> public abstract void requestMultiplePermissions(ReadableArray permissions, Promise promise);
<ide>
<ide> @ReactMethod
<del> public abstract void shouldShowRequestPermissionRationale(String permission, Promise promise);
<add> public abstract void requestPermission(String permission, Promise promise);
<ide>
<ide> @ReactMethod
<del> public abstract void requestMultiplePermissions(ReadableArray permissions, Promise promise);
<add> public abstract void shouldShowRequestPermissionRationale(String permission, Promise promise);
<ide> }
<ide><path>ReactAndroid/src/main/java/com/facebook/fbreact/specs/NativePushNotificationManagerIOSSpec.java
<ide> public NativePushNotificationManagerIOSSpec(ReactApplicationContext reactContext
<ide> }
<ide>
<ide> @ReactMethod
<del> public abstract void getInitialNotification(Promise promise);
<add> public abstract void abandonPermissions();
<ide>
<ide> @ReactMethod
<del> public abstract void removeListeners(double count);
<add> public abstract void addListener(String eventType);
<ide>
<ide> @ReactMethod
<del> public abstract void setApplicationIconBadgeNumber(double num);
<add> public abstract void cancelAllLocalNotifications();
<ide>
<ide> @ReactMethod
<del> public abstract void scheduleLocalNotification(ReadableMap notification);
<add> public abstract void cancelLocalNotifications(ReadableMap userInfo);
<ide>
<ide> @ReactMethod
<del> public abstract void requestPermissions(ReadableMap permission, Promise promise);
<add> public abstract void checkPermissions(Callback callback);
<ide>
<ide> @ReactMethod
<del> public abstract void checkPermissions(Callback callback);
<add> public abstract void getApplicationIconBadgeNumber(Callback callback);
<ide>
<ide> @ReactMethod
<del> public abstract void getScheduledLocalNotifications(Callback callback);
<add> public abstract void getDeliveredNotifications(Callback callback);
<ide>
<ide> @ReactMethod
<del> public abstract void removeAllDeliveredNotifications();
<add> public abstract void getInitialNotification(Promise promise);
<ide>
<ide> @ReactMethod
<del> public abstract void onFinishRemoteNotification(String notificationId, String fetchResult);
<add> public abstract void getScheduledLocalNotifications(Callback callback);
<ide>
<ide> @ReactMethod
<del> public abstract void abandonPermissions();
<add> public abstract void onFinishRemoteNotification(String notificationId, String fetchResult);
<ide>
<ide> @ReactMethod
<del> public abstract void cancelLocalNotifications(ReadableMap userInfo);
<add> public abstract void presentLocalNotification(ReadableMap notification);
<ide>
<ide> @ReactMethod
<del> public abstract void cancelAllLocalNotifications();
<add> public abstract void removeAllDeliveredNotifications();
<ide>
<ide> @ReactMethod
<ide> public abstract void removeDeliveredNotifications(ReadableArray identifiers);
<ide>
<ide> @ReactMethod
<del> public abstract void getDeliveredNotifications(Callback callback);
<add> public abstract void removeListeners(double count);
<ide>
<ide> @ReactMethod
<del> public abstract void getApplicationIconBadgeNumber(Callback callback);
<add> public abstract void requestPermissions(ReadableMap permission, Promise promise);
<ide>
<ide> @ReactMethod
<del> public abstract void presentLocalNotification(ReadableMap notification);
<add> public abstract void scheduleLocalNotification(ReadableMap notification);
<ide>
<ide> @ReactMethod
<del> public abstract void addListener(String eventType);
<add> public abstract void setApplicationIconBadgeNumber(double num);
<ide> }
<ide><path>ReactAndroid/src/main/java/com/facebook/fbreact/specs/NativeRedBoxSpec.java
<ide> public NativeRedBoxSpec(ReactApplicationContext reactContext) {
<ide> }
<ide>
<ide> @ReactMethod
<del> public abstract void setExtraData(ReadableMap extraData, String forIdentifier);
<add> public abstract void dismiss();
<ide>
<ide> @ReactMethod
<del> public abstract void dismiss();
<add> public abstract void setExtraData(ReadableMap extraData, String forIdentifier);
<ide> }
<ide><path>ReactAndroid/src/main/java/com/facebook/fbreact/specs/NativeSettingsManagerSpec.java
<ide> public NativeSettingsManagerSpec(ReactApplicationContext reactContext) {
<ide> }
<ide>
<ide> @ReactMethod
<del> public abstract void setValues(ReadableMap values);
<add> public abstract void deleteValues(ReadableArray values);
<ide>
<ide> @ReactMethod
<del> public abstract void deleteValues(ReadableArray values);
<add> public abstract void setValues(ReadableMap values);
<ide>
<ide> protected abstract Map<String, Object> getTypedExportedConstants();
<ide>
<ide><path>ReactAndroid/src/main/java/com/facebook/fbreact/specs/NativeStatusBarManagerAndroidSpec.java
<ide> public NativeStatusBarManagerAndroidSpec(ReactApplicationContext reactContext) {
<ide> super(reactContext);
<ide> }
<ide>
<del> @ReactMethod
<del> public abstract void setTranslucent(boolean translucent);
<del>
<ide> @ReactMethod
<ide> public abstract void setColor(double color, boolean animated);
<ide>
<ide> public NativeStatusBarManagerAndroidSpec(ReactApplicationContext reactContext) {
<ide> @ReactMethod
<ide> public abstract void setStyle(@Nullable String statusBarStyle);
<ide>
<add> @ReactMethod
<add> public abstract void setTranslucent(boolean translucent);
<add>
<ide> protected abstract Map<String, Object> getTypedExportedConstants();
<ide>
<ide> @Override
<ide><path>ReactAndroid/src/main/java/com/facebook/fbreact/specs/NativeStatusBarManagerIOSSpec.java
<ide> public NativeStatusBarManagerIOSSpec(ReactApplicationContext reactContext) {
<ide> }
<ide>
<ide> @ReactMethod
<del> public abstract void setNetworkActivityIndicatorVisible(boolean visible);
<add> public abstract void addListener(String eventType);
<ide>
<ide> @ReactMethod
<ide> public abstract void getHeight(Callback callback);
<ide> public NativeStatusBarManagerIOSSpec(ReactApplicationContext reactContext) {
<ide> public abstract void setHidden(boolean hidden, String withAnimation);
<ide>
<ide> @ReactMethod
<del> public abstract void setStyle(@Nullable String statusBarStyle, boolean animated);
<add> public abstract void setNetworkActivityIndicatorVisible(boolean visible);
<ide>
<ide> @ReactMethod
<del> public abstract void addListener(String eventType);
<add> public abstract void setStyle(@Nullable String statusBarStyle, boolean animated);
<ide>
<ide> protected abstract Map<String, Object> getTypedExportedConstants();
<ide>
<ide><path>ReactAndroid/src/main/java/com/facebook/fbreact/specs/NativeTVNavigationEventEmitterSpec.java
<ide> public NativeTVNavigationEventEmitterSpec(ReactApplicationContext reactContext)
<ide> }
<ide>
<ide> @ReactMethod
<del> public abstract void removeListeners(double count);
<add> public abstract void addListener(String eventName);
<ide>
<ide> @ReactMethod
<del> public abstract void addListener(String eventName);
<add> public abstract void removeListeners(double count);
<ide> }
<ide><path>ReactAndroid/src/main/java/com/facebook/fbreact/specs/NativeTimingSpec.java
<ide> public NativeTimingSpec(ReactApplicationContext reactContext) {
<ide> super(reactContext);
<ide> }
<ide>
<del> @ReactMethod
<del> public abstract void deleteTimer(double timerID);
<del>
<ide> @ReactMethod
<ide> public abstract void createTimer(double callbackID, double duration, double jsSchedulingTime,
<ide> boolean repeats);
<ide>
<add> @ReactMethod
<add> public abstract void deleteTimer(double timerID);
<add>
<ide> @ReactMethod
<ide> public abstract void setSendIdleEvents(boolean sendIdleEvents);
<ide> }
<ide><path>ReactAndroid/src/main/java/com/facebook/fbreact/specs/NativeToastAndroidSpec.java
<ide> public NativeToastAndroidSpec(ReactApplicationContext reactContext) {
<ide> public abstract void show(String message, double duration);
<ide>
<ide> @ReactMethod
<del> public abstract void showWithGravityAndOffset(String message, double duration, double gravity,
<del> double xOffset, double yOffset);
<add> public abstract void showWithGravity(String message, double duration, double gravity);
<ide>
<ide> @ReactMethod
<del> public abstract void showWithGravity(String message, double duration, double gravity);
<add> public abstract void showWithGravityAndOffset(String message, double duration, double gravity,
<add> double xOffset, double yOffset);
<ide>
<ide> protected abstract Map<String, Object> getTypedExportedConstants();
<ide>
<ide><path>ReactAndroid/src/main/java/com/facebook/fbreact/specs/NativeUIManagerSpec.java
<ide> public NativeUIManagerSpec(ReactApplicationContext reactContext) {
<ide> super(reactContext);
<ide> }
<ide>
<del> @ReactMethod(
<del> isBlockingSynchronousMethod = true
<del> )
<del> public abstract WritableArray getDefaultEventTypes();
<add> @ReactMethod
<add> public abstract void blur(Double reactTag);
<ide>
<ide> @ReactMethod
<del> public abstract void setLayoutAnimationEnabledExperimental(boolean enabled);
<add> public abstract void clearJSResponder();
<ide>
<ide> @ReactMethod
<ide> public abstract void configureNextLayoutAnimation(ReadableMap config, Callback callback,
<ide> Callback errorCallback);
<ide>
<ide> @ReactMethod
<del> public abstract void focus(Double reactTag);
<del>
<del> @ReactMethod
<del> public abstract void blur(Double reactTag);
<add> public abstract void createView(Double reactTag, String viewName, double rootTag,
<add> ReadableMap props);
<ide>
<ide> @ReactMethod
<del> public abstract void removeSubviewsFromContainerWithID(double containerID);
<add> public abstract void dismissPopupMenu();
<ide>
<ide> @ReactMethod
<del> public abstract void setJSResponder(Double reactTag, boolean blockNativeResponder);
<add> public abstract void dispatchViewManagerCommand(Double reactTag, double commandID,
<add> ReadableArray commandArgs);
<ide>
<ide> @ReactMethod
<del> public abstract void measureLayout(Double reactTag, Double ancestorReactTag,
<del> Callback errorCallback, Callback callback);
<add> public abstract void findSubviewIn(Double reactTag, ReadableArray point, Callback callback);
<ide>
<ide> @ReactMethod
<del> public abstract void clearJSResponder();
<add> public abstract void focus(Double reactTag);
<ide>
<ide> @ReactMethod(
<ide> isBlockingSynchronousMethod = true
<ide> )
<ide> public abstract WritableMap getConstantsForViewManager(String viewManagerName);
<ide>
<del> @ReactMethod
<del> public abstract void updateView(double reactTag, String viewName, ReadableMap props);
<add> @ReactMethod(
<add> isBlockingSynchronousMethod = true
<add> )
<add> public abstract WritableArray getDefaultEventTypes();
<add>
<add> @ReactMethod(
<add> isBlockingSynchronousMethod = true
<add> )
<add> public abstract WritableMap lazilyLoadView(String name);
<ide>
<ide> @ReactMethod
<del> public abstract void dispatchViewManagerCommand(Double reactTag, double commandID,
<del> ReadableArray commandArgs);
<add> public abstract void manageChildren(Double containerTag, ReadableArray moveFromIndices,
<add> ReadableArray moveToIndices, ReadableArray addChildReactTags, ReadableArray addAtIndices,
<add> ReadableArray removeAtIndices);
<ide>
<ide> @ReactMethod
<del> public abstract void createView(Double reactTag, String viewName, double rootTag,
<del> ReadableMap props);
<add> public abstract void measure(Double reactTag, Callback callback);
<ide>
<ide> @ReactMethod
<ide> public abstract void measureInWindow(Double reactTag, Callback callback);
<ide>
<ide> @ReactMethod
<del> public abstract void sendAccessibilityEvent(Double reactTag, double eventType);
<del>
<del> @ReactMethod(
<del> isBlockingSynchronousMethod = true
<del> )
<del> public abstract WritableMap lazilyLoadView(String name);
<add> public abstract void measureLayout(Double reactTag, Double ancestorReactTag,
<add> Callback errorCallback, Callback callback);
<ide>
<ide> @ReactMethod
<del> public abstract void viewIsDescendantOf(Double reactTag, Double ancestorReactTag,
<add> public abstract void measureLayoutRelativeToParent(Double reactTag, Callback errorCallback,
<ide> Callback callback);
<ide>
<ide> @ReactMethod
<del> public abstract void findSubviewIn(Double reactTag, ReadableArray point, Callback callback);
<add> public abstract void removeSubviewsFromContainerWithID(double containerID);
<ide>
<ide> @ReactMethod
<del> public abstract void manageChildren(Double containerTag, ReadableArray moveFromIndices,
<del> ReadableArray moveToIndices, ReadableArray addChildReactTags, ReadableArray addAtIndices,
<del> ReadableArray removeAtIndices);
<add> public abstract void replaceExistingNonRootView(Double reactTag, Double newReactTag);
<ide>
<ide> @ReactMethod
<del> public abstract void showPopupMenu(Double reactTag, ReadableArray items, Callback error,
<del> Callback success);
<add> public abstract void sendAccessibilityEvent(Double reactTag, double eventType);
<ide>
<ide> @ReactMethod
<del> public abstract void replaceExistingNonRootView(Double reactTag, Double newReactTag);
<add> public abstract void setChildren(Double containerTag, ReadableArray reactTags);
<ide>
<ide> @ReactMethod
<del> public abstract void dismissPopupMenu();
<add> public abstract void setJSResponder(Double reactTag, boolean blockNativeResponder);
<ide>
<ide> @ReactMethod
<del> public abstract void measure(Double reactTag, Callback callback);
<add> public abstract void setLayoutAnimationEnabledExperimental(boolean enabled);
<ide>
<ide> @ReactMethod
<del> public abstract void measureLayoutRelativeToParent(Double reactTag, Callback errorCallback,
<del> Callback callback);
<add> public abstract void showPopupMenu(Double reactTag, ReadableArray items, Callback error,
<add> Callback success);
<ide>
<ide> @ReactMethod
<del> public abstract void setChildren(Double containerTag, ReadableArray reactTags);
<add> public abstract void updateView(double reactTag, String viewName, ReadableMap props);
<add>
<add> @ReactMethod
<add> public abstract void viewIsDescendantOf(Double reactTag, Double ancestorReactTag,
<add> Callback callback);
<ide>
<ide> protected abstract Map<String, Object> getTypedExportedConstants();
<ide>
<ide><path>ReactAndroid/src/main/java/com/facebook/fbreact/specs/NativeVibrationSpec.java
<ide> public NativeVibrationSpec(ReactApplicationContext reactContext) {
<ide> public abstract void cancel();
<ide>
<ide> @ReactMethod
<del> public abstract void vibrateByPattern(ReadableArray pattern, double repeat);
<add> public abstract void vibrate(double pattern);
<ide>
<ide> @ReactMethod
<del> public abstract void vibrate(double pattern);
<add> public abstract void vibrateByPattern(ReadableArray pattern, double repeat);
<ide> }
<ide><path>ReactAndroid/src/main/java/com/facebook/fbreact/specs/NativeWebSocketModuleSpec.java
<ide> public NativeWebSocketModuleSpec(ReactApplicationContext reactContext) {
<ide> }
<ide>
<ide> @ReactMethod
<del> public abstract void sendBinary(String base64String, double forSocketID);
<add> public abstract void addListener(String eventName);
<ide>
<ide> @ReactMethod
<del> public abstract void removeListeners(double count);
<add> public abstract void close(double code, String reason, double socketID);
<ide>
<ide> @ReactMethod
<del> public abstract void ping(double socketID);
<add> public abstract void connect(String url, ReadableArray protocols, ReadableMap options,
<add> double socketID);
<ide>
<ide> @ReactMethod
<del> public abstract void send(String message, double forSocketID);
<add> public abstract void ping(double socketID);
<ide>
<ide> @ReactMethod
<del> public abstract void close(double code, String reason, double socketID);
<add> public abstract void removeListeners(double count);
<ide>
<ide> @ReactMethod
<del> public abstract void connect(String url, ReadableArray protocols, ReadableMap options,
<del> double socketID);
<add> public abstract void send(String message, double forSocketID);
<ide>
<ide> @ReactMethod
<del> public abstract void addListener(String eventName);
<add> public abstract void sendBinary(String base64String, double forSocketID);
<ide> } | 35 |
Ruby | Ruby | add a form hook to erb scaffold. customize and go! | bcf754d8c471187da386e147707598e5c8e52090 | <ide><path>railties/lib/generators.rb
<ide> module Generators
<ide> },
<ide>
<ide> :erb => {
<add> :form => false,
<ide> :layout => true
<ide> },
<ide>
<ide><path>railties/lib/generators/erb/scaffold/scaffold_generator.rb
<ide> class ScaffoldGenerator < Base
<ide>
<ide> argument :attributes, :type => :hash, :default => {}, :banner => "field:type field:type"
<ide>
<del> class_option :singleton, :type => :boolean, :desc => "Supply to skip index action"
<del> class_option :layout, :type => :boolean
<add> class_option :form, :type => :boolean
<add> class_option :layout, :type => :boolean
<add> class_option :singleton, :type => :boolean, :desc => "Supply to skip index view"
<ide>
<ide> def create_root_folder
<ide> empty_directory File.join("app/views", controller_file_path)
<ide> def copy_new_file
<ide> copy_view :new
<ide> end
<ide>
<add> def copy_form_file
<add> return unless options[:form]
<add> copy_view :_form
<add> end
<add>
<ide> def copy_layout_file
<ide> return unless options[:layout]
<ide> template "layout.html.erb", | 2 |
Javascript | Javascript | fix a bunch of flow annotations | 2a3fe0625d8ec322c468314421326ea512d9c6f9 | <ide><path>packager/src/Bundler/Bundle.js
<ide> class Bundle extends BundleBase {
<ide>
<ide> finalize(options: FinalizeOptions) {
<ide> options = options || {};
<del> if (options.runMainModule) {
<add> if (options.runModule) {
<ide> /* $FlowFixMe: this is unsound, as nothing enforces runBeforeMainModule
<del> * to be available if `runMainModule` is true. Refactor. */
<add> * to be available if `runModule` is true. Refactor. */
<ide> options.runBeforeMainModule.forEach(this._addRequireCall, this);
<ide> /* $FlowFixMe: this is unsound, as nothing enforces the module ID to have
<ide> * been set beforehand. */
<ide> class Bundle extends BundleBase {
<ide> this._numRequireCalls += 1;
<ide> }
<ide>
<del> _getInlineSourceMap(dev) {
<add> _getInlineSourceMap(dev: ?boolean) {
<ide> if (this._inlineSourceMap == null) {
<ide> const sourceMap = this.getSourceMapString({excludeSource: true, dev});
<ide> /*eslint-env node*/
<ide> class Bundle extends BundleBase {
<ide> * that makes use of of the `sections` field to combine sourcemaps by adding
<ide> * an offset. This is supported only by Chrome for now.
<ide> */
<del> _getCombinedSourceMaps(options): CombinedSourceMap {
<add> _getCombinedSourceMaps(options: {excludeSource?: boolean}): CombinedSourceMap {
<ide> const result = {
<ide> version: 3,
<ide> file: this._getSourceMapFile(),
<ide><path>packager/src/Bundler/BundleBase.js
<ide> const ModuleTransport = require('../lib/ModuleTransport');
<ide> export type FinalizeOptions = {
<ide> allowUpdates?: boolean,
<ide> runBeforeMainModule?: Array<string>,
<del> runMainModule?: boolean,
<add> runModule?: boolean,
<ide> };
<ide>
<ide> export type GetSourceOptions = {
<ide><path>packager/src/Bundler/__tests__/Bundle-test.js
<ide> describe('Bundle', () => {
<ide> bundle.setMainModuleId('foo');
<ide> bundle.finalize({
<ide> runBeforeMainModule: ['bar'],
<del> runMainModule: true,
<add> runModule: true,
<ide> });
<ide> expect(bundle.getSource({dev: true})).toBe([
<ide> 'transformed foo;',
<ide> describe('Bundle', () => {
<ide> otherBundle.setMainModuleId('foo');
<ide> otherBundle.finalize({
<ide> runBeforeMainModule: ['InitializeCore'],
<del> runMainModule: true,
<add> runModule: true,
<ide> });
<ide>
<ide> const sourceMap = otherBundle.getSourceMap({dev: true});
<ide><path>packager/src/Bundler/__tests__/Bundler-test.js
<ide> describe('Bundler', function() {
<ide> expect(ithAddedModule(3)).toEqual('/root/file.json');
<ide>
<ide> expect(bundle.finalize.mock.calls[0]).toEqual([{
<del> runMainModule: true,
<add> runModule: true,
<ide> runBeforeMainModule: [],
<ide> allowUpdates: false,
<ide> }]);
<ide><path>packager/src/Bundler/index.js
<ide> export type GetTransformOptions = (
<ide> getDependencies: string => Promise<Array<string>>,
<ide> ) => {} | Promise<{}>;
<ide>
<add>type Asset = {
<add> __packager_asset: boolean,
<add> fileSystemLocation: string,
<add> httpServerLocation: string,
<add> width: ?number,
<add> height: ?number,
<add> scales: number,
<add> files: Array<string>,
<add> hash: string,
<add> name: string,
<add> type: string,
<add>};
<add>
<ide> const sizeOf = denodeify(imageSize);
<ide>
<ide> const noop = () => {};
<ide> class Bundler {
<ide> });
<ide> }
<ide>
<del> _sourceHMRURL(platform, hmrpath) {
<add> _sourceHMRURL(platform: ?string, hmrpath: string) {
<ide> return this._hmrURL(
<ide> '',
<ide> platform,
<ide> class Bundler {
<ide> );
<ide> }
<ide>
<del> _sourceMappingHMRURL(platform, hmrpath) {
<add> _sourceMappingHMRURL(platform: ?string, hmrpath: string) {
<ide> // Chrome expects `sourceURL` when eval'ing code
<ide> return this._hmrURL(
<ide> '\/\/# sourceURL=',
<ide> class Bundler {
<ide> );
<ide> }
<ide>
<del> _hmrURL(prefix, platform, extensionOverride, filePath) {
<add> _hmrURL(prefix: string, platform: ?string, extensionOverride: string, filePath: string) {
<ide> const matchingRoot = this._projectRoots.find(root => filePath.startsWith(root));
<ide>
<ide> if (!matchingRoot) {
<ide> class Bundler {
<ide> return (
<ide> prefix + resource +
<ide> '.' + extensionOverride + '?' +
<del> 'platform=' + platform + '&runModule=false&entryModuleOnly=true&hot=true'
<add> 'platform=' + (platform || '') + '&runModule=false&entryModuleOnly=true&hot=true'
<ide> );
<ide> }
<ide>
<ide> class Bundler {
<ide> }
<ide>
<ide> _bundle({
<add> assetPlugins,
<ide> bundle,
<del> entryFile,
<del> runModule: runMainModule,
<del> runBeforeMainModule,
<ide> dev,
<del> minify,
<del> platform,
<del> moduleSystemDeps = [],
<del> hot,
<del> unbundle,
<add> entryFile,
<ide> entryModuleOnly,
<del> resolutionResponse,
<del> isolateModuleIDs,
<ide> generateSourceMaps,
<del> assetPlugins,
<add> hot,
<add> isolateModuleIDs,
<add> minify,
<add> moduleSystemDeps = [],
<ide> onProgress,
<add> platform,
<add> resolutionResponse,
<add> runBeforeMainModule,
<add> runModule,
<add> unbundle,
<add> }: {
<add> assetPlugins?: Array<string>,
<add> bundle: Bundle,
<add> dev: boolean,
<add> entryFile?: string,
<add> entryModuleOnly?: boolean,
<add> generateSourceMaps?: boolean,
<add> hot?: boolean,
<add> isolateModuleIDs?: boolean,
<add> minify?: boolean,
<add> moduleSystemDeps?: Array<Module>,
<add> onProgress?: () => void,
<add> platform?: ?string,
<add> resolutionResponse?: ResolutionResponse,
<add> runBeforeMainModule?: boolean,
<add> runModule?: boolean,
<add> unbundle?: boolean,
<ide> }) {
<ide> const onResolutionResponse = (response: ResolutionResponse) => {
<ide> /* $FlowFixMe: looks like ResolutionResponse is monkey-patched
<ide> * with `getModuleId`. */
<ide> bundle.setMainModuleId(response.getModuleId(getMainModule(response)));
<del> if (entryModuleOnly) {
<add> if (entryModuleOnly && entryFile) {
<ide> response.dependencies = response.dependencies.filter(module =>
<del> module.path.endsWith(entryFile)
<add> module.path.endsWith(entryFile || '')
<ide> );
<ide> } else {
<ide> response.dependencies = moduleSystemDeps.concat(response.dependencies);
<ide> class Bundler {
<ide> : undefined;
<ide>
<ide> finalBundle.finalize({
<del> runMainModule,
<add> runModule,
<ide> runBeforeMainModule: runBeforeMainModuleIds,
<ide> allowUpdates: this._opts.allowBundleUpdates,
<ide> });
<ide> class Bundler {
<ide> });
<ide> }
<ide>
<del> _generateAssetObjAndCode(module, assetPlugins, platform: ?string = null) {
<add> _generateAssetObjAndCode(
<add> module: Module,
<add> assetPlugins: Array<string>,
<add> platform: ?string = null,
<add> ) {
<ide> const relPath = getPathRelativeToRoot(this._projectRoots, module.path);
<ide> var assetUrlPath = joinPath('/assets', pathDirname(relPath));
<ide>
<ide> class Bundler {
<ide> });
<ide> }
<ide>
<del> _applyAssetPlugins(assetPlugins, asset) {
<add> _applyAssetPlugins(
<add> assetPlugins: Array<string>,
<add> asset: Asset,
<add> ) {
<ide> if (!assetPlugins.length) {
<ide> return asset;
<ide> }
<ide><path>packager/src/Bundler/source-map/B64Builder.js
<ide> class B64Builder {
<ide> return this.buffer.toString('ascii', 0, this.pos);
<ide> }
<ide>
<del> _writeByte(byte) {
<add> _writeByte(byte: number) {
<ide> if (this.pos === this.buffer.length) {
<ide> this._realloc();
<ide> }
<ide><path>packager/src/ModuleGraph/node-haste/HasteFS.js
<ide> module.exports = class HasteFS {
<ide> return Array.from(this.files.keys());
<ide> }
<ide>
<add> matchFiles() {
<add> throw new Error(
<add> 'HasteFS.matchFiles is not implemented yet.'
<add> );
<add> }
<add>
<ide> matches(directory: string, pattern: RegExp) {
<ide> const entries = this.directoryEntries.get(directory);
<ide> return entries ? entries.filter(pattern.test, pattern) : [];
<ide><path>packager/src/ModuleGraph/node-haste/node-haste.js
<ide> exports.createResolveFn = function(options: ResolveOptions): ResolveFn {
<ide> dirExists: filePath => hasteFS.dirExists(filePath),
<ide> entryPath: '',
<ide> extraNodeModules,
<del> /* $FlowFixMe: object is missing matchFiles method */
<ide> hasteFS,
<ide> hasteMap,
<ide> helpers,
<ide><path>packager/src/node-haste/DependencyGraph/ResolutionRequest.js
<ide> class ResolutionRequest {
<ide> this._resetResolutionCache();
<ide> }
<ide>
<del> _tryResolve(action, secondaryAction) {
<add> _tryResolve(action: () => Promise<string>, secondaryAction: () => ?Promise<string>) {
<ide> return action().catch((error) => {
<ide> if (error.type !== 'UnableToResolveError') {
<ide> throw error;
<ide> class ResolutionRequest {
<ide> });
<ide> }
<ide>
<del> _resolveHasteDependency(fromModule, toModuleName) {
<add> _resolveHasteDependency(fromModule: Module, toModuleName: string) {
<ide> toModuleName = normalizePath(toModuleName);
<ide>
<ide> let p = fromModule.getPackage();
<ide> class ResolutionRequest {
<ide> });
<ide> }
<ide>
<del> _redirectRequire(fromModule, modulePath) {
<add> _redirectRequire(fromModule: Module, modulePath: string) {
<ide> return Promise.resolve(fromModule.getPackage()).then(p => {
<ide> if (p) {
<ide> return p.redirectRequire(modulePath);
<ide> class ResolutionRequest {
<ide> });
<ide> }
<ide>
<del> _resolveFileOrDir(fromModule, toModuleName) {
<add> _resolveFileOrDir(fromModule: Module, toModuleName: string) {
<ide> const potentialModulePath = isAbsolutePath(toModuleName) ?
<ide> toModuleName :
<ide> path.join(path.dirname(fromModule.path), toModuleName);
<ide> class ResolutionRequest {
<ide> );
<ide> }
<ide>
<del> _resolveNodeDependency(fromModule, toModuleName) {
<add> _resolveNodeDependency(fromModule: Module, toModuleName: string) {
<ide> if (isRelativeImport(toModuleName) || isAbsolutePath(toModuleName)) {
<ide> return this._resolveFileOrDir(fromModule, toModuleName);
<ide> } else {
<ide> class ResolutionRequest {
<ide> }
<ide> }
<ide>
<del> _loadAsFile(potentialModulePath, fromModule, toModule) {
<add> _loadAsFile(potentialModulePath: string, fromModule: Module, toModule: string) {
<ide> return Promise.resolve().then(() => {
<ide> if (this._helpers.isAssetFile(potentialModulePath)) {
<ide> let dirname = path.dirname(potentialModulePath);
<ide> class ResolutionRequest {
<ide> });
<ide> }
<ide>
<del> _loadAsDir(potentialDirPath, fromModule, toModule) {
<add> _loadAsDir(potentialDirPath: string, fromModule: Module, toModule: string) {
<ide> return Promise.resolve().then(() => {
<ide> if (!this._dirExists(potentialDirPath)) {
<ide> throw new UnableToResolveError(
<ide><path>packager/src/node-haste/index.js
<ide> class DependencyGraph {
<ide> return platform;
<ide> }
<ide>
<del> _getAbsolutePath(filePath) {
<add> _getAbsolutePath(filePath: string) {
<ide> if (isAbsolutePath(filePath)) {
<ide> return path.resolve(filePath);
<ide> } | 10 |
Javascript | Javascript | use optional chaining to simplify checks | f34c0e0bc00bac9d1f4e375b81b8669ff0b3ce6e | <ide><path>lib/internal/tls/secure-context.js
<ide> function configSecureContext(context, options = {}, name = 'options') {
<ide> for (let i = 0; i < key.length; ++i) {
<ide> const val = key[i];
<ide> const pem = (
<del> val !== undefined && val !== null &&
<del> val.pem !== undefined ? val.pem : val);
<add> val?.pem !== undefined ? val.pem : val);
<ide> const pass = (
<del> val !== undefined && val !== null &&
<del> val.passphrase !== undefined ? val.passphrase : passphrase);
<add> val?.passphrase !== undefined ? val.passphrase : passphrase);
<ide> setKey(context, pem, pass, name);
<ide> }
<ide> } else { | 1 |
Go | Go | remove directory when removing devicemapper device | 2343fe44533f19ebae5e6127f4a2a19d1d8773fa | <ide><path>graphdriver/devmapper/driver.go
<ide> import (
<ide> "github.com/dotcloud/docker/graphdriver"
<ide> "github.com/dotcloud/docker/utils"
<ide> "io/ioutil"
<add> "os"
<ide> "path"
<ide> )
<ide>
<ide> func (d *Driver) Remove(id string) error {
<ide> return err
<ide> }
<ide> // This assumes the device has been properly Get/Put:ed and thus is unmounted
<del> return d.DeviceSet.DeleteDevice(id)
<add> if err := d.DeviceSet.DeleteDevice(id); err != nil {
<add> return err
<add> }
<add>
<add> mp := path.Join(d.home, "mnt", id)
<add> if err := os.RemoveAll(mp); err != nil && !os.IsNotExist(err) {
<add> return err
<add> }
<add>
<add> return nil
<ide> }
<ide>
<ide> func (d *Driver) Get(id string) (string, error) { | 1 |
Java | Java | make use of enhanced messageheaderaccessor support | ae942ffdb89ae103b6f9e076ec9548594317e2f9 | <ide><path>spring-messaging/src/main/java/org/springframework/messaging/core/AbstractMessageSendingTemplate.java
<ide> import org.springframework.messaging.converter.MessageConversionException;
<ide> import org.springframework.messaging.converter.MessageConverter;
<ide> import org.springframework.messaging.converter.SimpleMessageConverter;
<add>import org.springframework.messaging.support.MessageHeaderAccessor;
<ide> import org.springframework.util.Assert;
<ide>
<ide> /**
<ide> public void convertAndSend(D destination, Object payload, Map<String, Object> he
<ide> MessagePostProcessor postProcessor) throws MessagingException {
<ide>
<ide> headers = processHeadersToSend(headers);
<add>
<add> MessageHeaders messageHeaders;
<add> if (headers != null && headers instanceof MessageHeaders) {
<add> MessageHeaderAccessor.getAccessor()
<add>
<add> }
<add>
<ide> MessageHeaders messageHeaders = (headers != null) ? new MessageHeaders(headers) : null;
<ide> Message<?> message = this.converter.toMessage(payload, messageHeaders);
<ide>
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/handler/annotation/support/HeadersMethodArgumentResolver.java
<ide> import org.springframework.messaging.handler.annotation.Headers;
<ide> import org.springframework.messaging.handler.invocation.HandlerMethodArgumentResolver;
<ide> import org.springframework.messaging.support.MessageHeaderAccessor;
<del>import org.springframework.util.ClassUtils;
<add>import org.springframework.util.Assert;
<ide> import org.springframework.util.ReflectionUtils;
<ide>
<ide> /**
<ide> */
<ide> public class HeadersMethodArgumentResolver implements HandlerMethodArgumentResolver {
<ide>
<del>
<ide> @Override
<ide> public boolean supportsParameter(MethodParameter parameter) {
<ide> Class<?> paramType = parameter.getParameterType();
<ide> public Object resolveArgument(MethodParameter parameter, Message<?> message) thr
<ide> return message.getHeaders();
<ide> }
<ide> else if (MessageHeaderAccessor.class.equals(paramType)) {
<del> return new MessageHeaderAccessor(message);
<add> MessageHeaderAccessor accessor = MessageHeaderAccessor.getAccessor(message, MessageHeaderAccessor.class);
<add> return (accessor != null ? accessor : new MessageHeaderAccessor(message));
<ide> }
<ide> else if (MessageHeaderAccessor.class.isAssignableFrom(paramType)) {
<del> Method factoryMethod = ClassUtils.getMethod(paramType, "wrap", Message.class);
<del> return ReflectionUtils.invokeMethod(factoryMethod, null, message);
<add> MessageHeaderAccessor accessor = MessageHeaderAccessor.getAccessor(message, MessageHeaderAccessor.class);
<add> if (accessor != null && paramType.isAssignableFrom(accessor.getClass())) {
<add> return accessor;
<add> }
<add> else {
<add> Method method = ReflectionUtils.findMethod(paramType, "wrap", Message.class);
<add> Assert.notNull(method, "Cannot create accessor of type " + paramType + " for message " + message);
<add> return ReflectionUtils.invokeMethod(method, null, message);
<add> }
<ide> }
<ide> else {
<del> throw new IllegalStateException("Unexpected method parameter type "
<del> + paramType + "in method " + parameter.getMethod() + ". "
<add> throw new IllegalStateException(
<add> "Unexpected method parameter type " + paramType + "in method " + parameter.getMethod() + ". "
<ide> + "@Headers method arguments must be assignable to java.util.Map.");
<ide> }
<ide> }
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/handler/invocation/AbstractMethodMessageHandler.java
<ide> import org.springframework.messaging.handler.HandlerMethod;
<ide> import org.springframework.messaging.handler.HandlerMethodSelector;
<ide> import org.springframework.messaging.support.MessageBuilder;
<add>import org.springframework.messaging.support.MessageHeaderAccessor;
<ide> import org.springframework.util.Assert;
<ide> import org.springframework.util.ClassUtils;
<ide> import org.springframework.util.CollectionUtils;
<ide> public void handleMessage(Message<?> message) throws MessagingException {
<ide> logger.debug("Handling message, lookupDestination=" + lookupDestination);
<ide> }
<ide>
<del> message = MessageBuilder.fromMessage(message).setHeader(
<del> DestinationPatternsMessageCondition.LOOKUP_DESTINATION_HEADER, lookupDestination).build();
<add> MessageHeaderAccessor headerAccessor = MessageHeaderAccessor.getMutableAccessor(message);
<add> headerAccessor.setHeader(DestinationPatternsMessageCondition.LOOKUP_DESTINATION_HEADER, lookupDestination);
<add> message = MessageBuilder.createMessage(message.getPayload(), headerAccessor.getMessageHeaders());
<ide>
<ide> handleMessageInternal(message, lookupDestination);
<ide> }
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/simp/SimpMessageTypeMessageCondition.java
<ide> public SimpMessageTypeMessageCondition combine(SimpMessageTypeMessageCondition o
<ide> @Override
<ide> public SimpMessageTypeMessageCondition getMatchingCondition(Message<?> message) {
<ide>
<del> Object actualMessageType = message.getHeaders().get(SimpMessageHeaderAccessor.MESSAGE_TYPE_HEADER);
<add> Object actualMessageType = SimpMessageHeaderAccessor.getMessageType(message.getHeaders());
<ide> if (actualMessageType == null) {
<ide> return null;
<ide> }
<ide> public SimpMessageTypeMessageCondition getMatchingCondition(Message<?> message)
<ide>
<ide> @Override
<ide> public int compareTo(SimpMessageTypeMessageCondition other, Message<?> message) {
<del> Object actualMessageType = message.getHeaders().get(SimpMessageHeaderAccessor.MESSAGE_TYPE_HEADER);
<add> Object actualMessageType = SimpMessageHeaderAccessor.getMessageType(message.getHeaders());
<ide> if (actualMessageType != null) {
<ide> if (actualMessageType.equals(this.getMessageType()) && actualMessageType.equals(other.getMessageType())) {
<ide> return 0;
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/simp/SimpMessagingTemplate.java
<ide> public long getSendTimeout() {
<ide>
<ide> @Override
<ide> public void send(Message<?> message) {
<del> SimpMessageHeaderAccessor headers = SimpMessageHeaderAccessor.wrap(message);
<del> String destination = headers.getDestination();
<add> String destination = SimpMessageHeaderAccessor.getDestination(message.getHeaders());
<ide> destination = (destination != null) ? destination : getRequiredDefaultDestination();
<ide> doSend(destination, message);
<ide> }
<ide> public void send(Message<?> message) {
<ide> protected void doSend(String destination, Message<?> message) {
<ide> Assert.notNull(destination, "Destination must not be null");
<ide>
<del> SimpMessageHeaderAccessor headers = SimpMessageHeaderAccessor.wrap(message);
<del> headers.setDestination(destination);
<del> headers.setMessageTypeIfNotSet(SimpMessageType.MESSAGE);
<del> message = MessageBuilder.withPayload(message.getPayload()).setHeaders(headers).build();
<add> SimpMessageHeaderAccessor headerAccessor = SimpMessageHeaderAccessor.wrap(message);
<add> headerAccessor.setDestination(destination);
<add> headerAccessor.setMessageTypeIfNotSet(SimpMessageType.MESSAGE);
<add> message = MessageBuilder.createMessage(message.getPayload(), headerAccessor.getMessageHeaders());
<ide>
<ide> long timeout = this.sendTimeout;
<ide> boolean sent = (timeout >= 0)
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/simp/annotation/support/PrincipalMethodArgumentResolver.java
<ide> public boolean supportsParameter(MethodParameter parameter) {
<ide>
<ide> @Override
<ide> public Object resolveArgument(MethodParameter parameter, Message<?> message) throws Exception {
<del> SimpMessageHeaderAccessor headers = SimpMessageHeaderAccessor.wrap(message);
<del> Principal user = headers.getUser();
<add> Principal user = SimpMessageHeaderAccessor.getUser(message.getHeaders());
<ide> if (user == null) {
<ide> throw new MissingSessionUserException(message);
<ide> }
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/simp/annotation/support/SendToMethodReturnValueHandler.java
<ide> import org.springframework.core.annotation.AnnotationUtils;
<ide> import org.springframework.messaging.Message;
<ide> import org.springframework.messaging.MessageChannel;
<add>import org.springframework.messaging.MessageHeaders;
<ide> import org.springframework.messaging.core.MessagePostProcessor;
<ide> import org.springframework.messaging.handler.DestinationPatternsMessageCondition;
<ide> import org.springframework.messaging.handler.annotation.SendTo;
<ide> public boolean supportsReturnType(MethodParameter returnType) {
<ide> }
<ide>
<ide> @Override
<del> public void handleReturnValue(Object returnValue, MethodParameter returnType, Message<?> inputMessage)
<add> public void handleReturnValue(Object returnValue, MethodParameter returnType, Message<?> message)
<ide> throws Exception {
<ide>
<ide> if (returnValue == null) {
<ide> return;
<ide> }
<ide>
<del> SimpMessageHeaderAccessor inputHeaders = SimpMessageHeaderAccessor.wrap(inputMessage);
<del>
<del> String sessionId = inputHeaders.getSessionId();
<add> MessageHeaders headers = message.getHeaders();
<add> String sessionId = SimpMessageHeaderAccessor.getSessionId(headers);
<ide> MessagePostProcessor postProcessor = new SessionHeaderPostProcessor(sessionId);
<ide>
<ide> SendToUser sendToUser = returnType.getMethodAnnotation(SendToUser.class);
<ide> if (sendToUser != null) {
<del> Principal principal = inputHeaders.getUser();
<add> Principal principal = SimpMessageHeaderAccessor.getUser(headers);
<ide> if (principal == null) {
<del> throw new MissingSessionUserException(inputMessage);
<add> throw new MissingSessionUserException(message);
<ide> }
<ide> String userName = principal.getName();
<ide> if (principal instanceof DestinationUserNameProvider) {
<ide> userName = ((DestinationUserNameProvider) principal).getDestinationUserName();
<ide> }
<del> String[] destinations = getTargetDestinations(sendToUser, inputHeaders, this.defaultUserDestinationPrefix);
<add> String[] destinations = getTargetDestinations(sendToUser, message, this.defaultUserDestinationPrefix);
<ide> for (String destination : destinations) {
<ide> this.messagingTemplate.convertAndSendToUser(userName, destination, returnValue, postProcessor);
<ide> }
<ide> return;
<ide> }
<ide> else {
<ide> SendTo sendTo = returnType.getMethodAnnotation(SendTo.class);
<del> String[] destinations = getTargetDestinations(sendTo, inputHeaders, this.defaultDestinationPrefix);
<add> String[] destinations = getTargetDestinations(sendTo, message, this.defaultDestinationPrefix);
<ide> for (String destination : destinations) {
<ide> this.messagingTemplate.convertAndSend(destination, returnValue, postProcessor);
<ide> }
<ide> }
<ide> }
<ide>
<del> protected String[] getTargetDestinations(Annotation annot, SimpMessageHeaderAccessor inputHeaders,
<del> String defaultPrefix) {
<add> protected String[] getTargetDestinations(Annotation annot, Message<?> message, String defaultPrefix) {
<ide>
<ide> if (annot != null) {
<ide> String[] value = (String[]) AnnotationUtils.getValue(annot);
<ide> if (!ObjectUtils.isEmpty(value)) {
<ide> return value;
<ide> }
<ide> }
<del> return new String[] { defaultPrefix +
<del> inputHeaders.getHeader(DestinationPatternsMessageCondition.LOOKUP_DESTINATION_HEADER) };
<add> String name = DestinationPatternsMessageCondition.LOOKUP_DESTINATION_HEADER;
<add> return new String[] { defaultPrefix + message.getHeaders().get(name) };
<ide> }
<ide>
<ide>
<ide> public SessionHeaderPostProcessor(String sessionId) {
<ide> public Message<?> postProcessMessage(Message<?> message) {
<ide> SimpMessageHeaderAccessor headers = SimpMessageHeaderAccessor.wrap(message);
<ide> headers.setSessionId(this.sessionId);
<del> return MessageBuilder.withPayload(message.getPayload()).setHeaders(headers).build();
<add> return MessageBuilder.createMessage(message.getPayload(), headers.getMessageHeaders());
<ide> }
<ide> }
<ide>
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/simp/annotation/support/SimpAnnotationMethodMessageHandler.java
<ide> import org.springframework.util.AntPathMatcher;
<ide> import org.springframework.util.Assert;
<ide> import org.springframework.util.ClassUtils;
<add>import org.springframework.util.CollectionUtils;
<ide> import org.springframework.util.PathMatcher;
<ide> import org.springframework.validation.Errors;
<ide> import org.springframework.validation.Validator;
<ide> protected Set<String> getDirectLookupDestinations(SimpMessageMappingInfo mapping
<ide>
<ide> @Override
<ide> protected String getDestination(Message<?> message) {
<del> return (String) message.getHeaders().get(SimpMessageHeaderAccessor.DESTINATION_HEADER);
<add> return (String) SimpMessageHeaderAccessor.getDestination(message.getHeaders());
<ide> }
<ide>
<ide> @Override
<ide> public int compare(SimpMessageMappingInfo info1, SimpMessageMappingInfo info2) {
<ide> protected void handleMatch(SimpMessageMappingInfo mapping, HandlerMethod handlerMethod,
<ide> String lookupDestination, Message<?> message) {
<ide>
<del> SimpMessageHeaderAccessor headers = SimpMessageHeaderAccessor.wrap(message);
<del>
<ide> String matchedPattern = mapping.getDestinationConditions().getPatterns().iterator().next();
<ide> Map<String, String> vars = getPathMatcher().extractUriTemplateVariables(matchedPattern, lookupDestination);
<ide>
<del> headers.setHeader(DestinationVariableMethodArgumentResolver.DESTINATION_TEMPLATE_VARIABLES_HEADER, vars);
<del> message = MessageBuilder.withPayload(message.getPayload()).setHeaders(headers).build();
<add> if (!CollectionUtils.isEmpty(vars)) {
<add> SimpMessageHeaderAccessor headers = SimpMessageHeaderAccessor.wrap(message);
<add> headers.setHeader(DestinationVariableMethodArgumentResolver.DESTINATION_TEMPLATE_VARIABLES_HEADER, vars);
<add> message = MessageBuilder.createMessage(message.getPayload(), headers.getMessageHeaders());
<add>
<add> }
<ide>
<ide> super.handleMatch(mapping, handlerMethod, lookupDestination, message);
<ide> }
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/simp/annotation/support/SubscriptionMethodReturnValueHandler.java
<ide>
<ide> import org.springframework.core.MethodParameter;
<ide> import org.springframework.messaging.Message;
<add>import org.springframework.messaging.MessageHeaders;
<ide> import org.springframework.messaging.core.MessagePostProcessor;
<ide> import org.springframework.messaging.core.MessageSendingOperations;
<ide> import org.springframework.messaging.handler.annotation.SendTo;
<ide> public void handleReturnValue(Object returnValue, MethodParameter returnType, Me
<ide> return;
<ide> }
<ide>
<del> SimpMessageHeaderAccessor inputHeaders = SimpMessageHeaderAccessor.wrap(message);
<del> String sessionId = inputHeaders.getSessionId();
<del> String subscriptionId = inputHeaders.getSubscriptionId();
<del> String destination = inputHeaders.getDestination();
<add> MessageHeaders headers = message.getHeaders();
<add> String sessionId = SimpMessageHeaderAccessor.getSessionId(headers);
<add> String subscriptionId = SimpMessageHeaderAccessor.getSubscriptionId(headers);
<add> String destination = SimpMessageHeaderAccessor.getDestination(headers);
<ide>
<del> Assert.state(inputHeaders.getSubscriptionId() != null,
<del> "No subsriptiondId in input message to method " + returnType.getMethod());
<add> Assert.state(subscriptionId != null, "No subsriptiondId in input message to method " + returnType.getMethod());
<ide>
<ide> MessagePostProcessor postProcessor = new SubscriptionHeaderPostProcessor(sessionId, subscriptionId);
<ide> this.messagingTemplate.convertAndSend(destination, returnValue, postProcessor);
<ide> public SubscriptionHeaderPostProcessor(String sessionId, String subscriptionId)
<ide>
<ide> @Override
<ide> public Message<?> postProcessMessage(Message<?> message) {
<del> SimpMessageHeaderAccessor headers = SimpMessageHeaderAccessor.wrap(message);
<del> headers.setSessionId(this.sessionId);
<del> headers.setSubscriptionId(this.subscriptionId);
<del> headers.setMessageTypeIfNotSet(SimpMessageType.MESSAGE);
<del> return MessageBuilder.withPayload(message.getPayload()).setHeaders(headers).build();
<add> SimpMessageHeaderAccessor headerAccessor = SimpMessageHeaderAccessor.wrap(message);
<add> headerAccessor.setSessionId(this.sessionId);
<add> headerAccessor.setSubscriptionId(this.subscriptionId);
<add> headerAccessor.setMessageTypeIfNotSet(SimpMessageType.MESSAGE);
<add> return MessageBuilder.createMessage(message.getPayload(), headerAccessor.getMessageHeaders());
<ide> }
<ide> }
<ide> }
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/simp/broker/AbstractSubscriptionRegistry.java
<ide> import org.apache.commons.logging.Log;
<ide> import org.apache.commons.logging.LogFactory;
<ide> import org.springframework.messaging.Message;
<add>import org.springframework.messaging.MessageHeaders;
<ide> import org.springframework.messaging.simp.SimpMessageHeaderAccessor;
<ide> import org.springframework.messaging.simp.SimpMessageType;
<add>import org.springframework.messaging.support.MessageHeaderAccessor;
<ide> import org.springframework.util.MultiValueMap;
<ide>
<ide> /**
<ide> public abstract class AbstractSubscriptionRegistry implements SubscriptionRegist
<ide>
<ide> @Override
<ide> public final void registerSubscription(Message<?> message) {
<del> SimpMessageHeaderAccessor headers = SimpMessageHeaderAccessor.wrap(message);
<del> if (!SimpMessageType.SUBSCRIBE.equals(headers.getMessageType())) {
<add>
<add> MessageHeaders headers = message.getHeaders();
<add> SimpMessageType type = SimpMessageHeaderAccessor.getMessageType(headers);
<add>
<add> if (!SimpMessageType.SUBSCRIBE.equals(type)) {
<ide> logger.error("Expected SUBSCRIBE message: " + message);
<ide> return;
<ide> }
<del> String sessionId = headers.getSessionId();
<add> String sessionId = SimpMessageHeaderAccessor.getSessionId(headers);
<ide> if (sessionId == null) {
<ide> logger.error("Ignoring subscription. No sessionId in message: " + message);
<ide> return;
<ide> }
<del> String subscriptionId = headers.getSubscriptionId();
<add> String subscriptionId = SimpMessageHeaderAccessor.getSubscriptionId(headers);
<ide> if (subscriptionId == null) {
<ide> logger.error("Ignoring subscription. No subscriptionId in message: " + message);
<ide> return;
<ide> }
<del> String destination = headers.getDestination();
<add> String destination = SimpMessageHeaderAccessor.getDestination(headers);
<ide> if (destination == null) {
<ide> logger.error("Ignoring destination. No destination in message: " + message);
<ide> return;
<ide> }
<ide> if (logger.isDebugEnabled()) {
<del> logger.debug("Adding subscription id=" + headers.getSubscriptionId()
<del> + ", destination=" + headers.getDestination());
<add> logger.debug("Adding subscription id=" + subscriptionId + ", destination=" + destination);
<ide> }
<ide> addSubscriptionInternal(sessionId, subscriptionId, destination, message);
<ide> }
<ide> protected abstract void addSubscriptionInternal(String sessionId, String subscri
<ide>
<ide> @Override
<ide> public final void unregisterSubscription(Message<?> message) {
<del> SimpMessageHeaderAccessor headers = SimpMessageHeaderAccessor.wrap(message);
<del> if (!SimpMessageType.UNSUBSCRIBE.equals(headers.getMessageType())) {
<add>
<add> MessageHeaders headers = message.getHeaders();
<add> SimpMessageType type = SimpMessageHeaderAccessor.getMessageType(headers);
<add>
<add> if (!SimpMessageType.UNSUBSCRIBE.equals(type)) {
<ide> logger.error("Expected UNSUBSCRIBE message: " + message);
<ide> return;
<ide> }
<del> String sessionId = headers.getSessionId();
<add> String sessionId = SimpMessageHeaderAccessor.getSessionId(headers);
<ide> if (sessionId == null) {
<ide> logger.error("Ignoring subscription. No sessionId in message: " + message);
<ide> return;
<ide> }
<del> String subscriptionId = headers.getSubscriptionId();
<add> String subscriptionId = SimpMessageHeaderAccessor.getSubscriptionId(headers);
<ide> if (subscriptionId == null) {
<ide> logger.error("Ignoring subscription. No subscriptionId in message: " + message);
<ide> return;
<ide> public final void unregisterSubscription(Message<?> message) {
<ide>
<ide> @Override
<ide> public final MultiValueMap<String, String> findSubscriptions(Message<?> message) {
<del> SimpMessageHeaderAccessor headers = SimpMessageHeaderAccessor.wrap(message);
<del> if (!SimpMessageType.MESSAGE.equals(headers.getMessageType())) {
<del> logger.trace("Ignoring message type " + headers.getMessageType());
<add>
<add> MessageHeaders headers = message.getHeaders();
<add> SimpMessageType type = SimpMessageHeaderAccessor.getMessageType(headers);
<add>
<add> if (!SimpMessageType.MESSAGE.equals(type)) {
<add> logger.trace("Ignoring message type " + type);
<ide> return null;
<ide> }
<del> String destination = headers.getDestination();
<add> String destination = SimpMessageHeaderAccessor.getDestination(headers);
<ide> if (destination == null) {
<ide> logger.trace("Ignoring message, no destination");
<ide> return null;
<ide> }
<ide> MultiValueMap<String, String> result = findSubscriptionsInternal(destination, message);
<ide> if (logger.isTraceEnabled()) {
<del> logger.trace("Found " + result.size() + " subscriptions for destination=" + headers.getDestination());
<add> logger.trace("Found " + result.size() + " subscriptions for destination=" + destination);
<ide> }
<ide> return result;
<ide> }
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/simp/broker/SimpleBrokerMessageHandler.java
<ide>
<ide> import org.springframework.messaging.Message;
<ide> import org.springframework.messaging.MessageChannel;
<add>import org.springframework.messaging.MessageHeaders;
<ide> import org.springframework.messaging.SubscribableChannel;
<ide> import org.springframework.messaging.simp.SimpMessageHeaderAccessor;
<ide> import org.springframework.messaging.simp.SimpMessageType;
<ide> public void stopInternal() {
<ide> @Override
<ide> protected void handleMessageInternal(Message<?> message) {
<ide>
<del> SimpMessageHeaderAccessor headers = SimpMessageHeaderAccessor.wrap(message);
<del> SimpMessageType messageType = headers.getMessageType();
<del> String destination = headers.getDestination();
<add> MessageHeaders headers = message.getHeaders();
<add> SimpMessageType messageType = SimpMessageHeaderAccessor.getMessageType(headers);
<add> String destination = SimpMessageHeaderAccessor.getDestination(headers);
<add> String sessionId = SimpMessageHeaderAccessor.getSessionId(headers);
<ide>
<ide> if (!checkDestinationPrefix(destination)) {
<ide> if (logger.isTraceEnabled()) {
<ide> protected void handleMessageInternal(Message<?> message) {
<ide> return;
<ide> }
<ide>
<del> if (SimpMessageType.SUBSCRIBE.equals(messageType)) {
<add> if (SimpMessageType.MESSAGE.equals(messageType)) {
<add> sendMessageToSubscribers(destination, message);
<add> }
<add> else if (SimpMessageType.SUBSCRIBE.equals(messageType)) {
<ide> this.subscriptionRegistry.registerSubscription(message);
<ide> }
<ide> else if (SimpMessageType.UNSUBSCRIBE.equals(messageType)) {
<ide> this.subscriptionRegistry.unregisterSubscription(message);
<ide> }
<del> else if (SimpMessageType.MESSAGE.equals(messageType)) {
<del> sendMessageToSubscribers(headers.getDestination(), message);
<del> }
<ide> else if (SimpMessageType.DISCONNECT.equals(messageType)) {
<del> String sessionId = headers.getSessionId();
<del> this.subscriptionRegistry.unregisterAllSubscriptions(sessionId);
<add> this.subscriptionRegistry.unregisterAllSubscriptions(sessionId);
<ide> }
<ide> else if (SimpMessageType.CONNECT.equals(messageType)) {
<del> SimpMessageHeaderAccessor replyHeaders = SimpMessageHeaderAccessor.create(SimpMessageType.CONNECT_ACK);
<del> replyHeaders.setSessionId(headers.getSessionId());
<del> replyHeaders.setHeader(SimpMessageHeaderAccessor.CONNECT_MESSAGE_HEADER, message);
<del>
<del> Message<byte[]> connectAck = MessageBuilder.withPayload(EMPTY_PAYLOAD).setHeaders(replyHeaders).build();
<add> SimpMessageHeaderAccessor accessor = SimpMessageHeaderAccessor.create(SimpMessageType.CONNECT_ACK);
<add> accessor.setSessionId(sessionId);
<add> accessor.setHeader(SimpMessageHeaderAccessor.CONNECT_MESSAGE_HEADER, message);
<add> Message<byte[]> connectAck = MessageBuilder.createMessage(EMPTY_PAYLOAD, accessor.getMessageHeaders());
<ide> this.clientOutboundChannel.send(connectAck);
<ide> }
<add> else {
<add> if (logger.isTraceEnabled()) {
<add> logger.trace("Message type not supported. Ignoring: " + message);
<add> }
<add> }
<ide> }
<ide>
<ide> protected void sendMessageToSubscribers(String destination, Message<?> message) {
<ide> protected void sendMessageToSubscribers(String destination, Message<?> message)
<ide> }
<ide> for (String sessionId : subscriptions.keySet()) {
<ide> for (String subscriptionId : subscriptions.get(sessionId)) {
<del> SimpMessageHeaderAccessor headers = SimpMessageHeaderAccessor.wrap(message);
<del> headers.setSessionId(sessionId);
<del> headers.setSubscriptionId(subscriptionId);
<add> SimpMessageHeaderAccessor headerAccessor = SimpMessageHeaderAccessor.create(SimpMessageType.MESSAGE);
<add> headerAccessor.setSessionId(sessionId);
<add> headerAccessor.setSubscriptionId(subscriptionId);
<add> headerAccessor.copyHeadersIfAbsent(message.getHeaders());
<ide> Object payload = message.getPayload();
<del> Message<?> clientMessage = MessageBuilder.withPayload(payload).setHeaders(headers).build();
<add> Message<?> reply = MessageBuilder.createMessage(payload, headerAccessor.getMessageHeaders());
<ide> try {
<del> this.clientOutboundChannel.send(clientMessage);
<add> this.clientOutboundChannel.send(reply);
<ide> }
<ide> catch (Throwable ex) {
<del> logger.error("Failed to send message to destination=" + destination +
<del> ", sessionId=" + sessionId + ", subscriptionId=" + subscriptionId, ex);
<add> logger.error("Failed to send message=" + message, ex);
<ide> }
<ide> }
<ide> }
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/simp/stomp/StompBrokerRelayMessageHandler.java
<ide> import java.util.Map;
<ide> import java.util.concurrent.Callable;
<ide> import java.util.concurrent.ConcurrentHashMap;
<del>import java.util.concurrent.atomic.AtomicBoolean;
<ide>
<ide> import org.springframework.messaging.Message;
<ide> import org.springframework.messaging.MessageChannel;
<ide> import org.springframework.messaging.simp.SimpMessageType;
<ide> import org.springframework.messaging.simp.broker.AbstractBrokerMessageHandler;
<ide> import org.springframework.messaging.support.MessageBuilder;
<add>import org.springframework.messaging.support.MessageHeaderAccessor;
<ide> import org.springframework.messaging.tcp.FixedIntervalReconnectStrategy;
<ide> import org.springframework.messaging.tcp.TcpConnection;
<ide> import org.springframework.messaging.tcp.TcpConnectionHandler;
<ide> public class StompBrokerRelayMessageHandler extends AbstractBrokerMessageHandler
<ide> private static final Message<byte[]> HEARTBEAT_MESSAGE;
<ide>
<ide> static {
<del> SimpMessageHeaderAccessor headers = SimpMessageHeaderAccessor.create(SimpMessageType.HEARTBEAT);
<del> HEARTBEAT_MESSAGE = MessageBuilder.withPayload(new byte[] {'\n'}).setHeaders(headers).build();
<ide> EMPTY_TASK.run();
<add> StompHeaderAccessor accessor = StompHeaderAccessor.createForHeartbeat();
<add> HEARTBEAT_MESSAGE = MessageBuilder.createMessage(StompDecoder.HEARTBEAT_PAYLOAD, accessor.getMessageHeaders());
<ide> }
<ide>
<ide>
<ide> protected void stopInternal() {
<ide> @Override
<ide> protected void handleMessageInternal(Message<?> message) {
<ide>
<del> StompHeaderAccessor headers = StompHeaderAccessor.wrap(message);
<del> String sessionId = headers.getSessionId();
<add> String sessionId = SimpMessageHeaderAccessor.getSessionId(message.getHeaders());
<ide>
<ide> if (!isBrokerAvailable()) {
<del> if (sessionId == null || sessionId == SystemStompConnectionHandler.SESSION_ID) {
<add> if (sessionId == null || SystemStompConnectionHandler.SESSION_ID.equals(sessionId)) {
<ide> throw new MessageDeliveryException("Message broker is not active.");
<ide> }
<ide> if (logger.isTraceEnabled()) {
<del> logger.trace("Message broker is not active. Ignoring message id=" + message.getHeaders().getId());
<add> logger.trace("Message broker is not active. Ignoring: " + message);
<ide> }
<ide> return;
<ide> }
<ide>
<del> String destination = headers.getDestination();
<del> StompCommand command = headers.getCommand();
<del> SimpMessageType messageType = headers.getMessageType();
<add> StompHeaderAccessor stompAccessor;
<add> StompCommand command;
<ide>
<del> if (SimpMessageType.MESSAGE.equals(messageType)) {
<del> sessionId = (sessionId == null) ? SystemStompConnectionHandler.SESSION_ID : sessionId;
<del> headers.setSessionId(sessionId);
<del> command = headers.updateStompCommandAsClientMessage();
<del> message = MessageBuilder.withPayload(message.getPayload()).setHeaders(headers).build();
<add> MessageHeaderAccessor accessor = MessageHeaderAccessor.getAccessor(message, MessageHeaderAccessor.class);
<add> if (accessor == null) {
<add> logger.error("No header accessor, please use SimpMessagingTemplate. Ignoring: " + message);
<add> return;
<add> }
<add> else if (accessor instanceof StompHeaderAccessor) {
<add> stompAccessor = (StompHeaderAccessor) accessor;
<add> command = stompAccessor.getCommand();
<add> }
<add> else if (accessor instanceof SimpMessageHeaderAccessor) {
<add> stompAccessor = StompHeaderAccessor.wrap(message);
<add> command = stompAccessor.getCommand();
<add> if (command == null) {
<add> command = stompAccessor.updateStompCommandAsClientMessage();
<add> }
<add> }
<add> else {
<add> // Should not happen
<add> logger.error("Unexpected header accessor type: " + accessor + ". Ignoring: " + message);
<add> return;
<ide> }
<ide>
<ide> if (sessionId == null) {
<del> if (logger.isWarnEnabled()) {
<del> logger.warn("No sessionId, ignoring message: " + message);
<add> if (!SimpMessageType.MESSAGE.equals(stompAccessor.getMessageType())) {
<add> logger.error("Only STOMP SEND frames supported on \"system\" connection. Ignoring: " + message);
<add> return;
<ide> }
<del> return;
<add> sessionId = SystemStompConnectionHandler.SESSION_ID;
<add> stompAccessor.setSessionId(sessionId);
<ide> }
<ide>
<add> String destination = stompAccessor.getDestination();
<ide> if ((command != null) && command.requiresDestination() && !checkDestinationPrefix(destination)) {
<ide> if (logger.isTraceEnabled()) {
<ide> logger.trace("Ignoring message to destination=" + destination);
<ide> protected void handleMessageInternal(Message<?> message) {
<ide> logger.trace("Processing message=" + message);
<ide> }
<ide>
<del> if (SimpMessageType.CONNECT.equals(messageType)) {
<add> if (StompCommand.CONNECT.equals(command)) {
<ide> if (logger.isDebugEnabled()) {
<ide> logger.debug("Processing CONNECT (total connected=" + this.connectionHandlers.size() + ")");
<ide> }
<del> headers.setLogin(this.clientLogin);
<del> headers.setPasscode(this.clientPasscode);
<add> stompAccessor = (stompAccessor.isMutable() ? stompAccessor : StompHeaderAccessor.wrap(message));
<add> stompAccessor.setLogin(this.clientLogin);
<add> stompAccessor.setPasscode(this.clientPasscode);
<ide> if (getVirtualHost() != null) {
<del> headers.setHost(getVirtualHost());
<add> stompAccessor.setHost(getVirtualHost());
<ide> }
<del> StompConnectionHandler handler = new StompConnectionHandler(sessionId, headers);
<add> StompConnectionHandler handler = new StompConnectionHandler(sessionId, stompAccessor);
<ide> this.connectionHandlers.put(sessionId, handler);
<ide> this.tcpClient.connect(handler);
<ide> }
<del> else if (SimpMessageType.DISCONNECT.equals(messageType)) {
<add> else if (StompCommand.DISCONNECT.equals(command)) {
<ide> StompConnectionHandler handler = this.connectionHandlers.get(sessionId);
<ide> if (handler == null) {
<ide> if (logger.isTraceEnabled()) {
<ide> logger.trace("Connection already removed for sessionId '" + sessionId + "'");
<ide> }
<ide> return;
<ide> }
<del> handler.forward(message);
<add> handler.forward(message, stompAccessor);
<ide> }
<ide> else {
<ide> StompConnectionHandler handler = this.connectionHandlers.get(sessionId);
<ide> else if (SimpMessageType.DISCONNECT.equals(messageType)) {
<ide> }
<ide> return;
<ide> }
<del> handler.forward(message);
<add> handler.forward(message, stompAccessor);
<ide> }
<ide> }
<ide>
<ide> public void afterConnected(TcpConnection<byte[]> connection) {
<ide> logger.debug("Established TCP connection to broker in session '" + this.sessionId + "'");
<ide> }
<ide> this.tcpConnection = connection;
<del> connection.send(MessageBuilder.withPayload(EMPTY_PAYLOAD).setHeaders(this.connectHeaders).build());
<add> connection.send(MessageBuilder.createMessage(EMPTY_PAYLOAD, this.connectHeaders.getMessageHeaders()));
<ide> }
<ide>
<ide> @Override
<ide> private void sendStompErrorToClient(String errorText) {
<ide> StompHeaderAccessor headers = StompHeaderAccessor.create(StompCommand.ERROR);
<ide> headers.setSessionId(this.sessionId);
<ide> headers.setMessage(errorText);
<del> Message<?> errorMessage = MessageBuilder.withPayload(new byte[0]).setHeaders(headers).build();
<add> Message<?> errorMessage = MessageBuilder.createMessage(EMPTY_PAYLOAD, headers.getMessageHeaders());
<ide> sendMessageToClient(errorMessage);
<ide> }
<ide> }
<ide> protected void sendMessageToClient(Message<?> message) {
<ide> @Override
<ide> public void handleMessage(Message<byte[]> message) {
<ide>
<del> StompHeaderAccessor headers = StompHeaderAccessor.wrap(message);
<del> if (SimpMessageType.HEARTBEAT.equals(headers.getMessageType())) {
<add> StompHeaderAccessor headerAccessor =
<add> MessageHeaderAccessor.getAccessor(message, StompHeaderAccessor.class);
<add>
<add> if (headerAccessor.isHeartbeat()) {
<ide> logger.trace("Received broker heartbeat");
<ide> }
<ide> else if (logger.isDebugEnabled()) {
<ide> logger.debug("Received message from broker in session '" + this.sessionId + "'");
<ide> }
<ide>
<del> if (StompCommand.CONNECTED == headers.getCommand()) {
<del> afterStompConnected(headers);
<add> if (StompCommand.CONNECTED == headerAccessor.getCommand()) {
<add> afterStompConnected(headerAccessor);
<ide> }
<ide>
<del> headers.setSessionId(this.sessionId);
<del> message = MessageBuilder.withPayload(message.getPayload()).setHeaders(headers).build();
<add> headerAccessor.setSessionId(this.sessionId);
<add> headerAccessor.setImmutable();
<add>
<ide> sendMessageToClient(message);
<ide> }
<ide>
<ide> public void afterConnectionClosed() {
<ide> clearConnection();
<ide> }
<ide> catch (Throwable t) {
<del> if (logger.isErrorEnabled()) {
<del> // Ignore
<del> }
<add> // Ignore
<ide> }
<ide> }
<ide> }
<ide> public void afterConnectionClosed() {
<ide> * @return a future to wait for the result
<ide> */
<ide> @SuppressWarnings("unchecked")
<del> public ListenableFuture<Void> forward(final Message<?> message) {
<add> public ListenableFuture<Void> forward(Message<?> message, final StompHeaderAccessor headerAccessor) {
<ide>
<ide> TcpConnection<byte[]> conn = this.tcpConnection;
<ide>
<ide> public ListenableFuture<Void> forward(final Message<?> message) {
<ide> }
<ide>
<ide> if (logger.isDebugEnabled()) {
<del> StompHeaderAccessor headers = StompHeaderAccessor.wrap(message);
<del> if (SimpMessageType.HEARTBEAT.equals(headers.getMessageType())) {
<add> if (headerAccessor.isHeartbeat()) {
<ide> logger.trace("Forwarding heartbeat to broker");
<ide> }
<ide> else {
<ide> logger.debug("Forwarding message to broker");
<ide> }
<ide> }
<ide>
<add> if (headerAccessor.isMutable() && headerAccessor.isModified()) {
<add> message = MessageBuilder.createMessage(message.getPayload(), headerAccessor.getMessageHeaders());
<add> }
<add>
<ide> ListenableFuture<Void> future = conn.send((Message<byte[]>) message);
<ide>
<ide> future.addCallback(new ListenableFutureCallback<Void>() {
<ide> @Override
<ide> public void onSuccess(Void result) {
<del> StompCommand command = StompHeaderAccessor.wrap(message).getCommand();
<del> if (command == StompCommand.DISCONNECT) {
<add> if (headerAccessor.getCommand() == StompCommand.DISCONNECT) {
<ide> clearConnection();
<ide> }
<ide> }
<ide> public void onFailure(Throwable t) {
<ide> // already reset
<ide> }
<ide> else {
<del> handleTcpConnectionFailure("Failed to send message " + message, t);
<add> handleTcpConnectionFailure("Failed to send message " + headerAccessor, t);
<ide> }
<ide> }
<ide> });
<ide> public void afterConnectionClosed() {
<ide> }
<ide>
<ide> @Override
<del> public ListenableFuture<Void> forward(Message<?> message) {
<add> public ListenableFuture<Void> forward(Message<?> message, StompHeaderAccessor headerAccessor) {
<ide> try {
<del> ListenableFuture<Void> future = super.forward(message);
<add> ListenableFuture<Void> future = super.forward(message, headerAccessor);
<ide> future.get();
<ide> return future;
<ide> }
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/simp/user/DefaultUserDestinationResolver.java
<ide> import org.apache.commons.logging.Log;
<ide> import org.apache.commons.logging.LogFactory;
<ide> import org.springframework.messaging.Message;
<add>import org.springframework.messaging.MessageHeaders;
<ide> import org.springframework.messaging.simp.SimpMessageHeaderAccessor;
<ide> import org.springframework.messaging.simp.SimpMessageType;
<ide> import org.springframework.util.Assert;
<ide> public UserSessionRegistry getUserSessionRegistry() {
<ide> @Override
<ide> public UserDestinationResult resolveDestination(Message<?> message) {
<ide>
<del> SimpMessageHeaderAccessor headers = SimpMessageHeaderAccessor.wrap(message);
<del> DestinationInfo info = parseUserDestination(headers);
<add> String destination = SimpMessageHeaderAccessor.getDestination(message.getHeaders());
<add> DestinationInfo info = parseUserDestination(message);
<ide> if (info == null) {
<ide> return null;
<ide> }
<ide>
<ide> Set<String> targetDestinations = new HashSet<String>();
<ide> for (String sessionId : info.getSessionIds()) {
<del> targetDestinations.add(getTargetDestination(
<del> headers.getDestination(), info.getDestinationWithoutPrefix(), sessionId, info.getUser()));
<add> targetDestinations.add(getTargetDestination(destination,
<add> info.getDestinationWithoutPrefix(), sessionId, info.getUser()));
<ide> }
<ide>
<del> return new UserDestinationResult(headers.getDestination(),
<add> return new UserDestinationResult(destination,
<ide> targetDestinations, info.getSubscribeDestination(), info.getUser());
<ide> }
<ide>
<del> private DestinationInfo parseUserDestination(SimpMessageHeaderAccessor headers) {
<add> private DestinationInfo parseUserDestination(Message<?> message) {
<ide>
<del> String destination = headers.getDestination();
<add> MessageHeaders headers = message.getHeaders();
<add> SimpMessageType messageType = SimpMessageHeaderAccessor.getMessageType(headers);
<add> String destination = SimpMessageHeaderAccessor.getDestination(headers);
<add> Principal principal = SimpMessageHeaderAccessor.getUser(headers);
<ide>
<ide> String destinationWithoutPrefix;
<ide> String subscribeDestination;
<ide> String user;
<ide> Set<String> sessionIds;
<ide>
<del> Principal principal = headers.getUser();
<del> SimpMessageType messageType = headers.getMessageType();
<del>
<ide> if (SimpMessageType.SUBSCRIBE.equals(messageType) || SimpMessageType.UNSUBSCRIBE.equals(messageType)) {
<ide> if (!checkDestination(destination, this.destinationPrefix)) {
<ide> return null;
<ide> private DestinationInfo parseUserDestination(SimpMessageHeaderAccessor headers)
<ide> logger.error("Ignoring message, no principal info available");
<ide> return null;
<ide> }
<del> if (headers.getSessionId() == null) {
<add> String sessionId = SimpMessageHeaderAccessor.getSessionId(headers);
<add> if (sessionId == null) {
<ide> logger.error("Ignoring message, no session id available");
<ide> return null;
<ide> }
<ide> destinationWithoutPrefix = destination.substring(this.destinationPrefix.length()-1);
<ide> subscribeDestination = destination;
<ide> user = principal.getName();
<del> sessionIds = Collections.singleton(headers.getSessionId());
<add> sessionIds = Collections.singleton(sessionId);
<ide> }
<ide> else if (SimpMessageType.MESSAGE.equals(messageType)) {
<ide> if (!checkDestination(destination, this.destinationPrefix)) {
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/simp/user/UserDestinationMessageHandler.java
<ide> public void handleMessage(Message<?> message) throws MessagingException {
<ide> if (destinations.isEmpty()) {
<ide> return;
<ide> }
<del> SimpMessageHeaderAccessor headerAccessor = SimpMessageHeaderAccessor.wrap(message);
<del> if (SimpMessageType.MESSAGE.equals(headerAccessor.getMessageType())) {
<add> if (SimpMessageType.MESSAGE.equals(SimpMessageHeaderAccessor.getMessageType(message.getHeaders()))) {
<add> SimpMessageHeaderAccessor headerAccessor = SimpMessageHeaderAccessor.wrap(message);
<ide> headerAccessor.setHeader(SimpMessageHeaderAccessor.ORIGINAL_DESTINATION, result.getSubscribeDestination());
<del> message = MessageBuilder.withPayload(message.getPayload()).setHeaders(headerAccessor).build();
<add> message = MessageBuilder.createMessage(message.getPayload(), headerAccessor.getMessageHeaders());
<ide> }
<del> for (String targetDestination : destinations) {
<add> for (String destination : destinations) {
<ide> if (logger.isDebugEnabled()) {
<del> logger.debug("Sending message to resolved destination=" + targetDestination);
<add> logger.debug("Sending message to resolved destination=" + destination);
<ide> }
<del> this.brokerMessagingTemplate.send(targetDestination, message);
<add> this.brokerMessagingTemplate.send(destination, message);
<ide> }
<ide> }
<ide>
<ide><path>spring-messaging/src/test/java/org/springframework/messaging/converter/MessageConverterTests.java
<ide> public void setStrictContentTypeMatchWithNoSupportedMimeTypes() {
<ide> public void toMessageHeadersCopied() {
<ide> Map<String, Object> map = new HashMap<String, Object>();
<ide> map.put("foo", "bar");
<del> MessageHeaders headers = new MessageHeaders(map );
<add> MessageHeaders headers = new MessageHeaders(map);
<ide> Message<?> message = this.converter.toMessage("ABC", headers);
<ide>
<ide> assertEquals("bar", message.getHeaders().get("foo"));
<add> assertNotNull(message.getHeaders().getId());
<add> assertNotNull(message.getHeaders().getTimestamp());
<ide> }
<ide>
<ide> @Test
<ide><path>spring-messaging/src/test/java/org/springframework/messaging/simp/config/MessageBrokerConfigurationTests.java
<ide> import org.springframework.context.annotation.Configuration;
<ide> import org.springframework.context.support.StaticApplicationContext;
<ide> import org.springframework.messaging.Message;
<del>import org.springframework.messaging.MessageChannel;
<ide> import org.springframework.messaging.MessageHandler;
<ide> import org.springframework.messaging.converter.*;
<ide> import org.springframework.messaging.handler.annotation.MessageMapping;
<ide><path>spring-messaging/src/test/java/org/springframework/messaging/simp/stomp/StompBrokerRelayMessageHandlerIntegrationTests.java
<ide> import org.springframework.messaging.MessageHandler;
<ide> import org.springframework.messaging.MessagingException;
<ide> import org.springframework.messaging.StubMessageChannel;
<add>import org.springframework.messaging.simp.SimpMessageHeaderAccessor;
<ide> import org.springframework.messaging.simp.broker.BrokerAvailabilityEvent;
<ide> import org.springframework.messaging.simp.SimpMessageType;
<ide> import org.springframework.messaging.support.ExecutorSubscribableChannel;
<ide> public void publishSubscribe() throws Exception {
<ide> public void messageDeliverExceptionIfSystemSessionForwardFails() throws Exception {
<ide> stopActiveMqBrokerAndAwait();
<ide> StompHeaderAccessor headers = StompHeaderAccessor.create(StompCommand.SEND);
<del> this.relay.handleMessage(MessageBuilder.withPayload("test".getBytes()).setHeaders(headers).build());
<add> this.relay.handleMessage(MessageBuilder.createMessage("test".getBytes(), headers.getMessageHeaders()));
<ide> }
<ide>
<ide> @Test
<ide> public void disconnectClosesRelaySessionCleanly() throws Exception {
<ide>
<ide> StompHeaderAccessor headers = StompHeaderAccessor.create(StompCommand.DISCONNECT);
<ide> headers.setSessionId("sess1");
<del> this.relay.handleMessage(MessageBuilder.withPayload(new byte[0]).setHeaders(headers).build());
<add> this.relay.handleMessage(MessageBuilder.createMessage(new byte[0], headers.getMessageHeaders()));
<ide>
<ide> Thread.sleep(2000);
<ide>
<ide> public static MessageExchangeBuilder connect(String sessionId) {
<ide> headers.setSessionId(sessionId);
<ide> headers.setAcceptVersion("1.1,1.2");
<ide> headers.setHeartbeat(0, 0);
<del> Message<?> message = MessageBuilder.withPayload(new byte[0]).setHeaders(headers).build();
<add> Message<?> message = MessageBuilder.createMessage(new byte[0], headers.getMessageHeaders());
<ide>
<ide> MessageExchangeBuilder builder = new MessageExchangeBuilder(message);
<ide> builder.expected.add(new StompConnectedFrameMessageMatcher(sessionId));
<ide> public static MessageExchangeBuilder connectWithError(String sessionId) {
<ide> StompHeaderAccessor headers = StompHeaderAccessor.create(StompCommand.CONNECT);
<ide> headers.setSessionId(sessionId);
<ide> headers.setAcceptVersion("1.1,1.2");
<del> Message<?> message = MessageBuilder.withPayload(new byte[0]).setHeaders(headers).build();
<add> Message<?> message = MessageBuilder.createMessage(new byte[0], headers.getMessageHeaders());
<ide> MessageExchangeBuilder builder = new MessageExchangeBuilder(message);
<ide> return builder.andExpectError();
<ide> }
<ide> public static MessageExchangeBuilder subscribeWithReceipt(String sessionId, Stri
<ide> headers.setSubscriptionId(subscriptionId);
<ide> headers.setDestination(destination);
<ide> headers.setReceipt(receiptId);
<del> Message<?> message = MessageBuilder.withPayload(new byte[0]).setHeaders(headers).build();
<add> Message<?> message = MessageBuilder.createMessage(new byte[0], headers.getMessageHeaders());
<ide>
<ide> MessageExchangeBuilder builder = new MessageExchangeBuilder(message);
<ide> builder.expected.add(new StompReceiptFrameMessageMatcher(sessionId, receiptId));
<ide> return builder;
<ide> }
<ide>
<ide> public static MessageExchangeBuilder send(String destination, String payload) {
<del> StompHeaderAccessor headers = StompHeaderAccessor.create(StompCommand.SEND);
<add> SimpMessageHeaderAccessor headers = SimpMessageHeaderAccessor.create(SimpMessageType.MESSAGE);
<ide> headers.setDestination(destination);
<del> Message<?> message = MessageBuilder.withPayload(payload.getBytes(UTF_8)).setHeaders(headers).build();
<add> Message<?> message = MessageBuilder.createMessage(payload.getBytes(UTF_8), headers.getMessageHeaders());
<ide> return new MessageExchangeBuilder(message);
<ide> }
<ide>
<ide> public MessageExchangeBuilder andExpectMessage(String sessionId, String subscriptionId) {
<del> Assert.isTrue(StompCommand.SEND.equals(headers.getCommand()), "MESSAGE can only be expected after SEND");
<add> Assert.isTrue(SimpMessageType.MESSAGE.equals(headers.getMessageType()));
<ide> String destination = this.headers.getDestination();
<ide> Object payload = this.message.getPayload();
<ide> this.expected.add(new StompMessageFrameMessageMatcher(sessionId, subscriptionId, destination, payload));
<ide><path>spring-messaging/src/test/java/org/springframework/messaging/simp/stomp/StompBrokerRelayMessageHandlerTests.java
<ide> import org.springframework.messaging.simp.SimpMessageHeaderAccessor;
<ide> import org.springframework.messaging.simp.SimpMessageType;
<ide> import org.springframework.messaging.support.MessageBuilder;
<add>import org.springframework.messaging.support.MessageHeaderAccessor;
<ide> import org.springframework.messaging.tcp.ReconnectStrategy;
<ide> import org.springframework.messaging.tcp.TcpConnection;
<ide> import org.springframework.messaging.tcp.TcpConnectionHandler;
<ide> public void testVirtualHostHeader() throws Exception {
<ide> String sessionId = "sess1";
<ide> StompHeaderAccessor headers = StompHeaderAccessor.create(StompCommand.CONNECT);
<ide> headers.setSessionId(sessionId);
<del> this.brokerRelay.handleMessage(MessageBuilder.withPayload(new byte[0]).setHeaders(headers).build());
<add> this.brokerRelay.handleMessage(MessageBuilder.createMessage(new byte[0], headers.getMessageHeaders()));
<ide>
<ide> List<Message<byte[]>> sent = this.tcpClient.connection.messages;
<ide> assertEquals(2, sent.size());
<ide>
<ide> StompHeaderAccessor headers1 = StompHeaderAccessor.wrap(sent.get(0));
<ide> assertEquals(virtualHost, headers1.getHost());
<add> assertNotNull("The prepared message does not have an accessor",
<add> MessageHeaderAccessor.getAccessor(sent.get(0), MessageHeaderAccessor.class));
<ide>
<ide> StompHeaderAccessor headers2 = StompHeaderAccessor.wrap(sent.get(1));
<ide> assertEquals(sessionId, headers2.getSessionId());
<ide> assertEquals(virtualHost, headers2.getHost());
<add> assertNotNull("The prepared message does not have an accessor",
<add> MessageHeaderAccessor.getAccessor(sent.get(1), MessageHeaderAccessor.class));
<ide> }
<ide>
<ide> @Test
<ide> public void testLoginPasscode() throws Exception {
<ide> String sessionId = "sess1";
<ide> StompHeaderAccessor headers = StompHeaderAccessor.create(StompCommand.CONNECT);
<ide> headers.setSessionId(sessionId);
<del> this.brokerRelay.handleMessage(MessageBuilder.withPayload(new byte[0]).setHeaders(headers).build());
<add> this.brokerRelay.handleMessage(MessageBuilder.createMessage(new byte[0], headers.getMessageHeaders()));
<ide>
<ide> List<Message<byte[]>> sent = this.tcpClient.connection.messages;
<ide> assertEquals(2, sent.size());
<ide> public void testDestinationExcluded() throws Exception {
<ide> SimpMessageHeaderAccessor headers = SimpMessageHeaderAccessor.create(SimpMessageType.MESSAGE);
<ide> headers.setSessionId("sess1");
<ide> headers.setDestination("/user/daisy/foo");
<del> this.brokerRelay.handleMessage(MessageBuilder.withPayload(new byte[0]).setHeaders(headers).build());
<add> this.brokerRelay.handleMessage(MessageBuilder.createMessage(new byte[0], headers.getMessageHeaders()));
<ide>
<ide> List<Message<byte[]>> sent = this.tcpClient.connection.messages;
<ide> assertEquals(1, sent.size());
<ide> assertEquals(StompCommand.CONNECT, StompHeaderAccessor.wrap(sent.get(0)).getCommand());
<add> assertNotNull("The prepared message does not have an accessor",
<add> MessageHeaderAccessor.getAccessor(sent.get(0), MessageHeaderAccessor.class));
<ide> }
<ide>
<ide>
<ide><path>spring-messaging/src/test/java/org/springframework/messaging/simp/user/UserDestinationMessageHandlerTests.java
<ide> public void handleSubscribe() {
<ide> ArgumentCaptor<Message> captor = ArgumentCaptor.forClass(Message.class);
<ide> Mockito.verify(this.brokerChannel).send(captor.capture());
<ide>
<del> assertEquals("/queue/foo-user123",
<del> captor.getValue().getHeaders().get(SimpMessageHeaderAccessor.DESTINATION_HEADER));
<add> assertEquals("/queue/foo-user123", SimpMessageHeaderAccessor.getDestination(captor.getValue().getHeaders()));
<ide> }
<ide>
<ide> @Test
<ide> public void handleUnsubscribe() {
<ide> ArgumentCaptor<Message> captor = ArgumentCaptor.forClass(Message.class);
<ide> Mockito.verify(this.brokerChannel).send(captor.capture());
<ide>
<del> assertEquals("/queue/foo-user123",
<del> captor.getValue().getHeaders().get(SimpMessageHeaderAccessor.DESTINATION_HEADER));
<add> assertEquals("/queue/foo-user123", SimpMessageHeaderAccessor.getDestination(captor.getValue().getHeaders()));
<ide> }
<ide>
<ide> @Test
<ide> public void handleMessage() {
<ide> ArgumentCaptor<Message> captor = ArgumentCaptor.forClass(Message.class);
<ide> Mockito.verify(this.brokerChannel).send(captor.capture());
<ide>
<del> assertEquals("/queue/foo-user123",
<del> captor.getValue().getHeaders().get(SimpMessageHeaderAccessor.DESTINATION_HEADER));
<del> assertEquals("/user/queue/foo",
<del> captor.getValue().getHeaders().get(SimpMessageHeaderAccessor.ORIGINAL_DESTINATION));
<add> assertEquals("/queue/foo-user123", SimpMessageHeaderAccessor.getDestination(captor.getValue().getHeaders()));
<add> assertEquals("/user/queue/foo", captor.getValue().getHeaders().get(SimpMessageHeaderAccessor.ORIGINAL_DESTINATION));
<ide> }
<ide>
<ide>
<ide><path>spring-websocket/src/main/java/org/springframework/web/socket/messaging/StompSubProtocolHandler.java
<ide> import org.springframework.messaging.simp.stomp.StompEncoder;
<ide> import org.springframework.messaging.simp.stomp.StompHeaderAccessor;
<ide> import org.springframework.messaging.simp.user.DestinationUserNameProvider;
<del>import org.springframework.messaging.simp.user.UserDestinationMessageHandler;
<ide> import org.springframework.messaging.simp.user.UserSessionRegistry;
<ide> import org.springframework.messaging.support.MessageBuilder;
<add>import org.springframework.messaging.support.MessageHeaderAccessor;
<ide> import org.springframework.util.Assert;
<ide> import org.springframework.web.socket.CloseStatus;
<ide> import org.springframework.web.socket.TextMessage;
<ide> public class StompSubProtocolHandler implements SubProtocolHandler, ApplicationE
<ide>
<ide> private static final Log logger = LogFactory.getLog(StompSubProtocolHandler.class);
<ide>
<add> private static final byte[] EMPTY_PAYLOAD = new byte[0];
<add>
<ide>
<ide> private int messageSizeLimit = 64 * 1024;
<ide>
<ide> public void handleMessageFromClient(WebSocketSession session,
<ide>
<ide> for (Message<byte[]> message : messages) {
<ide> try {
<del> StompHeaderAccessor headers = StompHeaderAccessor.wrap(message);
<add>
<add> StompHeaderAccessor headerAccessor =
<add> MessageHeaderAccessor.getAccessor(message, StompHeaderAccessor.class);
<add>
<ide> if (logger.isTraceEnabled()) {
<del> if (SimpMessageType.HEARTBEAT.equals(headers.getMessageType())) {
<add> if (headerAccessor.isHeartbeat()) {
<ide> logger.trace("Received heartbeat from client session=" + session.getId());
<ide> }
<ide> else {
<ide> logger.trace("Received message from client session=" + session.getId());
<ide> }
<ide> }
<ide>
<del> headers.setSessionId(session.getId());
<del> headers.setSessionAttributes(session.getAttributes());
<del> headers.setUser(session.getPrincipal());
<del>
<del> message = MessageBuilder.withPayload(message.getPayload()).setHeaders(headers).build();
<add> headerAccessor.setSessionId(session.getId());
<add> headerAccessor.setSessionAttributes(session.getAttributes());
<add> headerAccessor.setUser(session.getPrincipal());
<add> headerAccessor.setImmutable();
<ide>
<del> if (this.eventPublisher != null && StompCommand.CONNECT.equals(headers.getCommand())) {
<add> if (this.eventPublisher != null && StompCommand.CONNECT.equals(headerAccessor.getCommand())) {
<ide> publishEvent(new SessionConnectEvent(this, message));
<ide> }
<ide>
<ide> private void publishEvent(ApplicationEvent event) {
<ide>
<ide> protected void sendErrorMessage(WebSocketSession session, Throwable error) {
<ide>
<del> StompHeaderAccessor headers = StompHeaderAccessor.create(StompCommand.ERROR);
<del> headers.setMessage(error.getMessage());
<del> Message<byte[]> message = MessageBuilder.withPayload(new byte[0]).setHeaders(headers).build();
<del> byte[] bytes = this.stompEncoder.encode(message);
<add> StompHeaderAccessor headerAccessor = StompHeaderAccessor.create(StompCommand.ERROR);
<add> headerAccessor.setMessage(error.getMessage());
<add> byte[] bytes = this.stompEncoder.encode(headerAccessor.getMessageHeaders(), EMPTY_PAYLOAD);
<ide> try {
<ide> session.sendMessage(new TextMessage(bytes));
<ide> }
<ide> protected void sendErrorMessage(WebSocketSession session, Throwable error) {
<ide> @Override
<ide> public void handleMessageToClient(WebSocketSession session, Message<?> message) {
<ide>
<del> StompHeaderAccessor headers = StompHeaderAccessor.wrap(message);
<del>
<del> if (headers.getMessageType() == SimpMessageType.CONNECT_ACK) {
<del> StompHeaderAccessor connectedHeaders = StompHeaderAccessor.create(StompCommand.CONNECTED);
<del> connectedHeaders.setVersion(getVersion(headers));
<del> connectedHeaders.setHeartbeat(0, 0); // no heart-beat support with simple broker
<del> headers = connectedHeaders;
<add> if (!(message.getPayload() instanceof byte[])) {
<add> logger.error("Ignoring message, expected byte[] content: " + message);
<add> return;
<ide> }
<del> else if (SimpMessageType.MESSAGE.equals(headers.getMessageType())) {
<del> headers.updateStompCommandAsServerMessage();
<add>
<add> MessageHeaderAccessor accessor = MessageHeaderAccessor.getAccessor(message, MessageHeaderAccessor.class);
<add> if (accessor == null) {
<add> logger.error("No header accessor: " + message);
<add> return;
<ide> }
<ide>
<del> if (headers.getCommand() == StompCommand.CONNECTED) {
<del> afterStompSessionConnected(headers, session);
<add> StompHeaderAccessor stompAccessor;
<add> if (accessor instanceof StompHeaderAccessor) {
<add> stompAccessor = (StompHeaderAccessor) accessor;
<add> }
<add> else if (accessor instanceof SimpMessageHeaderAccessor) {
<add> stompAccessor = StompHeaderAccessor.wrap(message);
<add> if (SimpMessageType.CONNECT_ACK.equals(stompAccessor.getMessageType())) {
<add> StompHeaderAccessor connectedHeaders = StompHeaderAccessor.create(StompCommand.CONNECTED);
<add> connectedHeaders.setVersion(getVersion(stompAccessor));
<add> connectedHeaders.setHeartbeat(0, 0); // no heart-beat support with simple broker
<add> stompAccessor = connectedHeaders;
<add> }
<add> else if (stompAccessor.getCommand() == null || StompCommand.SEND.equals(stompAccessor.getCommand())) {
<add> stompAccessor.updateStompCommandAsServerMessage();
<add> }
<add> }
<add> else {
<add> // Should not happen
<add> logger.error("Unexpected header accessor type: " + accessor);
<add> return;
<ide> }
<ide>
<del> if (StompCommand.MESSAGE.equals(headers.getCommand())) {
<del> if (headers.getSubscriptionId() == null) {
<add> StompCommand command = stompAccessor.getCommand();
<add> if (StompCommand.MESSAGE.equals(command)) {
<add> if (stompAccessor.getSubscriptionId() == null) {
<ide> logger.error("Ignoring message, no subscriptionId header: " + message);
<ide> return;
<ide> }
<ide> String header = SimpMessageHeaderAccessor.ORIGINAL_DESTINATION;
<ide> if (message.getHeaders().containsKey(header)) {
<del> headers.setDestination((String) message.getHeaders().get(header));
<add> stompAccessor = toMutableAccessor(stompAccessor, message);
<add> stompAccessor.setDestination((String) message.getHeaders().get(header));
<ide> }
<ide> }
<del>
<del> if (!(message.getPayload() instanceof byte[])) {
<del> logger.error("Ignoring message, expected byte[] content: " + message);
<del> return;
<del> }
<del>
<del> try {
<del> message = MessageBuilder.withPayload(message.getPayload()).setHeaders(headers).build();
<del>
<del> if (this.eventPublisher != null && StompCommand.CONNECTED.equals(headers.getCommand())) {
<add> else if (StompCommand.CONNECTED.equals(command)) {
<add> stompAccessor = afterStompSessionConnected(message, stompAccessor, session);
<add> if (this.eventPublisher != null && StompCommand.CONNECTED.equals(command)) {
<ide> publishEvent(new SessionConnectedEvent(this, (Message<byte[]>) message));
<ide> }
<add> }
<ide>
<del> byte[] bytes = this.stompEncoder.encode((Message<byte[]>) message);
<add> try {
<add> byte[] bytes = this.stompEncoder.encode(stompAccessor.getMessageHeaders(), (byte[]) message.getPayload());
<ide> TextMessage textMessage = new TextMessage(bytes);
<ide>
<ide> session.sendMessage(textMessage);
<ide> else if (SimpMessageType.MESSAGE.equals(headers.getMessageType())) {
<ide> sendErrorMessage(session, ex);
<ide> }
<ide> finally {
<del> if (StompCommand.ERROR.equals(headers.getCommand())) {
<add> if (StompCommand.ERROR.equals(command)) {
<ide> try {
<ide> session.close(CloseStatus.PROTOCOL_ERROR);
<ide> }
<ide> else if (SimpMessageType.MESSAGE.equals(headers.getMessageType())) {
<ide> }
<ide> }
<ide>
<add> protected StompHeaderAccessor toMutableAccessor(StompHeaderAccessor headerAccessor, Message<?> message) {
<add> return (headerAccessor.isMutable() ? headerAccessor : StompHeaderAccessor.wrap(message));
<add> }
<add>
<ide> private String getVersion(StompHeaderAccessor connectAckHeaders) {
<ide>
<ide> String name = StompHeaderAccessor.CONNECT_MESSAGE_HEADER;
<ide> Message<?> connectMessage = (Message<?>) connectAckHeaders.getHeader(name);
<del> StompHeaderAccessor connectHeaders = StompHeaderAccessor.wrap(connectMessage);
<ide> Assert.notNull(connectMessage, "CONNECT_ACK does not contain original CONNECT " + connectAckHeaders);
<ide>
<add> StompHeaderAccessor connectHeaders =
<add> MessageHeaderAccessor.getAccessor(connectMessage, StompHeaderAccessor.class);
<add>
<ide> Set<String> acceptVersions = connectHeaders.getAcceptVersion();
<ide> if (acceptVersions.contains("1.2")) {
<ide> return "1.2";
<ide> else if (acceptVersions.isEmpty()) {
<ide> }
<ide> }
<ide>
<del> private void afterStompSessionConnected(StompHeaderAccessor headers, WebSocketSession session) {
<add> private StompHeaderAccessor afterStompSessionConnected(
<add> Message<?> message, StompHeaderAccessor headerAccessor, WebSocketSession session) {
<add>
<ide> Principal principal = session.getPrincipal();
<ide> if (principal != null) {
<del> headers.setNativeHeader(CONNECTED_USER_HEADER, principal.getName());
<add> headerAccessor = toMutableAccessor(headerAccessor, message);
<add> headerAccessor.setNativeHeader(CONNECTED_USER_HEADER, principal.getName());
<ide> if (this.userSessionRegistry != null) {
<ide> String userName = resolveNameForUserSessionRegistry(principal);
<ide> this.userSessionRegistry.registerSessionId(userName, session.getId());
<ide> }
<ide> }
<del> long[] heartbeat = headers.getHeartbeat();
<add> long[] heartbeat = headerAccessor.getHeartbeat();
<ide> if (heartbeat[1] > 0) {
<ide> session = WebSocketSessionDecorator.unwrap(session);
<ide> if (session instanceof SockJsSession) {
<ide> logger.debug("STOMP heartbeats negotiated, disabling SockJS heartbeats.");
<ide> ((SockJsSession) session).disableHeartbeat();
<ide> }
<ide> }
<add> return headerAccessor;
<ide> }
<ide>
<ide> private String resolveNameForUserSessionRegistry(Principal principal) {
<ide> private String resolveNameForUserSessionRegistry(Principal principal) {
<ide>
<ide> @Override
<ide> public String resolveSessionId(Message<?> message) {
<del> StompHeaderAccessor headers = StompHeaderAccessor.wrap(message);
<del> return headers.getSessionId();
<add> return SimpMessageHeaderAccessor.getSessionId(message.getHeaders());
<ide> }
<ide>
<ide> @Override
<ide> public void afterSessionEnded(WebSocketSession session, CloseStatus closeStatus,
<ide>
<ide> StompHeaderAccessor headers = StompHeaderAccessor.create(StompCommand.DISCONNECT);
<ide> headers.setSessionId(session.getId());
<del> Message<?> message = MessageBuilder.withPayload(new byte[0]).setHeaders(headers).build();
<add> Message<?> message = MessageBuilder.createMessage(EMPTY_PAYLOAD, headers.getMessageHeaders());
<ide>
<ide> if (this.eventPublisher != null) {
<ide> publishEvent(new SessionDisconnectEvent(this, session.getId(), closeStatus));
<ide><path>spring-websocket/src/test/java/org/springframework/web/socket/messaging/StompSubProtocolHandlerTests.java
<ide>
<ide> package org.springframework.web.socket.messaging;
<ide>
<del>import java.io.IOException;
<ide> import java.nio.ByteBuffer;
<ide> import java.util.ArrayList;
<ide> import java.util.Arrays;
<ide> import org.springframework.messaging.simp.stomp.StompHeaderAccessor;
<ide> import org.springframework.messaging.simp.user.DefaultUserSessionRegistry;
<ide> import org.springframework.messaging.simp.user.DestinationUserNameProvider;
<del>import org.springframework.messaging.simp.user.UserDestinationMessageHandler;
<ide> import org.springframework.messaging.simp.user.UserSessionRegistry;
<ide> import org.springframework.messaging.support.MessageBuilder;
<ide> import org.springframework.web.socket.CloseStatus;
<ide> */
<ide> public class StompSubProtocolHandlerTests {
<ide>
<add> public static final byte[] EMPTY_PAYLOAD = new byte[0];
<add>
<ide> private StompSubProtocolHandler protocolHandler;
<ide>
<ide> private TestWebSocketSession session;
<ide> public void handleMessageToClientConnected() {
<ide> this.protocolHandler.setUserSessionRegistry(registry);
<ide>
<ide> StompHeaderAccessor headers = StompHeaderAccessor.create(StompCommand.CONNECTED);
<del> Message<byte[]> message = MessageBuilder.withPayload(new byte[0]).setHeaders(headers).build();
<add> Message<byte[]> message = MessageBuilder.createMessage(EMPTY_PAYLOAD, headers.getMessageHeaders());
<ide> this.protocolHandler.handleMessageToClient(this.session, message);
<ide>
<ide> assertEquals(1, this.session.getSentMessages().size());
<ide> public void handleMessageToClientConnectedUniqueUserName() {
<ide> this.protocolHandler.setUserSessionRegistry(registry);
<ide>
<ide> StompHeaderAccessor headers = StompHeaderAccessor.create(StompCommand.CONNECTED);
<del> Message<byte[]> message = MessageBuilder.withPayload(new byte[0]).setHeaders(headers).build();
<add> Message<byte[]> message = MessageBuilder.createMessage(EMPTY_PAYLOAD, headers.getMessageHeaders());
<ide> this.protocolHandler.handleMessageToClient(this.session, message);
<ide>
<ide> assertEquals(1, this.session.getSentMessages().size());
<ide> public void handleMessageToClientConnectedWithHeartbeats() {
<ide>
<ide> StompHeaderAccessor headers = StompHeaderAccessor.create(StompCommand.CONNECTED);
<ide> headers.setHeartbeat(0,10);
<del> Message<byte[]> message = MessageBuilder.withPayload(new byte[0]).setHeaders(headers).build();
<add> Message<byte[]> message = MessageBuilder.createMessage(EMPTY_PAYLOAD, headers.getMessageHeaders());
<ide> this.protocolHandler.handleMessageToClient(sockJsSession, message);
<ide>
<ide> verify(sockJsSession).disableHeartbeat();
<ide> public void handleMessageToClientConnectAck() {
<ide>
<ide> StompHeaderAccessor connectHeaders = StompHeaderAccessor.create(StompCommand.CONNECT);
<ide> connectHeaders.setHeartbeat(10000, 10000);
<del> connectHeaders.setNativeHeader(StompHeaderAccessor.STOMP_ACCEPT_VERSION_HEADER, "1.0,1.1");
<del> Message<?> connectMessage = MessageBuilder.withPayload(new byte[0]).setHeaders(connectHeaders).build();
<add> connectHeaders.setAcceptVersion("1.0,1.1");
<add> Message<?> connectMessage = MessageBuilder.createMessage(EMPTY_PAYLOAD, connectHeaders.getMessageHeaders());
<ide>
<ide> SimpMessageHeaderAccessor connectAckHeaders = SimpMessageHeaderAccessor.create(SimpMessageType.CONNECT_ACK);
<ide> connectAckHeaders.setHeader(SimpMessageHeaderAccessor.CONNECT_MESSAGE_HEADER, connectMessage);
<del> Message<byte[]> connectAckMessage = MessageBuilder.withPayload(new byte[0]).setHeaders(connectAckHeaders).build();
<add> Message<byte[]> connectAckMessage = MessageBuilder.createMessage(EMPTY_PAYLOAD, connectAckHeaders.getMessageHeaders());
<ide>
<ide> this.protocolHandler.handleMessageToClient(this.session, connectAckMessage);
<ide>
<ide> public void eventPublication() {
<ide> this.protocolHandler.afterSessionStarted(this.session, this.channel);
<ide>
<ide> StompHeaderAccessor headers = StompHeaderAccessor.create(StompCommand.CONNECT);
<del> Message<byte[]> message = MessageBuilder.withPayload(new byte[0]).setHeaders(headers).build();
<add> Message<byte[]> message = MessageBuilder.createMessage(EMPTY_PAYLOAD, headers.getMessageHeaders());
<ide> TextMessage textMessage = new TextMessage(new StompEncoder().encode(message));
<ide> this.protocolHandler.handleMessageFromClient(this.session, textMessage, this.channel);
<ide>
<ide> headers = StompHeaderAccessor.create(StompCommand.CONNECTED);
<del> message = MessageBuilder.withPayload(new byte[0]).setHeaders(headers).build();
<add> message = MessageBuilder.createMessage(EMPTY_PAYLOAD, headers.getMessageHeaders());
<ide> this.protocolHandler.handleMessageToClient(this.session, message);
<ide>
<ide> this.protocolHandler.afterSessionEnded(this.session, CloseStatus.BAD_DATA, this.channel);
<ide> public void publishEvent(ApplicationEvent event) {
<ide> this.protocolHandler.afterSessionStarted(this.session, this.channel);
<ide>
<ide> StompHeaderAccessor headers = StompHeaderAccessor.create(StompCommand.CONNECT);
<del> Message<byte[]> message = MessageBuilder.withPayload(new byte[0]).setHeaders(headers).build();
<add> Message<byte[]> message = MessageBuilder.createMessage(EMPTY_PAYLOAD, headers.getMessageHeaders());
<ide> TextMessage textMessage = new TextMessage(new StompEncoder().encode(message));
<ide> this.protocolHandler.handleMessageFromClient(this.session, textMessage, this.channel);
<ide>
<ide> public void publishEvent(ApplicationEvent event) {
<ide> reset(this.channel);
<ide>
<ide> headers = StompHeaderAccessor.create(StompCommand.CONNECTED);
<del> message = MessageBuilder.withPayload(new byte[0]).setHeaders(headers).build();
<add> message = MessageBuilder.createMessage(EMPTY_PAYLOAD, headers.getMessageHeaders());
<ide> this.protocolHandler.handleMessageToClient(this.session, message);
<ide>
<ide> assertEquals(1, this.session.getSentMessages().size());
<ide> public void handleMessageToClientUserDestination() {
<ide> headers.setSubscriptionId("sub0");
<ide> headers.setDestination("/queue/foo-user123");
<ide> headers.setHeader(StompHeaderAccessor.ORIGINAL_DESTINATION, "/user/queue/foo");
<del> Message<byte[]> message = MessageBuilder.withPayload(new byte[0]).setHeaders(headers).build();
<add> Message<byte[]> message = MessageBuilder.createMessage(EMPTY_PAYLOAD, headers.getMessageHeaders());
<ide> this.protocolHandler.handleMessageToClient(this.session, message);
<ide>
<ide> assertEquals(1, this.session.getSentMessages().size());
<ide> public void handleMessageFromClient() {
<ide> @Test
<ide> public void handleMessageFromClientInvalidStompCommand() {
<ide>
<del> TextMessage textMessage = new TextMessage("FOO");
<add> TextMessage textMessage = new TextMessage("FOO\n\n\0");
<ide>
<add> this.protocolHandler.afterSessionStarted(this.session, this.channel);
<ide> this.protocolHandler.handleMessageFromClient(this.session, textMessage, this.channel);
<ide>
<ide> verifyZeroInteractions(this.channel);
<ide><path>spring-websocket/src/test/java/org/springframework/web/socket/messaging/StompWebSocketIntegrationTests.java
<ide> public void sendMessageToBrokerAndReceiveReplyViaTopic() throws Exception {
<ide> assertTrue(clientHandler.latch.await(2, TimeUnit.SECONDS));
<ide>
<ide> String payload = clientHandler.actual.get(0).getPayload();
<del> assertTrue("Expected STOMP Command=MESSAGE, got " + payload, payload.startsWith("MESSAGE\n"));
<add> assertTrue("Expected STOMP MESSAGE, got " + payload, payload.startsWith("MESSAGE\n"));
<ide> }
<ide> finally {
<ide> session.close(); | 22 |
Python | Python | add serializer tests for tokenizer | de974f7bef19dbddc046f07bb2a58b8afa3dba09 | <ide><path>spacy/tests/serialize/test_serialize_tokenizer.py
<add># coding: utf-8
<add>from __future__ import unicode_literals
<add>
<add>from ..util import make_tempdir
<add>
<add>import pytest
<add>
<add>
<add>@pytest.mark.parametrize('text', ["I can't do this"])
<add>def test_serialize_tokenizer_roundtrip_bytes(en_tokenizer, text):
<add> tokenizer_b = en_tokenizer.to_bytes()
<add> new_tokenizer = en_tokenizer.from_bytes(tokenizer_b)
<add> assert new_tokenizer.to_bytes() == tokenizer_b
<add> doc1 = en_tokenizer(text)
<add> doc2 = new_tokenizer(text)
<add> assert [token.text for token in doc1] == [token.text for token in doc2]
<add>
<add>
<add>def test_serialize_tokenizer_roundtrip_disk(en_tokenizer):
<add> tokenizer = en_tokenizer
<add> with make_tempdir() as d:
<add> file_path = d / 'tokenizer'
<add> tokenizer.to_disk(file_path)
<add> tokenizer_d = en_tokenizer.from_disk(file_path)
<add> assert tokenizer.to_bytes() == tokenizer_d.to_bytes() | 1 |
Ruby | Ruby | add tests for empty fetch | 5b5c37eaf7969c15fd59d0e6bbc9972f343ec11f | <ide><path>activesupport/test/cache/behaviors/cache_instrumentation_behavior.rb
<ide> def test_instrumentation_with_fetch_multi_as_super_operation
<ide>
<ide> assert_equal %w[ cache_read_multi.active_support ], events.map(&:name)
<ide> assert_equal :fetch_multi, events[0].payload[:super_operation]
<add> assert_equal ["a", "b"], events[0].payload[:key]
<ide> assert_equal ["b"], events[0].payload[:hits]
<ide> end
<ide>
<add> def test_instrumentation_empty_fetch_multi
<add> events = with_instrumentation "read_multi" do
<add> @cache.fetch_multi() { |key| key * 2 }
<add> end
<add>
<add> assert_equal %w[ cache_read_multi.active_support ], events.map(&:name)
<add> assert_equal :fetch_multi, events[0].payload[:super_operation]
<add> assert_equal [], events[0].payload[:key]
<add> assert_equal [], events[0].payload[:hits]
<add> end
<add>
<ide> def test_read_multi_instrumentation
<ide> @cache.write("b", "bb")
<ide>
<ide> events = with_instrumentation "read_multi" do
<del> @cache.read_multi("a", "b") { |key| key * 2 }
<add> @cache.read_multi("a", "b")
<ide> end
<ide>
<ide> assert_equal %w[ cache_read_multi.active_support ], events.map(&:name)
<add> assert_equal ["a", "b"], events[0].payload[:key]
<ide> assert_equal ["b"], events[0].payload[:hits]
<ide> end
<ide>
<add> def test_empty_read_multi_instrumentation
<add> events = with_instrumentation "read_multi" do
<add> @cache.read_multi()
<add> end
<add>
<add> assert_equal %w[ cache_read_multi.active_support ], events.map(&:name)
<add> assert_equal [], events[0].payload[:key]
<add> assert_equal [], events[0].payload[:hits]
<add> end
<add>
<ide> private
<ide> def with_instrumentation(method)
<ide> event_name = "cache_#{method}.active_support" | 1 |
PHP | PHP | fix bug with file conversion | 1f2124ef57bdd647979ab75a2863ce448118cca3 | <ide><path>src/Illuminate/Http/Request.php
<ide> public function allFiles()
<ide> protected function convertUploadedFiles(array $files)
<ide> {
<ide> return array_map(function ($file) {
<add> if (is_array($file) && empty(array_filter($file))) {
<add> return $file;
<add> }
<add>
<ide> return is_array($file)
<ide> ? $this->convertUploadedFiles($file)
<ide> : UploadedFile::createFromBase($file); | 1 |
Javascript | Javascript | support multi-dot file extension | 22f7d0a4bdf4e33cfb85be1f341b1f8cadf1b668 | <ide><path>lib/internal/modules/cjs/loader.js
<ide> function tryExtensions(p, exts, isMain) {
<ide> return false;
<ide> }
<ide>
<add>// find the longest (possibly multi-dot) extension registered in
<add>// Module._extensions
<add>function findLongestRegisteredExtension(filename) {
<add> const name = path.basename(filename);
<add> let currentExtension;
<add> let index;
<add> let startIndex = 0;
<add> while ((index = name.indexOf('.', startIndex)) !== -1) {
<add> startIndex = index + 1;
<add> if (index === 0) continue; // Skip dotfiles like .gitignore
<add> currentExtension = name.slice(index);
<add> if (Module._extensions[currentExtension]) return currentExtension;
<add> }
<add> return '.js';
<add>}
<add>
<ide> var warned = false;
<ide> Module._findPath = function(request, paths, isMain) {
<ide> if (path.isAbsolute(request)) {
<ide> Module.prototype.load = function(filename) {
<ide> this.filename = filename;
<ide> this.paths = Module._nodeModulePaths(path.dirname(filename));
<ide>
<del> var extension = path.extname(filename) || '.js';
<del> if (!Module._extensions[extension]) extension = '.js';
<add> var extension = findLongestRegisteredExtension(filename);
<ide> Module._extensions[extension](this, filename);
<ide> this.loaded = true;
<ide>
<ide><path>test/known_issues/test-module-deleted-extensions.js
<del>'use strict';
<del>// Refs: https://github.com/nodejs/node/issues/4778
<del>const common = require('../common');
<del>const assert = require('assert');
<del>const fs = require('fs');
<del>const path = require('path');
<del>const tmpdir = require('../common/tmpdir');
<del>const file = path.join(tmpdir.path, 'test-extensions.foo.bar');
<del>
<del>tmpdir.refresh();
<del>fs.writeFileSync(file, '', 'utf8');
<del>require.extensions['.foo.bar'] = (module, path) => {};
<del>delete require.extensions['.foo.bar'];
<del>require.extensions['.bar'] = common.mustCall((module, path) => {
<del> assert.strictEqual(module.id, file);
<del> assert.strictEqual(path, file);
<del>});
<del>require(path.join(tmpdir.path, 'test-extensions'));
<ide><path>test/parallel/test-module-multi-extensions.js
<add>'use strict';
<add>
<add>// Refs: https://github.com/nodejs/node/issues/4778
<add>
<add>const common = require('../common');
<add>const assert = require('assert');
<add>const fs = require('fs');
<add>const path = require('path');
<add>const Module = require('module');
<add>const tmpdir = require('../common/tmpdir');
<add>const file = path.join(tmpdir.path, 'test-extensions.foo.bar');
<add>const dotfile = path.join(tmpdir.path, '.bar');
<add>const dotfileWithExtension = path.join(tmpdir.path, '.foo.bar');
<add>
<add>tmpdir.refresh();
<add>fs.writeFileSync(file, 'console.log(__filename);', 'utf8');
<add>fs.writeFileSync(dotfile, 'console.log(__filename);', 'utf8');
<add>fs.writeFileSync(dotfileWithExtension, 'console.log(__filename);', 'utf8');
<add>
<add>{
<add> require.extensions['.bar'] = common.mustNotCall();
<add> require.extensions['.foo.bar'] = common.mustCall();
<add> const modulePath = path.join(tmpdir.path, 'test-extensions');
<add> require(modulePath);
<add> require(file);
<add> delete require.cache[file];
<add> delete require.extensions['.bar'];
<add> delete require.extensions['.foo.bar'];
<add> Module._pathCache = Object.create(null);
<add>}
<add>
<add>{
<add> require.extensions['.foo.bar'] = common.mustCall();
<add> const modulePath = path.join(tmpdir.path, 'test-extensions');
<add> require(modulePath);
<add> assert.throws(
<add> () => require(`${modulePath}.foo`),
<add> new Error(`Cannot find module '${modulePath}.foo'`)
<add> );
<add> require(`${modulePath}.foo.bar`);
<add> delete require.cache[file];
<add> delete require.extensions['.foo.bar'];
<add> Module._pathCache = Object.create(null);
<add>}
<add>
<add>{
<add> const modulePath = path.join(tmpdir.path, 'test-extensions');
<add> assert.throws(
<add> () => require(modulePath),
<add> new Error(`Cannot find module '${modulePath}'`)
<add> );
<add> delete require.cache[file];
<add> Module._pathCache = Object.create(null);
<add>}
<add>
<add>{
<add> require.extensions['.bar'] = common.mustNotCall();
<add> require.extensions['.foo.bar'] = common.mustCall();
<add> const modulePath = path.join(tmpdir.path, 'test-extensions.foo');
<add> require(modulePath);
<add> delete require.cache[file];
<add> delete require.extensions['.bar'];
<add> delete require.extensions['.foo.bar'];
<add> Module._pathCache = Object.create(null);
<add>}
<add>
<add>{
<add> require.extensions['.foo.bar'] = common.mustNotCall();
<add> const modulePath = path.join(tmpdir.path, 'test-extensions.foo');
<add> assert.throws(
<add> () => require(modulePath),
<add> new Error(`Cannot find module '${modulePath}'`)
<add> );
<add> delete require.extensions['.foo.bar'];
<add> Module._pathCache = Object.create(null);
<add>}
<add>
<add>{
<add> require.extensions['.bar'] = common.mustNotCall();
<add> require(dotfile);
<add> delete require.cache[dotfile];
<add> delete require.extensions['.bar'];
<add> Module._pathCache = Object.create(null);
<add>}
<add>
<add>{
<add> require.extensions['.bar'] = common.mustCall();
<add> require.extensions['.foo.bar'] = common.mustNotCall();
<add> require(dotfileWithExtension);
<add> delete require.cache[dotfileWithExtension];
<add> delete require.extensions['.bar'];
<add> delete require.extensions['.foo.bar'];
<add> Module._pathCache = Object.create(null);
<add>} | 3 |
Text | Text | fix a couple of misspelled words. | 6a43b4d61ebf5194aeef822a06ae33082be11000 | <ide><path>guide/english/ruby/common-array-methods/index.md
<ide> array[5]
<ide> ```
<ide>
<ide> #### .pop
<del>The .pop method will permantently remove the last element of an array and return this element:
<add>The .pop method will permanently remove the last element of an array and return this element:
<ide>
<ide> ``` ruby
<ide> array.pop
<ide> => [0, 1, 2, 3]
<ide> ```
<ide>
<ide> #### .shift
<del>The .shift method will permantently remove the first element of an array and return this element:
<add>The .shift method will permanently remove the first element of an array and return this element:
<ide>
<ide> ``` ruby
<ide> array.shift | 1 |
Python | Python | add autobackbone + resnetbackbone | 6b217c52e626729bd5de7142358dbaf67402bb40 | <ide><path>src/transformers/__init__.py
<ide> "MODEL_WITH_LM_HEAD_MAPPING",
<ide> "MODEL_FOR_ZERO_SHOT_OBJECT_DETECTION_MAPPING",
<ide> "AutoModel",
<add> "AutoBackbone",
<ide> "AutoModelForAudioClassification",
<ide> "AutoModelForAudioFrameClassification",
<ide> "AutoModelForAudioXVector",
<ide> "ResNetForImageClassification",
<ide> "ResNetModel",
<ide> "ResNetPreTrainedModel",
<add> "ResNetBackbone",
<ide> ]
<ide> )
<ide> _import_structure["models.retribert"].extend(
<ide> MODEL_FOR_ZERO_SHOT_OBJECT_DETECTION_MAPPING,
<ide> MODEL_MAPPING,
<ide> MODEL_WITH_LM_HEAD_MAPPING,
<add> AutoBackbone,
<ide> AutoModel,
<ide> AutoModelForAudioClassification,
<ide> AutoModelForAudioFrameClassification,
<ide> )
<ide> from .models.resnet import (
<ide> RESNET_PRETRAINED_MODEL_ARCHIVE_LIST,
<add> ResNetBackbone,
<ide> ResNetForImageClassification,
<ide> ResNetModel,
<ide> ResNetPreTrainedModel,
<ide><path>src/transformers/modeling_outputs.py
<ide> class XVectorOutput(ModelOutput):
<ide> embeddings: torch.FloatTensor = None
<ide> hidden_states: Optional[Tuple[torch.FloatTensor]] = None
<ide> attentions: Optional[Tuple[torch.FloatTensor]] = None
<add>
<add>
<add>@dataclass
<add>class BackboneOutput(ModelOutput):
<add> """
<add> Base class for outputs of backbones.
<add>
<add> Args:
<add> feature_maps (`tuple(torch.FloatTensor)` of shape `(batch_size, num_channels, height, width)`):
<add> Feature maps of the stages.
<add> """
<add>
<add> feature_maps: Tuple[torch.FloatTensor] = None
<ide><path>src/transformers/models/auto/__init__.py
<ide> "MODEL_WITH_LM_HEAD_MAPPING",
<ide> "MODEL_FOR_ZERO_SHOT_OBJECT_DETECTION_MAPPING",
<ide> "AutoModel",
<add> "AutoBackbone",
<ide> "AutoModelForAudioClassification",
<ide> "AutoModelForAudioFrameClassification",
<ide> "AutoModelForAudioXVector",
<ide> MODEL_FOR_ZERO_SHOT_OBJECT_DETECTION_MAPPING,
<ide> MODEL_MAPPING,
<ide> MODEL_WITH_LM_HEAD_MAPPING,
<add> AutoBackbone,
<ide> AutoModel,
<ide> AutoModelForAudioClassification,
<ide> AutoModelForAudioFrameClassification,
<ide><path>src/transformers/models/auto/modeling_auto.py
<ide> ]
<ide> )
<ide>
<add>MODEL_FOR_BACKBONE_MAPPING_NAMES = OrderedDict(
<add> [
<add> # Backbone mapping
<add> ("resnet", "ResNetBackbone"),
<add> ]
<add>)
<add>
<ide> MODEL_MAPPING = _LazyAutoMapping(CONFIG_MAPPING_NAMES, MODEL_MAPPING_NAMES)
<ide> MODEL_FOR_PRETRAINING_MAPPING = _LazyAutoMapping(CONFIG_MAPPING_NAMES, MODEL_FOR_PRETRAINING_MAPPING_NAMES)
<ide> MODEL_WITH_LM_HEAD_MAPPING = _LazyAutoMapping(CONFIG_MAPPING_NAMES, MODEL_WITH_LM_HEAD_MAPPING_NAMES)
<ide> )
<ide> MODEL_FOR_AUDIO_XVECTOR_MAPPING = _LazyAutoMapping(CONFIG_MAPPING_NAMES, MODEL_FOR_AUDIO_XVECTOR_MAPPING_NAMES)
<ide>
<add>MODEL_FOR_BACKBONE_MAPPING = _LazyAutoMapping(CONFIG_MAPPING_NAMES, MODEL_FOR_BACKBONE_MAPPING_NAMES)
<add>
<ide>
<ide> class AutoModel(_BaseAutoModelClass):
<ide> _model_mapping = MODEL_MAPPING
<ide> class AutoModelForAudioXVector(_BaseAutoModelClass):
<ide> _model_mapping = MODEL_FOR_AUDIO_XVECTOR_MAPPING
<ide>
<ide>
<add>class AutoBackbone(_BaseAutoModelClass):
<add> _model_mapping = MODEL_FOR_BACKBONE_MAPPING
<add>
<add>
<ide> AutoModelForAudioXVector = auto_class_update(AutoModelForAudioXVector, head_doc="audio retrieval via x-vector")
<ide>
<ide>
<ide><path>src/transformers/models/resnet/__init__.py
<ide> "ResNetForImageClassification",
<ide> "ResNetModel",
<ide> "ResNetPreTrainedModel",
<add> "ResNetBackbone",
<ide> ]
<ide>
<ide> try:
<ide> else:
<ide> from .modeling_resnet import (
<ide> RESNET_PRETRAINED_MODEL_ARCHIVE_LIST,
<add> ResNetBackbone,
<ide> ResNetForImageClassification,
<ide> ResNetModel,
<ide> ResNetPreTrainedModel,
<ide><path>src/transformers/models/resnet/configuration_resnet.py
<ide> class ResNetConfig(PretrainedConfig):
<ide> are supported.
<ide> downsample_in_first_stage (`bool`, *optional*, defaults to `False`):
<ide> If `True`, the first stage will downsample the inputs using a `stride` of 2.
<add> out_features (`List[str]`, *optional*):
<add> If used as backbone, list of features to output. Can be any of `"stem"`, `"stage1"`, `"stage2"`,
<add> `"stage3"`, `"stage4"`.
<ide>
<ide> Example:
<ide> ```python
<ide> def __init__(
<ide> layer_type="bottleneck",
<ide> hidden_act="relu",
<ide> downsample_in_first_stage=False,
<add> out_features=None,
<ide> **kwargs
<ide> ):
<ide> super().__init__(**kwargs)
<ide> def __init__(
<ide> self.layer_type = layer_type
<ide> self.hidden_act = hidden_act
<ide> self.downsample_in_first_stage = downsample_in_first_stage
<add> self.stage_names = ["stem"] + [f"stage{idx}" for idx in range(1, len(depths) + 1)]
<add> if out_features is not None:
<add> if not isinstance(out_features, list):
<add> raise ValueError("out_features should be a list")
<add> for feature in out_features:
<add> if feature not in self.stage_names:
<add> raise ValueError(
<add> f"Feature {feature} is not a valid feature name. Valid names are {self.stage_names}"
<add> )
<add> self.out_features = out_features
<ide>
<ide>
<ide> class ResNetOnnxConfig(OnnxConfig):
<ide><path>src/transformers/models/resnet/modeling_resnet.py
<ide>
<ide> from ...activations import ACT2FN
<ide> from ...modeling_outputs import (
<add> BackboneOutput,
<ide> BaseModelOutputWithNoAttention,
<ide> BaseModelOutputWithPoolingAndNoAttention,
<ide> ImageClassifierOutputWithNoAttention,
<ide> )
<ide> from ...modeling_utils import PreTrainedModel
<del>from ...utils import add_code_sample_docstrings, add_start_docstrings, add_start_docstrings_to_model_forward, logging
<add>from ...utils import (
<add> add_code_sample_docstrings,
<add> add_start_docstrings,
<add> add_start_docstrings_to_model_forward,
<add> logging,
<add> replace_return_docstrings,
<add>)
<ide> from .configuration_resnet import ResNetConfig
<ide>
<ide>
<ide> def forward(
<ide> return (loss,) + output if loss is not None else output
<ide>
<ide> return ImageClassifierOutputWithNoAttention(loss=loss, logits=logits, hidden_states=outputs.hidden_states)
<add>
<add>
<add>@add_start_docstrings(
<add> """
<add> ResNet backbone, to be used with frameworks like DETR and MaskFormer.
<add> """,
<add> RESNET_START_DOCSTRING,
<add>)
<add>class ResNetBackbone(ResNetPreTrainedModel):
<add> def __init__(self, config):
<add> super().__init__(config)
<add>
<add> self.stage_names = config.stage_names
<add> self.resnet = ResNetModel(config)
<add>
<add> self.out_features = config.out_features
<add>
<add> self.out_feature_channels = {
<add> "stem": config.embedding_size,
<add> "stage1": config.hidden_sizes[0],
<add> "stage2": config.hidden_sizes[1],
<add> "stage3": config.hidden_sizes[2],
<add> "stage4": config.hidden_sizes[3],
<add> }
<add>
<add> # initialize weights and apply final processing
<add> self.post_init()
<add>
<add> @property
<add> def channels(self):
<add> return [self.out_feature_channels[name] for name in self.out_features]
<add>
<add> @add_start_docstrings_to_model_forward(RESNET_INPUTS_DOCSTRING)
<add> @replace_return_docstrings(output_type=BackboneOutput, config_class=_CONFIG_FOR_DOC)
<add> def forward(self, pixel_values: Optional[torch.FloatTensor] = None) -> BackboneOutput:
<add> """
<add> Returns:
<add>
<add> Examples:
<add>
<add> ```python
<add> >>> from transformers import AutoImageProcessor, AutoBackbone
<add> >>> import torch
<add> >>> from PIL import Image
<add> >>> import requests
<add>
<add> >>> url = "http://images.cocodataset.org/val2017/000000039769.jpg"
<add> >>> image = Image.open(requests.get(url, stream=True).raw)
<add>
<add> >>> processor = AutoImageProcessor.from_pretrained("microsoft/resnet-50")
<add> >>> model = AutoBackbone.from_pretrained("microsoft/resnet-50")
<add>
<add> >>> inputs = processor(image, return_tensors="pt")
<add>
<add> >>> outputs = model(**inputs)
<add> ```"""
<add> outputs = self.resnet(pixel_values, output_hidden_states=True, return_dict=True)
<add>
<add> hidden_states = outputs.hidden_states
<add>
<add> feature_maps = ()
<add> for idx, stage in enumerate(self.stage_names):
<add> if stage in self.out_features:
<add> feature_maps += (hidden_states[idx],)
<add>
<add> return BackboneOutput(feature_maps=feature_maps)
<ide><path>src/transformers/utils/dummy_pt_objects.py
<ide> def load_tf_weights_in_albert(*args, **kwargs):
<ide> MODEL_WITH_LM_HEAD_MAPPING = None
<ide>
<ide>
<add>class AutoBackbone(metaclass=DummyObject):
<add> _backends = ["torch"]
<add>
<add> def __init__(self, *args, **kwargs):
<add> requires_backends(self, ["torch"])
<add>
<add>
<ide> class AutoModel(metaclass=DummyObject):
<ide> _backends = ["torch"]
<ide>
<ide> def load_tf_weights_in_rembert(*args, **kwargs):
<ide> RESNET_PRETRAINED_MODEL_ARCHIVE_LIST = None
<ide>
<ide>
<add>class ResNetBackbone(metaclass=DummyObject):
<add> _backends = ["torch"]
<add>
<add> def __init__(self, *args, **kwargs):
<add> requires_backends(self, ["torch"])
<add>
<add>
<ide> class ResNetForImageClassification(metaclass=DummyObject):
<ide> _backends = ["torch"]
<ide>
<ide><path>tests/models/resnet/test_modeling_resnet.py
<ide> import torch
<ide> from torch import nn
<ide>
<del> from transformers import ResNetForImageClassification, ResNetModel
<add> from transformers import ResNetBackbone, ResNetForImageClassification, ResNetModel
<ide> from transformers.models.resnet.modeling_resnet import RESNET_PRETRAINED_MODEL_ARCHIVE_LIST
<ide>
<ide>
<ide> def __init__(
<ide> hidden_act="relu",
<ide> num_labels=3,
<ide> scope=None,
<add> out_features=["stage1", "stage2", "stage3", "stage4"],
<ide> ):
<ide> self.parent = parent
<ide> self.batch_size = batch_size
<ide> def __init__(
<ide> self.num_labels = num_labels
<ide> self.scope = scope
<ide> self.num_stages = len(hidden_sizes)
<add> self.out_features = out_features
<ide>
<ide> def prepare_config_and_inputs(self):
<ide> pixel_values = floats_tensor([self.batch_size, self.num_channels, self.image_size, self.image_size])
<ide> def get_config(self):
<ide> depths=self.depths,
<ide> hidden_act=self.hidden_act,
<ide> num_labels=self.num_labels,
<add> out_features=self.out_features,
<ide> )
<ide>
<ide> def create_and_check_model(self, config, pixel_values, labels):
<ide> def create_and_check_for_image_classification(self, config, pixel_values, labels
<ide> result = model(pixel_values, labels=labels)
<ide> self.parent.assertEqual(result.logits.shape, (self.batch_size, self.num_labels))
<ide>
<add> def create_and_check_backbone(self, config, pixel_values, labels):
<add> model = ResNetBackbone(config=config)
<add> model.to(torch_device)
<add> model.eval()
<add> result = model(pixel_values)
<add>
<add> # verify hidden states
<add> self.parent.assertEqual(len(result.feature_maps), len(config.out_features))
<add> self.parent.assertListEqual(list(result.feature_maps[0].shape), [3, 10, 8, 8])
<add>
<add> # verify channels
<add> self.parent.assertListEqual(model.channels, config.hidden_sizes)
<add>
<ide> def prepare_config_and_inputs_for_common(self):
<ide> config_and_inputs = self.prepare_config_and_inputs()
<ide> config, pixel_values, labels = config_and_inputs
<ide> def test_model(self):
<ide> config_and_inputs = self.model_tester.prepare_config_and_inputs()
<ide> self.model_tester.create_and_check_model(*config_and_inputs)
<ide>
<add> def test_backbone(self):
<add> config_and_inputs = self.model_tester.prepare_config_and_inputs()
<add> self.model_tester.create_and_check_backbone(*config_and_inputs)
<add>
<ide> def test_initialization(self):
<ide> config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()
<ide>
<ide><path>utils/check_repo.py
<ide> # Being in this list is an exception and should **not** be the rule.
<ide> IGNORE_NON_TESTED = PRIVATE_MODELS.copy() + [
<ide> # models to ignore for not tested
<add> "ResNetBackbone", # Backbones have their own tests.
<ide> "CLIPSegDecoder", # Building part of bigger (tested) model.
<ide> "TableTransformerEncoder", # Building part of bigger (tested) model.
<ide> "TableTransformerDecoder", # Building part of bigger (tested) model.
<ide> def find_all_documented_objects():
<ide> "PyTorchBenchmarkArguments",
<ide> "TensorFlowBenchmark",
<ide> "TensorFlowBenchmarkArguments",
<add> "ResNetBackbone",
<add> "AutoBackbone",
<ide> ]
<ide>
<ide> | 10 |
Text | Text | fix typos in scene graph article | 393bb5838e519dca89e1f9c9c67163db99a7d826 | <ide><path>threejs/lessons/threejs-scenegraph.md
<del>Title: Three.js Scenegraph
<add>Title: Three.js Scene Graph
<ide> Description: What's a scene graph?
<ide>
<ide> This article is part of a series of articles about three.js. The
<ide> first article is [three.js fundamentals](threejs-fundamentals.html). If
<del>you haven't read yet you might want to consider starting there.
<add>you haven't read that yet you might want to consider starting there.
<ide>
<ide> Three.js's core is arguably its scene graph. A scene graph in a 3D
<ide> engine is a hierarchy of nodes in a graph where each node represents
<ide> in the Earth's "local space" even though relative to the sun you are
<ide> spinning around the earth at around 1000 miles per hour and around
<ide> the sun at around 67,000 miles per hour. Your position in the solar
<ide> system is similar to that of the moon above but you don't have to concern
<del>yourself. You just worry about your position relative to the earth its
<add>yourself. You just worry about your position relative to the earth in its
<ide> "local space".
<ide>
<ide> Let's take it one step at a time. Imagine we want to make
<ide> a diagram of the sun, earth, and moon. We'll start with the sun by
<ide> just making a sphere and putting it at the origin. Note: We're using
<del>sun, earth, moon as a demonstration of how to use a scenegraph. Of course
<add>sun, earth, moon as a demonstration of how to use a scene graph. Of course
<ide> the real sun, earth, and moon use physics but for our purposes we'll
<del>fake it with a scenegraph.
<add>fake it with a scene graph.
<ide>
<ide> ```js
<del>// an array of objects who's rotation to update
<add>// an array of objects whose rotation to update
<ide> const objects = [];
<ide>
<ide> // use just one sphere for everything
<ide> represents light that eminates from a single point.
<ide> ```
<ide>
<ide> To make it easy to see we're going to put the camera directly above the origin
<del>looking down. The easist way to do that us to use the `lookAt` function. The `lookAt`
<del>function will orient the camera from its position to "lookAt the position
<add>looking down. The easist way to do that is to use the `lookAt` function. The `lookAt`
<add>function will orient the camera from its position to "look at" the position
<ide> we pass to `lookAt`. Before we do that though we need to tell the camera
<ide> which way the top of the camera is facing or rather which way is "up" for the
<del>camera. For most situations positive Y being up is good enough but since
<add>camera. For most situations positive Y being up is good enough but since
<ide> we are looking straight down we need to tell the camera that positive Z is up.
<ide>
<ide> ```js
<ide> going around the sun. Let's make the earth a child of the sun
<ide> ```js
<ide> -scene.add(earthMesh);
<ide> +sunMesh.add(earthMesh);
<del>```
<add>```
<ide>
<ide> and...
<ide>
<ide> earthMesh.position.x = 10;
<ide> objects.push(earthMesh);
<ide> ```
<ide>
<del>Here we made a `Object3D`. Like a `Mesh` it is also a node in the scene graph
<add>Here we made an `Object3D`. Like a `Mesh` it is also a node in the scene graph
<ide> but unlike a `Mesh` it has no material or geometry. It just represents a local space.
<ide>
<ide> Our new scene graph looks like this
<ide> objects.push(earthMesh);
<ide> +objects.push(moonMesh);
<ide> ```
<ide>
<del>Again we added another invisible scene graph node, a `Object3D` called `earthOrbit`
<add>Again we added another invisible scene graph node, an `Object3D` called `earthOrbit`
<ide> and added both the `earthMesh` and the `moonMesh` to it. The new scene graph looks like
<ide> this.
<ide>
<ide> all the spheres. Otherwise a sphere might draw over them and cover them up.
<ide>
<ide> {{{example url="../threejs-scenegraph-sun-earth-moon-axes.html" }}}
<ide>
<del>We can see the
<add>We can see the
<ide> <span style="color:red">x (red)</span> and
<ide> <span style="color:blue">z (blue)</span> axes. Since we are looking
<ide> straight down and each of our objects is only rotating around its
<ide> y axis we don't see much of the <span style="color:green">y (green)</span> axes.
<ide> It might be hard to see some of them as there are 2 pairs of overlapping axes. Both the `sunMesh`
<ide> and the `solarSystem` are at the same position. Similarly the `earthMesh` and
<ide> `earthOrbit` are at the same position. Let's add some simple controls to allow us
<del>to turn them on/off for each node.
<add>to turn them on/off for each node.
<ide> While we're at it let's also add another helper called the `GridHelper`. It
<ide> makes a 2D grid on the X,Z plane. By default the grid is 10x10 units.
<ide>
<ide> some function to add the helpers for each node
<ide> +makeAxisGrid(moonMesh, 'moonMesh');
<ide> ```
<ide>
<del>`makeAxisGrid` makes a `AxisGridHelper` which is class we'll create
<add>`makeAxisGrid` makes an `AxisGridHelper` which is a class we'll create
<ide> to make dat.GUI happy. Like it says above dat.GUI
<ide> will automagically make a UI that manipulates the named property
<ide> of some object. It will create a different UI depending on the type
<ide> the visible property of both the `AxesHelper` and `GridHelper` for a node.
<ide> // Turns both axes and grid visible on/off
<ide> // dat.GUI requires a property that returns a bool
<ide> // to decide to make a checkbox so we make a setter
<del>// can getter for `visible` which we can tell dat.GUI
<add>// and getter for `visible` which we can tell dat.GUI
<ide> // to look at.
<ide> class AxisGridHelper {
<ide> constructor(node, units = 10) {
<ide> Another example is a human in a game world.
<ide>
<ide> You can see the scene graph gets pretty complex for a human. In fact
<ide> that scene graph above is simplified. For example you might extend it
<del>to cover the every finger (at least another 28 nodes) and every toe
<del>(yet another 28 nodes) plus ones for the and jaw, the eyes and maybe more.
<add>to cover every finger (at least another 28 nodes) and every toe
<add>(yet another 28 nodes) plus ones for the face and jaw, the eyes and maybe more.
<ide>
<del>Let's make one semi-complex scenegraph. We'll make a tank. The tank will have
<add>Let's make one semi-complex scene graph. We'll make a tank. The tank will have
<ide> 6 wheels and a turret. The tank will follow a path. There will be a sphere that
<ide> moves around and the tank will target the sphere.
<ide>
<ide> Here's the scene graph. The meshes are colored in green, the `Object3D`s in blue,
<del>and the lights in gold, and the cameras in purple. One camera has not been added
<add>the lights in gold, and the cameras in purple. One camera has not been added
<ide> to the scene graph.
<ide>
<ide> <div class="threejs_center"><img src="resources/images/scenegraph-tank.svg" style="width: 800px;"></div>
<ide> targetMaterial.emissive.setHSL(time * 10 % 1, 1, .25);
<ide> targetMaterial.color.setHSL(time * 10 % 1, 1, .25);
<ide> ```
<ide>
<del>For the tank there's an `Object3D` called `tank` which used to move everything
<add>For the tank there's an `Object3D` called `tank` which is used to move everything
<ide> below it around. The code uses a `SplineCurve` which it can ask for positions
<ide> along that curve. 0.0 is the start of the curve. 1.0 is the end of the curve. It
<ide> asks for the current position where it puts the tank. It then asks for a
<ide> a child of `targetBob` and just aimed the camera itself it would be inside the
<ide> target.
<ide>
<ide> ```js
<del>// make the targetCameraPivot look at the at the tank
<add>// make the targetCameraPivot look at the tank
<ide> tank.getWorldPosition(targetPosition);
<ide> targetCameraPivot.lookAt(targetPosition);
<ide> ``` | 1 |
Python | Python | add print statement to avoid code quality problem | efb619235ca0574720f84e90a8b0f1b451da0b02 | <ide><path>tests/test_modeling_tf_transfo_xl.py
<ide> def test_lm_generate_transfo_xl_wt103(self):
<ide> # TODO: add this test when trasnfo-xl-lmhead is implemented
<ide> with self.assertRaises(NotImplementedError):
<ide> model.generate(input_ids, max_length=200, do_sample=False)
<add> print(expected_output_ids)
<ide> # self.assertListEqual(output_ids[0].numpy().tolist(), expected_output_ids) TODO: (PVP) to add when transfo-xl is implemented | 1 |
Text | Text | add comparison operators | e6b4de233a5044558a58f2735aa5cab9311c5e8b | <ide><path>guide/english/python/bool-function/index.md
<ide> It takes one argument, `x`. `x` is converted using the standard <a href='https:/
<ide>
<ide> If `x` is false or omitted, this returns `False`; otherwise it returns `True`.
<ide>
<add>## Comparison Operators
<add>
<add>There are three Boolean Operators they are `and`, `or`, and `not`.
<add>
<add>### and
<add>
<add>| expression | result |
<add>| --- | --- |
<add>| true `and` true | true |
<add>| true `and` false | false |
<add>| false `and` true | false |
<add>| false `and` false | false |
<add>
<add>### or
<add>
<add>| expression | result |
<add>| --- | --- |
<add>| true `or` true | true |
<add>| true `or` false | true |
<add>| false `or` true | true |
<add>| false `or` false | false |
<add>
<add>### not
<add>
<add>| expression | result |
<add>| --- | --- |
<add>| `not` true | false |
<add>| `not` false | true |
<add>
<ide> ## Code Sample
<ide>
<ide> print(bool(4 > 2)) # Returns True as 4 is greater than 2
<ide> If `x` is false or omitted, this returns `False`; otherwise it returns `True`.
<ide>
<ide>  <a href='https://repl.it/CVCS/2' target='_blank' rel='nofollow'>Run Code</a>
<ide>
<del><a href='https://docs.python.org/3/library/functions.html#bool' target='_blank' rel='nofollow'>Official Docs</a>
<ide>\ No newline at end of file
<add><a href='https://docs.python.org/3/library/functions.html#bool' target='_blank' rel='nofollow'>Official Docs</a> | 1 |
Ruby | Ruby | fix audit annotations for casks | 72a79d934e9c52a8468a6eab4708f36228fff2c6 | <ide><path>Library/Homebrew/cask/audit.rb
<ide> def warnings
<ide> @warnings ||= []
<ide> end
<ide>
<del> def add_error(message)
<del> errors << message
<add> def add_error(message, location: nil)
<add> errors << ({ message: message, location: location })
<ide> end
<ide>
<del> def add_warning(message)
<add> def add_warning(message, location: nil)
<ide> if strict?
<del> add_error message
<add> add_error message, location: location
<ide> else
<del> warnings << message
<add> warnings << ({ message: message, location: location })
<ide> end
<ide> end
<ide>
<ide> def summary(include_passed: false, include_warnings: true)
<ide> summary = ["audit for #{cask}: #{result}"]
<ide>
<ide> errors.each do |error|
<del> summary << " #{Formatter.error("-")} #{error}"
<add> summary << " #{Formatter.error("-")} #{error[:message]}"
<ide> end
<ide>
<ide> if include_warnings
<ide> warnings.each do |warning|
<del> summary << " #{Formatter.warning("-")} #{warning}"
<add> summary << " #{Formatter.warning("-")} #{warning[:message]}"
<ide> end
<ide> end
<ide>
<ide><path>Library/Homebrew/cask/cmd/audit.rb
<ide> def run
<ide> display_failures_only: args.display_failures_only?,
<ide> )
<ide>
<del> self.class.print_annotations(results)
<del>
<ide> failed_casks = results.reject { |_, result| result[:errors].empty? }.map(&:first)
<ide> return if failed_casks.empty?
<ide>
<ide> def self.audit_casks(
<ide>
<ide> casks.map do |cask|
<ide> odebug "Auditing Cask #{cask}"
<del> [cask, Auditor.audit(cask, **options)]
<add> [cask.sourcefile_path, Auditor.audit(cask, **options)]
<ide> end.to_h
<ide> end
<del>
<del> def self.print_annotations(results)
<del> return unless ENV["GITHUB_ACTIONS"]
<del>
<del> results.each do |cask, result|
<del> cask_path = cask.sourcefile_path
<del> annotations = (result[:warnings].map { |w| [:warning, w] } + result[:errors].map { |e| [:error, e] })
<del> .map { |type, message| GitHub::Actions::Annotation.new(type, message, file: cask_path) }
<del>
<del> annotations.each do |annotation|
<del> puts annotation if annotation.relevant?
<del> end
<del> end
<del> end
<ide> end
<ide> end
<ide> end
<ide><path>Library/Homebrew/dev-cmd/audit.rb
<ide> def audit
<ide> spdx_license_data = SPDX.license_data
<ide> spdx_exception_data = SPDX.exception_data
<ide> new_formula_problem_lines = []
<del> audit_formulae.sort.each do |f|
<add> formula_results = audit_formulae.sort.map do |f|
<ide> only = only_cops ? ["style"] : args.only
<ide> options = {
<ide> new_formula: new_formula,
<ide> def audit
<ide>
<ide> fa = FormulaAuditor.new(f, **options)
<ide> fa.audit
<del> next if fa.problems.empty? && fa.new_formula_problems.empty?
<del>
<del> formula_count += 1
<del> problem_count += fa.problems.size
<del> problem_lines = format_problem_lines(fa.problems)
<del> corrected_problem_count += options.fetch(:style_offenses, []).count(&:corrected?)
<del> new_formula_problem_lines += format_problem_lines(fa.new_formula_problems)
<del> if args.display_filename?
<del> puts problem_lines.map { |s| "#{f.path}: #{s}" }
<del> else
<del> puts "#{f.full_name}:", problem_lines.map { |s| " #{s}" }
<del> end
<del>
<del> next unless ENV["GITHUB_ACTIONS"]
<ide>
<del> (fa.problems + fa.new_formula_problems).each do |message:, location:|
<del> annotation = GitHub::Actions::Annotation.new(
<del> :error, message, file: f.path, line: location&.line, column: location&.column
<del> )
<del> puts annotation if annotation.relevant?
<add> if fa.problems.any? || fa.new_formula_problems.any?
<add> formula_count += 1
<add> problem_count += fa.problems.size
<add> problem_lines = format_problem_lines(fa.problems)
<add> corrected_problem_count += options.fetch(:style_offenses, []).count(&:corrected?)
<add> new_formula_problem_lines += format_problem_lines(fa.new_formula_problems)
<add> if args.display_filename?
<add> puts problem_lines.map { |s| "#{f.path}: #{s}" }
<add> else
<add> puts "#{f.full_name}:", problem_lines.map { |s| " #{s}" }
<add> end
<ide> end
<del> end
<ide>
<del> casks_results = if audit_casks.empty?
<del> []
<add> [f.path, { errors: fa.problems + fa.new_formula_problems, warnings: [] }]
<add> end.to_h
<add>
<add> cask_results = if audit_casks.empty?
<add> {}
<ide> else
<ide> require "cask/cmd/audit"
<ide>
<ide> def audit
<ide> )
<ide> end
<ide>
<del> failed_casks = casks_results.reject { |_, result| result[:errors].empty? }
<add> failed_casks = cask_results.reject { |_, result| result[:errors].empty? }
<ide>
<ide> cask_count = failed_casks.count
<ide>
<ide> cask_problem_count = failed_casks.sum { |_, result| result[:warnings].count + result[:errors].count }
<ide> new_formula_problem_count += new_formula_problem_lines.count
<ide> total_problems_count = problem_count + new_formula_problem_count + cask_problem_count + tap_problem_count
<del> return unless total_problems_count.positive?
<ide>
<del> puts new_formula_problem_lines.map { |s| " #{s}" }
<add> if total_problems_count.positive?
<add> puts new_formula_problem_lines.map { |s| " #{s}" }
<add>
<add> errors_summary = "#{total_problems_count} #{"problem".pluralize(total_problems_count)}"
<ide>
<del> errors_summary = "#{total_problems_count} #{"problem".pluralize(total_problems_count)}"
<add> error_sources = []
<add> error_sources << "#{formula_count} #{"formula".pluralize(formula_count)}" if formula_count.positive?
<add> error_sources << "#{cask_count} #{"cask".pluralize(cask_count)}" if cask_count.positive?
<add> error_sources << "#{tap_count} #{"tap".pluralize(tap_count)}" if tap_count.positive?
<ide>
<del> error_sources = []
<del> error_sources << "#{formula_count} #{"formula".pluralize(formula_count)}" if formula_count.positive?
<del> error_sources << "#{cask_count} #{"cask".pluralize(cask_count)}" if cask_count.positive?
<del> error_sources << "#{tap_count} #{"tap".pluralize(tap_count)}" if tap_count.positive?
<add> errors_summary += " in #{error_sources.to_sentence}" if error_sources.any?
<ide>
<del> errors_summary += " in #{error_sources.to_sentence}" if error_sources.any?
<add> errors_summary += " detected"
<ide>
<del> errors_summary += " detected"
<add> if corrected_problem_count.positive?
<add> errors_summary += ", #{corrected_problem_count} #{"problem".pluralize(corrected_problem_count)} corrected"
<add> end
<add>
<add> ofail errors_summary
<add> end
<ide>
<del> if corrected_problem_count.positive?
<del> errors_summary += ", #{corrected_problem_count} #{"problem".pluralize(corrected_problem_count)} corrected"
<add> return unless ENV["GITHUB_ACTIONS"]
<add>
<add> annotations = formula_results.merge(cask_results).flat_map do |path, result|
<add> (
<add> result[:warnings].map { |w| [:warning, w] } +
<add> result[:errors].map { |e| [:error, e] }
<add> ).map do |type, problem|
<add> GitHub::Actions::Annotation.new(
<add> type,
<add> problem[:message],
<add> file: path,
<add> line: problem[:location]&.line,
<add> column: problem[:location]&.column,
<add> )
<add> end
<ide> end
<ide>
<del> ofail errors_summary
<add> annotations.each do |annotation|
<add> puts annotation if annotation.relevant?
<add> end
<ide> end
<ide>
<ide> def format_problem_lines(problems)
<ide><path>Library/Homebrew/test/cask/audit_spec.rb
<ide> require "cask/audit"
<ide>
<ide> describe Cask::Audit, :cask do
<del> def include_msg?(messages, msg)
<add> def include_msg?(problems, msg)
<ide> if msg.is_a?(Regexp)
<del> Array(messages).any? { |m| m =~ msg }
<add> Array(problems).any? { |problem| problem[:message] =~ msg }
<ide> else
<del> Array(messages).include?(msg)
<add> Array(problems).any? { |problem| problem[:message] == msg }
<ide> end
<ide> end
<ide> | 4 |
PHP | PHP | use realpath when checking modification time | ffcd5d8f6621c77dfb98f390452d5980bf329092 | <ide><path>src/Illuminate/Filesystem/Filesystem.php
<ide> public function size($path)
<ide> */
<ide> public function lastModified($path)
<ide> {
<del> return filemtime($path);
<add> return filemtime(realpath($path));
<ide> }
<ide>
<ide> /** | 1 |
Python | Python | add tf bert files | 11fae9e636d9d0ae45d578e05193c52ff40f6e30 | <ide><path>pytorch_transformers/file_utils.py
<ide> def url_to_filename(url, etag=None):
<ide> Convert `url` into a hashed filename in a repeatable way.
<ide> If `etag` is specified, append its hash to the url's, delimited
<ide> by a period.
<add> If the url ends with .h5 (Keras HDF5 weights) ands '.h5' to the name
<add> so that TF 2.0 can identify it as a HDF5 file
<add> (see https://github.com/tensorflow/tensorflow/blob/00fad90125b18b80fe054de1055770cfb8fe4ba3/tensorflow/python/keras/engine/network.py#L1380)
<ide> """
<ide> url_bytes = url.encode('utf-8')
<ide> url_hash = sha256(url_bytes)
<ide> def url_to_filename(url, etag=None):
<ide> etag_hash = sha256(etag_bytes)
<ide> filename += '.' + etag_hash.hexdigest()
<ide>
<add> if url.endswith('.h5'):
<add> filename += '.h5'
<add>
<ide> return filename
<ide>
<ide>
<ide><path>pytorch_transformers/modeling_tf_bert.py
<add># coding=utf-8
<add># Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team.
<add># Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
<add>#
<add># Licensed under the Apache License, Version 2.0 (the "License");
<add># you may not use this file except in compliance with the License.
<add># You may obtain a copy of the License at
<add>#
<add># http://www.apache.org/licenses/LICENSE-2.0
<add>#
<add># Unless required by applicable law or agreed to in writing, software
<add># distributed under the License is distributed on an "AS IS" BASIS,
<add># WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<add># See the License for the specific language governing permissions and
<add># limitations under the License.
<add>""" TF 2.0 BERT model. """
<add>
<add>from __future__ import absolute_import, division, print_function, unicode_literals
<add>
<add>import json
<add>import logging
<add>import math
<add>import os
<add>import sys
<add>from io import open
<add>
<add>import numpy as np
<add>import tensorflow as tf
<add>
<add>from .configuration_bert import BertConfig
<add>from .modeling_tf_utils import TFPreTrainedModel
<add>from .file_utils import add_start_docstrings
<add>
<add>logger = logging.getLogger(__name__)
<add>
<add>
<add>TF_BERT_PRETRAINED_MODEL_ARCHIVE_MAP = {
<add> 'bert-base-uncased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-uncased-tf_model.h5",
<add> 'bert-large-uncased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-large-uncased-tf_model.h5",
<add> 'bert-base-cased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-cased-tf_model.h5",
<add> 'bert-large-cased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-large-cased-tf_model.h5",
<add> 'bert-base-multilingual-uncased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-multilingual-uncased-tf_model.h5",
<add> 'bert-base-multilingual-cased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-multilingual-cased-tf_model.h5",
<add> 'bert-base-chinese': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-chinese-tf_model.h5",
<add> 'bert-base-german-cased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-german-cased-tf_model.h5",
<add> 'bert-large-uncased-whole-word-masking': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-large-uncased-whole-word-masking-tf_model.h5",
<add> 'bert-large-cased-whole-word-masking': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-large-cased-whole-word-masking-tf_model.h5",
<add> 'bert-large-uncased-whole-word-masking-finetuned-squad': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-large-uncased-whole-word-masking-finetuned-squad-tf_model.h5",
<add> 'bert-large-cased-whole-word-masking-finetuned-squad': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-large-cased-whole-word-masking-finetuned-squad-tf_model.h5",
<add> 'bert-base-cased-finetuned-mrpc': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-cased-finetuned-mrpc-tf_model.h5",
<add>}
<add>
<add>
<add>def load_pt_weights_in_bert(tf_model, config, pytorch_checkpoint_path):
<add> """ Load pytorch checkpoints in a TF 2.0 model and save it using HDF5 format
<add> We use HDF5 to easily do transfer learning
<add> (see https://github.com/tensorflow/tensorflow/blob/ee16fcac960ae660e0e4496658a366e2f745e1f0/tensorflow/python/keras/engine/network.py#L1352-L1357).
<add> """
<add> try:
<add> import re
<add> import torch
<add> import numpy
<add> from tensorflow.python.keras import backend as K
<add> except ImportError:
<add> logger.error("Loading a PyTorch model in TensorFlow, requires PyTorch to be installed. Please see "
<add> "https://pytorch.org/ for installation instructions.")
<add> raise
<add>
<add> pt_path = os.path.abspath(pytorch_checkpoint_path)
<add> logger.info("Loading PyTorch weights from {}".format(pt_path))
<add> # Load pytorch model
<add> state_dict = torch.load(pt_path, map_location='cpu')
<add>
<add> inputs_list = [[7, 6, 0, 0, 1], [1, 2, 3, 0, 0], [0, 0, 0, 4, 5]]
<add> tf_inputs = tf.constant(inputs_list)
<add> tfo = tf_model(tf_inputs, training=False) # build the network
<add>
<add> symbolic_weights = tf_model.trainable_weights + tf_model.non_trainable_weights
<add> weight_value_tuples = []
<add> for symbolic_weight in symbolic_weights:
<add> name = symbolic_weight.name
<add> name = name.replace('cls_mlm', 'cls') # We had to split this layer in two in the TF model to be
<add> name = name.replace('cls_nsp', 'cls') # able to do transfer learning (Keras only allow to remove full layers)
<add> name = name.replace(':0', '')
<add> name = name.replace('layer_', 'layer/')
<add> name = name.split('/')
<add> name = name[1:]
<add>
<add> transpose = bool(name[-1] == 'kernel')
<add> if name[-1] == 'kernel' or name[-1] == 'embeddings':
<add> name[-1] = 'weight'
<add>
<add> name = '.'.join(name)
<add> assert name in state_dict
<add> array = state_dict[name].numpy()
<add>
<add> if transpose:
<add> array = numpy.transpose(array)
<add>
<add> try:
<add> assert list(symbolic_weight.shape) == list(array.shape)
<add> except AssertionError as e:
<add> e.args += (symbolic_weight.shape, array.shape)
<add> raise e
<add>
<add> logger.info("Initialize TF weight {}".format(symbolic_weight.name))
<add>
<add> weight_value_tuples.append((symbolic_weight, array))
<add>
<add> K.batch_set_value(weight_value_tuples)
<add>
<add> tfo = tf_model(tf_inputs, training=False) # Make sure restore ops are run
<add> return tf_model
<add>
<add>
<add>def gelu(x):
<add> """Gaussian Error Linear Unit.
<add> This is a smoother version of the RELU.
<add> Original paper: https://arxiv.org/abs/1606.08415
<add> Args:
<add> x: float Tensor to perform activation.
<add> Returns:
<add> `x` with the GELU activation applied.
<add> """
<add> cdf = 0.5 * (1.0 + tf.tanh(
<add> (np.sqrt(2 / np.pi) * (x + 0.044715 * tf.pow(x, 3)))))
<add> return x * cdf
<add>
<add>
<add>def swish(x):
<add> return x * tf.sigmoid(x)
<add>
<add>
<add>ACT2FN = {"gelu": tf.keras.layers.Activation(gelu),
<add> "relu": tf.keras.activations.relu,
<add> "swish": tf.keras.layers.Activation(swish)}
<add>
<add>
<add>class TFBertEmbeddings(tf.keras.layers.Layer):
<add> """Construct the embeddings from word, position and token_type embeddings.
<add> """
<add> def __init__(self, config, **kwargs):
<add> super(TFBertEmbeddings, self).__init__(**kwargs)
<add> self.word_embeddings = tf.keras.layers.Embedding(config.vocab_size, config.hidden_size, name='word_embeddings')
<add> self.position_embeddings = tf.keras.layers.Embedding(config.max_position_embeddings, config.hidden_size, name='position_embeddings')
<add> self.token_type_embeddings = tf.keras.layers.Embedding(config.type_vocab_size, config.hidden_size, name='token_type_embeddings')
<add>
<add> # self.LayerNorm is not snake-cased to stick with TensorFlow model variable name and be able to load
<add> # any TensorFlow checkpoint file
<add> self.LayerNorm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name='LayerNorm')
<add> self.dropout = tf.keras.layers.Dropout(config.hidden_dropout_prob)
<add>
<add> def call(self, inputs, training=False):
<add> input_ids, position_ids, token_type_ids = inputs
<add>
<add> seq_length = tf.shape(input_ids)[1]
<add> if position_ids is None:
<add> position_ids = tf.range(seq_length, dtype=tf.int32)[tf.newaxis, :]
<add> if token_type_ids is None:
<add> token_type_ids = tf.fill(tf.shape(input_ids), 0)
<add>
<add> words_embeddings = self.word_embeddings(input_ids)
<add> position_embeddings = self.position_embeddings(position_ids)
<add> token_type_embeddings = self.token_type_embeddings(token_type_ids)
<add>
<add> embeddings = words_embeddings + position_embeddings + token_type_embeddings
<add> embeddings = self.LayerNorm(embeddings)
<add> if training:
<add> embeddings = self.dropout(embeddings)
<add> return embeddings
<add>
<add>
<add>class TFBertSelfAttention(tf.keras.layers.Layer):
<add> def __init__(self, config, **kwargs):
<add> super(TFBertSelfAttention, self).__init__(**kwargs)
<add> if config.hidden_size % config.num_attention_heads != 0:
<add> raise ValueError(
<add> "The hidden size (%d) is not a multiple of the number of attention "
<add> "heads (%d)" % (config.hidden_size, config.num_attention_heads))
<add> self.output_attentions = config.output_attentions
<add>
<add> self.num_attention_heads = config.num_attention_heads
<add> assert config.hidden_size % config.num_attention_heads == 0
<add> self.attention_head_size = int(config.hidden_size / config.num_attention_heads)
<add> self.all_head_size = self.num_attention_heads * self.attention_head_size
<add>
<add> self.query = tf.keras.layers.Dense(self.all_head_size, name='query')
<add> self.key = tf.keras.layers.Dense(self.all_head_size, name='key')
<add> self.value = tf.keras.layers.Dense(self.all_head_size, name='value')
<add>
<add> self.dropout = tf.keras.layers.Dropout(config.attention_probs_dropout_prob)
<add>
<add> def transpose_for_scores(self, x, batch_size):
<add> x = tf.reshape(x, (batch_size, -1, self.num_attention_heads, self.attention_head_size))
<add> return tf.transpose(x, perm=[0, 2, 1, 3])
<add>
<add> def call(self, inputs, training=False):
<add> hidden_states, attention_mask, head_mask = inputs
<add>
<add> batch_size = tf.shape(hidden_states)[0]
<add> mixed_query_layer = self.query(hidden_states)
<add> mixed_key_layer = self.key(hidden_states)
<add> mixed_value_layer = self.value(hidden_states)
<add>
<add> query_layer = self.transpose_for_scores(mixed_query_layer, batch_size)
<add> key_layer = self.transpose_for_scores(mixed_key_layer, batch_size)
<add> value_layer = self.transpose_for_scores(mixed_value_layer, batch_size)
<add>
<add> # Take the dot product between "query" and "key" to get the raw attention scores.
<add> attention_scores = tf.matmul(query_layer, key_layer, transpose_b=True) # (batch size, num_heads, seq_len_q, seq_len_k)
<add> dk = tf.cast(tf.shape(key_layer)[-1], tf.float32) # scale attention_scores
<add> attention_scores = attention_scores / tf.math.sqrt(dk)
<add> # Apply the attention mask is (precomputed for all layers in TFBertModel call() function)
<add> attention_scores = attention_scores + attention_mask
<add>
<add> # Normalize the attention scores to probabilities.
<add> attention_probs = tf.nn.softmax(attention_scores, axis=-1)
<add>
<add> if training:
<add> # This is actually dropping out entire tokens to attend to, which might
<add> # seem a bit unusual, but is taken from the original Transformer paper.
<add> attention_probs = self.dropout(attention_probs)
<add>
<add> # Mask heads if we want to
<add> if head_mask is not None:
<add> attention_probs = attention_probs * head_mask
<add>
<add> context_layer = tf.matmul(attention_probs, value_layer)
<add>
<add> context_layer = tf.transpose(context_layer, perm=[0, 2, 1, 3])
<add> context_layer = tf.reshape(context_layer,
<add> (batch_size, -1, self.all_head_size)) # (batch_size, seq_len_q, all_head_size)
<add>
<add> outputs = (context_layer, attention_probs) if self.output_attentions else (context_layer,)
<add> return outputs
<add>
<add>
<add>class TFBertSelfOutput(tf.keras.layers.Layer):
<add> def __init__(self, config, **kwargs):
<add> super(TFBertSelfOutput, self).__init__(**kwargs)
<add> self.dense = tf.keras.layers.Dense(config.hidden_size, name='dense')
<add> self.LayerNorm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name='LayerNorm')
<add> self.dropout = tf.keras.layers.Dropout(config.hidden_dropout_prob)
<add>
<add> def call(self, inputs, training=False):
<add> hidden_states, input_tensor = inputs
<add>
<add> hidden_states = self.dense(hidden_states)
<add> if training:
<add> hidden_states = self.dropout(hidden_states)
<add> hidden_states = self.LayerNorm(hidden_states + input_tensor)
<add> return hidden_states
<add>
<add>
<add>class TFBertAttention(tf.keras.layers.Layer):
<add> def __init__(self, config, **kwargs):
<add> super(TFBertAttention, self).__init__(**kwargs)
<add> self.self_attention = TFBertSelfAttention(config, name='self')
<add> self.dense_output = TFBertSelfOutput(config, name='output')
<add>
<add> def prune_heads(self, heads):
<add> raise NotImplementedError
<add>
<add> def call(self, inputs, training=False):
<add> input_tensor, attention_mask, head_mask = inputs
<add>
<add> self_outputs = self.self_attention([input_tensor, attention_mask, head_mask], training=training)
<add> attention_output = self.dense_output([self_outputs[0], input_tensor], training=training)
<add> outputs = (attention_output,) + self_outputs[1:] # add attentions if we output them
<add> return outputs
<add>
<add>
<add>class TFBertIntermediate(tf.keras.layers.Layer):
<add> def __init__(self, config, **kwargs):
<add> super(TFBertIntermediate, self).__init__(**kwargs)
<add> self.dense = tf.keras.layers.Dense(config.intermediate_size, name='dense')
<add> if isinstance(config.hidden_act, str) or (sys.version_info[0] == 2 and isinstance(config.hidden_act, unicode)):
<add> self.intermediate_act_fn = ACT2FN[config.hidden_act]
<add> else:
<add> self.intermediate_act_fn = config.hidden_act
<add>
<add> def call(self, hidden_states):
<add> hidden_states = self.dense(hidden_states)
<add> hidden_states = self.intermediate_act_fn(hidden_states)
<add> return hidden_states
<add>
<add>
<add>class TFBertOutput(tf.keras.layers.Layer):
<add> def __init__(self, config, **kwargs):
<add> super(TFBertOutput, self).__init__(**kwargs)
<add> self.dense = tf.keras.layers.Dense(config.hidden_size, name='dense')
<add> self.LayerNorm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name='LayerNorm')
<add> self.dropout = tf.keras.layers.Dropout(config.hidden_dropout_prob)
<add>
<add> def call(self, inputs, training=False):
<add> hidden_states, input_tensor = inputs
<add>
<add> hidden_states = self.dense(hidden_states)
<add> if training:
<add> hidden_states = self.dropout(hidden_states)
<add> hidden_states = self.LayerNorm(hidden_states + input_tensor)
<add> return hidden_states
<add>
<add>
<add>class TFBertLayer(tf.keras.layers.Layer):
<add> def __init__(self, config, **kwargs):
<add> super(TFBertLayer, self).__init__(**kwargs)
<add> self.attention = TFBertAttention(config, name='attention')
<add> self.intermediate = TFBertIntermediate(config, name='intermediate')
<add> self.bert_output = TFBertOutput(config, name='output')
<add>
<add> def call(self, inputs, training=False):
<add> hidden_states, attention_mask, head_mask = inputs
<add>
<add> attention_outputs = self.attention([hidden_states, attention_mask, head_mask], training=training)
<add> attention_output = attention_outputs[0]
<add> intermediate_output = self.intermediate(attention_output)
<add> layer_output = self.bert_output([intermediate_output, attention_output], training=training)
<add> outputs = (layer_output,) + attention_outputs[1:] # add attentions if we output them
<add> return outputs
<add>
<add>
<add>class TFBertEncoder(tf.keras.layers.Layer):
<add> def __init__(self, config, **kwargs):
<add> super(TFBertEncoder, self).__init__(**kwargs)
<add> self.output_attentions = config.output_attentions
<add> self.output_hidden_states = config.output_hidden_states
<add> self.layer = [TFBertLayer(config, name='layer_{}'.format(i)) for i in range(config.num_hidden_layers)]
<add>
<add> def call(self, inputs, training=False):
<add> hidden_states, attention_mask, head_mask = inputs
<add>
<add> all_hidden_states = ()
<add> all_attentions = ()
<add> for i, layer_module in enumerate(self.layer):
<add> if self.output_hidden_states:
<add> all_hidden_states = all_hidden_states + (hidden_states,)
<add>
<add> layer_outputs = layer_module([hidden_states, attention_mask, head_mask[i]], training=training)
<add> hidden_states = layer_outputs[0]
<add>
<add> if self.output_attentions:
<add> all_attentions = all_attentions + (layer_outputs[1],)
<add>
<add> # Add last layer
<add> if self.output_hidden_states:
<add> all_hidden_states = all_hidden_states + (hidden_states,)
<add>
<add> outputs = (hidden_states,)
<add> if self.output_hidden_states:
<add> outputs = outputs + (all_hidden_states,)
<add> if self.output_attentions:
<add> outputs = outputs + (all_attentions,)
<add> return outputs # outputs, (hidden states), (attentions)
<add>
<add>
<add>class TFBertPooler(tf.keras.layers.Layer):
<add> def __init__(self, config, **kwargs):
<add> super(TFBertPooler, self).__init__(**kwargs)
<add> self.dense = tf.keras.layers.Dense(config.hidden_size, activation='tanh', name='dense')
<add>
<add> def call(self, hidden_states):
<add> # We "pool" the model by simply taking the hidden state corresponding
<add> # to the first token.
<add> first_token_tensor = hidden_states[:, 0]
<add> pooled_output = self.dense(first_token_tensor)
<add> return pooled_output
<add>
<add>
<add>class TFBertPredictionHeadTransform(tf.keras.layers.Layer):
<add> def __init__(self, config, **kwargs):
<add> super(TFBertPredictionHeadTransform, self).__init__(**kwargs)
<add> self.dense = tf.keras.layers.Dense(config.hidden_size, name='dense')
<add> if isinstance(config.hidden_act, str) or (sys.version_info[0] == 2 and isinstance(config.hidden_act, unicode)):
<add> self.transform_act_fn = ACT2FN[config.hidden_act]
<add> else:
<add> self.transform_act_fn = config.hidden_act
<add> self.LayerNorm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name='LayerNorm')
<add>
<add> def call(self, hidden_states):
<add> hidden_states = self.dense(hidden_states)
<add> hidden_states = self.transform_act_fn(hidden_states)
<add> hidden_states = self.LayerNorm(hidden_states)
<add> return hidden_states
<add>
<add>
<add>class TFBertLMPredictionHead(tf.keras.layers.Layer):
<add> def __init__(self, config, **kwargs):
<add> super(TFBertLMPredictionHead, self).__init__(**kwargs)
<add> self.vocab_size = config.vocab_size
<add> self.transform = TFBertPredictionHeadTransform(config, name='transform')
<add>
<add> # The output weights are the same as the input embeddings, but there is
<add> # an output-only bias for each token.
<add> self.decoder = tf.keras.layers.Dense(config.vocab_size, use_bias=False, name='decoder')
<add>
<add> def build(self, input_shape):
<add> self.bias = self.add_weight(shape=(self.vocab_size,),
<add> initializer='zeros',
<add> trainable=True,
<add> name='bias')
<add>
<add> def call(self, hidden_states):
<add> hidden_states = self.transform(hidden_states)
<add> hidden_states = self.decoder(hidden_states) + self.bias
<add> return hidden_states
<add>
<add>
<add>class TFBertMLMHead(tf.keras.layers.Layer):
<add> def __init__(self, config, **kwargs):
<add> super(TFBertMLMHead, self).__init__(**kwargs)
<add> self.predictions = TFBertLMPredictionHead(config, name='predictions')
<add>
<add> def call(self, sequence_output):
<add> prediction_scores = self.predictions(sequence_output)
<add> return prediction_scores
<add>
<add>
<add>class TFBertNSPHead(tf.keras.layers.Layer):
<add> def __init__(self, config, **kwargs):
<add> super(TFBertNSPHead, self).__init__(**kwargs)
<add> self.seq_relationship = tf.keras.layers.Dense(2, name='seq_relationship')
<add>
<add> def call(self, pooled_output):
<add> seq_relationship_score = self.seq_relationship(pooled_output)
<add> return seq_relationship_score
<add>
<add>
<add>class TFBertMainLayer(tf.keras.layers.Layer):
<add> def __init__(self, config, **kwargs):
<add> super(TFBertMainLayer, self).__init__(**kwargs)
<add> self.num_hidden_layers = config.num_hidden_layers
<add>
<add> self.embeddings = TFBertEmbeddings(config, name='embeddings')
<add> self.encoder = TFBertEncoder(config, name='encoder')
<add> self.pooler = TFBertPooler(config, name='pooler')
<add>
<add> # self.apply(self.init_weights) # TODO check weights initialization
<add>
<add> def _resize_token_embeddings(self, new_num_tokens):
<add> raise NotImplementedError
<add>
<add> def _prune_heads(self, heads_to_prune):
<add> """ Prunes heads of the model.
<add> heads_to_prune: dict of {layer_num: list of heads to prune in this layer}
<add> See base class PreTrainedModel
<add> """
<add> raise NotImplementedError
<add>
<add> def call(self, inputs, training=False):
<add> if not isinstance(inputs, (dict, tuple, list)):
<add> input_ids = inputs
<add> attention_mask, head_mask, position_ids, token_type_ids = None, None, None, None
<add> elif isinstance(inputs, (tuple, list)):
<add> input_ids = inputs[0]
<add> attention_mask = inputs[1] if len(inputs) > 1 else None
<add> token_type_ids = inputs[2] if len(inputs) > 2 else None
<add> position_ids = inputs[3] if len(inputs) > 3 else None
<add> head_mask = inputs[4] if len(inputs) > 4 else None
<add> assert len(inputs) <= 5, "Too many inputs."
<add> else:
<add> input_ids = inputs.pop('input_ids')
<add> attention_mask = inputs.pop('attention_mask', None)
<add> token_type_ids = inputs.pop('token_type_ids', None)
<add> position_ids = inputs.pop('position_ids', None)
<add> head_mask = inputs.pop('head_mask', None)
<add> assert len(inputs) == 0, "Unexpected inputs detected: {}. Check inputs dict key names.".format(list(inputs.keys()))
<add>
<add> if attention_mask is None:
<add> attention_mask = tf.fill(tf.shape(input_ids), 1)
<add> if token_type_ids is None:
<add> token_type_ids = tf.fill(tf.shape(input_ids), 0)
<add>
<add> # We create a 3D attention mask from a 2D tensor mask.
<add> # Sizes are [batch_size, 1, 1, to_seq_length]
<add> # So we can broadcast to [batch_size, num_heads, from_seq_length, to_seq_length]
<add> # this attention mask is more simple than the triangular masking of causal attention
<add> # used in OpenAI GPT, we just need to prepare the broadcast dimension here.
<add> extended_attention_mask = attention_mask[:, tf.newaxis, tf.newaxis, :]
<add>
<add> # Since attention_mask is 1.0 for positions we want to attend and 0.0 for
<add> # masked positions, this operation will create a tensor which is 0.0 for
<add> # positions we want to attend and -10000.0 for masked positions.
<add> # Since we are adding it to the raw scores before the softmax, this is
<add> # effectively the same as removing these entirely.
<add>
<add> extended_attention_mask = tf.cast(extended_attention_mask, tf.float32)
<add> extended_attention_mask = (1.0 - extended_attention_mask) * -10000.0
<add>
<add> # Prepare head mask if needed
<add> # 1.0 in head_mask indicate we keep the head
<add> # attention_probs has shape bsz x n_heads x N x N
<add> # input head_mask has shape [num_heads] or [num_hidden_layers x num_heads]
<add> # and head_mask is converted to shape [num_hidden_layers x batch x num_heads x seq_length x seq_length]
<add> if not head_mask is None:
<add> raise NotImplementedError
<add> else:
<add> head_mask = [None] * self.num_hidden_layers
<add> # head_mask = tf.constant([0] * self.num_hidden_layers)
<add>
<add> embedding_output = self.embeddings([input_ids, position_ids, token_type_ids], training=training)
<add> encoder_outputs = self.encoder([embedding_output, extended_attention_mask, head_mask], training=training)
<add>
<add> sequence_output = encoder_outputs[0]
<add> pooled_output = self.pooler(sequence_output)
<add>
<add> outputs = (sequence_output, pooled_output,) + encoder_outputs[1:] # add hidden_states and attentions if they are here
<add> return outputs # sequence_output, pooled_output, (hidden_states), (attentions)
<add>
<add>class TFBertPreTrainedModel(TFPreTrainedModel):
<add> """ An abstract class to handle weights initialization and
<add> a simple interface for dowloading and loading pretrained models.
<add> """
<add> config_class = BertConfig
<add> pretrained_model_archive_map = TF_BERT_PRETRAINED_MODEL_ARCHIVE_MAP
<add> load_pt_weights = load_pt_weights_in_bert
<add> base_model_prefix = "bert"
<add>
<add> def __init__(self, *inputs, **kwargs):
<add> super(TFBertPreTrainedModel, self).__init__(*inputs, **kwargs)
<add>
<add> def init_weights(self, module):
<add> """ Initialize the weights.
<add> """
<add> raise NotImplementedError
<add>
<add>
# Shared head-docstring for all TF 2.0 BERT model classes (consumed by
# `add_start_docstrings`). Fixed here: `inputs_ids` -> `input_ids` (with its
# previously-missing closing backtick) and "associaed" -> "associated".
BERT_START_DOCSTRING = r""" The BERT model was proposed in
    `BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding`_
    by Jacob Devlin, Ming-Wei Chang, Kenton Lee and Kristina Toutanova. It's a bidirectional transformer
    pre-trained using a combination of masked language modeling objective and next sentence prediction
    on a large corpus comprising the Toronto Book Corpus and Wikipedia.

    This model is a tf.keras.Model `tf.keras.Model`_ sub-class. Use it as a regular TF 2.0 Keras Model and
    refer to the TF 2.0 documentation for all matter related to general usage and behavior.

    .. _`BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding`:
        https://arxiv.org/abs/1810.04805

    .. _`tf.keras.Model`:
        https://www.tensorflow.org/versions/r2.0/api_docs/python/tf/keras/Model

    Important note on the model inputs:
        The inputs of the TF 2.0 models are slightly different from the PyTorch ones since
        TF 2.0 Keras doesn't accept named arguments with defaults values for input Tensor.
        More precisely, input Tensors are gathered in the first arguments of the model call function: `model(inputs)`.
        There are three possibilities to gather and feed the inputs to the model:

        - a single Tensor with input_ids only and nothing else: `model(input_ids)`
        - a list of varying length with one or several input Tensors IN THE ORDER given in the docstring:
            `model([input_ids, attention_mask])` or `model([input_ids, attention_mask, token_type_ids])`
        - a dictionary with one or several input Tensors associated to the input names given in the docstring:
            `model({'input_ids': input_ids, 'token_type_ids': token_type_ids})`

    Parameters:
        config (:class:`~pytorch_transformers.BertConfig`): Model configuration class with all the parameters of the model.
            Initializing with a config file does not load the weights associated with the model, only the configuration.
            Check out the :meth:`~pytorch_transformers.PreTrainedModel.from_pretrained` method to load the model weights.
"""
<add>
# Shared inputs-docstring for all TF 2.0 BERT model classes. Fixed here: the
# input types were documented as ``torch.LongTensor``/``torch.FloatTensor``,
# which is misleading in a TF 2.0 file — they are TF tensors (``tf.Tensor``).
BERT_INPUTS_DOCSTRING = r"""
    Inputs:
        **input_ids**: ``tf.Tensor`` of shape ``(batch_size, sequence_length)``:
            Indices of input sequence tokens in the vocabulary.
            To match pre-training, BERT input sequence should be formatted with [CLS] and [SEP] tokens as follows:

            (a) For sequence pairs:

                ``tokens:         [CLS] is this jack ##son ##ville ? [SEP] no it is not . [SEP]``

                ``token_type_ids:   0   0  0    0    0     0       0   0   1  1  1  1   1   1``

            (b) For single sequences:

                ``tokens:         [CLS] the dog is hairy . [SEP]``

                ``token_type_ids:   0   0   0   0  0     0   0``

            Bert is a model with absolute position embeddings so it's usually advised to pad the inputs on
            the right rather than the left.

            Indices can be obtained using :class:`pytorch_transformers.BertTokenizer`.
            See :func:`pytorch_transformers.PreTrainedTokenizer.encode` and
            :func:`pytorch_transformers.PreTrainedTokenizer.convert_tokens_to_ids` for details.
        **attention_mask**: (`optional`) ``tf.Tensor`` of shape ``(batch_size, sequence_length)``:
            Mask to avoid performing attention on padding token indices.
            Mask values selected in ``[0, 1]``:
            ``1`` for tokens that are NOT MASKED, ``0`` for MASKED tokens.
        **token_type_ids**: (`optional`) ``tf.Tensor`` of shape ``(batch_size, sequence_length)``:
            Segment token indices to indicate first and second portions of the inputs.
            Indices are selected in ``[0, 1]``: ``0`` corresponds to a `sentence A` token, ``1``
            corresponds to a `sentence B` token
            (see `BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding`_ for more details).
        **position_ids**: (`optional`) ``tf.Tensor`` of shape ``(batch_size, sequence_length)``:
            Indices of positions of each input sequence tokens in the position embeddings.
            Selected in the range ``[0, config.max_position_embeddings - 1]``.
        **head_mask**: (`optional`) ``tf.Tensor`` of shape ``(num_heads,)`` or ``(num_layers, num_heads)``:
            Mask to nullify selected heads of the self-attention modules.
            Mask values selected in ``[0, 1]``:
            ``1`` indicates the head is **not masked**, ``0`` indicates the head is **masked**.
"""
<add>
@add_start_docstrings("The bare Bert Model transformer outputting raw hidden-states without any specific head on top.",
                      BERT_START_DOCSTRING, BERT_INPUTS_DOCSTRING)
class TFBertModel(TFBertPreTrainedModel):
    r"""
    Outputs: `Tuple` comprising various elements depending on the configuration (config) and inputs:
        **last_hidden_state**: ``tf.Tensor`` of shape ``(batch_size, sequence_length, hidden_size)``
            Sequence of hidden-states at the output of the last layer of the model.
        **pooler_output**: ``tf.Tensor`` of shape ``(batch_size, hidden_size)``
            Last layer hidden-state of the first token of the sequence (classification token)
            further processed by a Linear layer and a Tanh activation function. The Linear
            layer weights are trained from the next sentence prediction (classification)
            objective during Bert pretraining. This output is usually *not* a good summary
            of the semantic content of the input, you're often better with averaging or pooling
            the sequence of hidden-states for the whole input sequence.
        **hidden_states**: (`optional`, returned when ``config.output_hidden_states=True``)
            list of ``tf.Tensor`` (one for the output of each layer + the output of the embeddings)
            of shape ``(batch_size, sequence_length, hidden_size)``:
            Hidden-states of the model at the output of each layer plus the initial embedding outputs.
        **attentions**: (`optional`, returned when ``config.output_attentions=True``)
            list of ``tf.Tensor`` (one for each layer) of shape ``(batch_size, num_heads, sequence_length, sequence_length)``:
            Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads.

    Examples::

        tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')
        model = TFBertModel.from_pretrained('bert-base-uncased')
        input_ids = tf.constant(tokenizer.encode("Hello, my dog is cute"))[None, :]  # Batch size 1
        outputs = model(input_ids)
        last_hidden_states = outputs[0]  # The last hidden-state is the first element of the output tuple

    """
    def __init__(self, config):
        super(TFBertModel, self).__init__(config)
        # All transformer logic lives in the shared main layer; this class only
        # exposes it under the standard pretrained-model interface.
        self.bert = TFBertMainLayer(config, name='bert')

    def call(self, inputs, training=False):
        # Delegate directly to the main layer; `training` toggles dropout behavior.
        outputs = self.bert(inputs, training=training)
        return outputs
<add>
<add>
@add_start_docstrings("""Bert Model with two heads on top as done during the pre-training:
    a `masked language modeling` head and a `next sentence prediction (classification)` head. """,
    BERT_START_DOCSTRING, BERT_INPUTS_DOCSTRING)
class TFBertForPreTraining(TFBertPreTrainedModel):
    r"""
    **masked_lm_labels**: (`optional`) ``torch.LongTensor`` of shape ``(batch_size, sequence_length)``:
        Labels for computing the masked language modeling loss.
        Indices should be in ``[-1, 0, ..., config.vocab_size]`` (see ``input_ids`` docstring)
        Tokens with indices set to ``-1`` are ignored (masked), the loss is only computed for the tokens with labels
        in ``[0, ..., config.vocab_size]``
    **next_sentence_label**: (`optional`) ``torch.LongTensor`` of shape ``(batch_size,)``:
        Labels for computing the next sequence prediction (classification) loss. Input should be a sequence pair (see ``input_ids`` docstring)
        Indices should be in ``[0, 1]``.
        ``0`` indicates sequence B is a continuation of sequence A,
        ``1`` indicates sequence B is a random sequence.

    NOTE: the TF 2.0 ``call`` below does not accept label arguments yet (the loss
    computation is still commented out), so no loss is returned for now.

    Outputs: `Tuple` comprising various elements depending on the configuration (config) and inputs:
        **loss**: (`optional`, returned when both ``masked_lm_labels`` and ``next_sentence_label`` are provided) ``torch.FloatTensor`` of shape ``(1,)``:
            Total loss as the sum of the masked language modeling loss and the next sequence prediction (classification) loss.
        **prediction_scores**: ``torch.FloatTensor`` of shape ``(batch_size, sequence_length, config.vocab_size)``
            Prediction scores of the language modeling head (scores for each vocabulary token before SoftMax).
        **seq_relationship_scores**: ``torch.FloatTensor`` of shape ``(batch_size, sequence_length, 2)``
            Prediction scores of the next sequence prediction (classification) head (scores of True/False continuation before SoftMax).
        **hidden_states**: (`optional`, returned when ``config.output_hidden_states=True``)
            list of ``torch.FloatTensor`` (one for the output of each layer + the output of the embeddings)
            of shape ``(batch_size, sequence_length, hidden_size)``:
            Hidden-states of the model at the output of each layer plus the initial embedding outputs.
        **attentions**: (`optional`, returned when ``config.output_attentions=True``)
            list of ``torch.FloatTensor`` (one for each layer) of shape ``(batch_size, num_heads, sequence_length, sequence_length)``:
            Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads.

    Examples::

        tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')
        model = TFBertForPreTraining.from_pretrained('bert-base-uncased')
        input_ids = tf.constant(tokenizer.encode("Hello, my dog is cute"))[None, :]  # Batch size 1
        outputs = model(input_ids)
        prediction_scores, seq_relationship_scores = outputs[:2]

    """
    def __init__(self, config):
        super(TFBertForPreTraining, self).__init__(config)

        # Shared BERT encoder plus the two pre-training heads (MLM + NSP).
        self.bert = TFBertMainLayer(config, name='bert')
        self.cls_mlm = TFBertMLMHead(config, name='cls_mlm')
        self.cls_nsp = TFBertNSPHead(config, name='cls_nsp')

        # self.apply(self.init_weights) # TODO check added weights initialization
        self.tie_weights()

    def tie_weights(self):
        """ Make sure we are sharing the input and output embeddings.
        """
        pass # TODO add weights tying

    def call(self, inputs, training=False):
        outputs = self.bert(inputs, training=training)

        # outputs[0] is the sequence of hidden states, outputs[1] the pooled output.
        sequence_output, pooled_output = outputs[:2]
        prediction_scores = self.cls_mlm(sequence_output)
        seq_relationship_score = self.cls_nsp(pooled_output)

        # Replace the two bert outputs by the head logits, keep the optional extras.
        outputs = (prediction_scores, seq_relationship_score,) + outputs[2:]  # add hidden states and attention if they are here

        # if masked_lm_labels is not None and next_sentence_label is not None:
        #     loss_fct = CrossEntropyLoss(ignore_index=-1)
        #     masked_lm_loss = loss_fct(prediction_scores.view(-1, self.config.vocab_size), masked_lm_labels.view(-1))
        #     next_sentence_loss = loss_fct(seq_relationship_score.view(-1, 2), next_sentence_label.view(-1))
        #     total_loss = masked_lm_loss + next_sentence_loss
        #     outputs = (total_loss,) + outputs
        # TODO add example with losses using model.compile and a dictionary of losses (give names to the output layers)

        return outputs  # prediction_scores, seq_relationship_score, (hidden_states), (attentions)
<add>
<add>
@add_start_docstrings("""Bert Model with a `language modeling` head on top. """,
    BERT_START_DOCSTRING, BERT_INPUTS_DOCSTRING)
class TFBertForMaskedLM(TFBertPreTrainedModel):
    r"""
    **masked_lm_labels**: (`optional`) ``torch.LongTensor`` of shape ``(batch_size, sequence_length)``:
        Labels for computing the masked language modeling loss.
        Indices should be in ``[-1, 0, ..., config.vocab_size]`` (see ``input_ids`` docstring)
        Tokens with indices set to ``-1`` are ignored (masked), the loss is only computed for the tokens with labels
        in ``[0, ..., config.vocab_size]``

    NOTE: the TF 2.0 ``call`` below does not accept ``masked_lm_labels`` yet (the
    loss computation is still commented out), so no loss is returned for now.

    Outputs: `Tuple` comprising various elements depending on the configuration (config) and inputs:
        **loss**: (`optional`, returned when ``masked_lm_labels`` is provided) ``torch.FloatTensor`` of shape ``(1,)``:
            Masked language modeling loss.
        **prediction_scores**: ``torch.FloatTensor`` of shape ``(batch_size, sequence_length, config.vocab_size)``
            Prediction scores of the language modeling head (scores for each vocabulary token before SoftMax).
        **hidden_states**: (`optional`, returned when ``config.output_hidden_states=True``)
            list of ``torch.FloatTensor`` (one for the output of each layer + the output of the embeddings)
            of shape ``(batch_size, sequence_length, hidden_size)``:
            Hidden-states of the model at the output of each layer plus the initial embedding outputs.
        **attentions**: (`optional`, returned when ``config.output_attentions=True``)
            list of ``torch.FloatTensor`` (one for each layer) of shape ``(batch_size, num_heads, sequence_length, sequence_length)``:
            Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads.

    Examples::

        tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')
        model = TFBertForMaskedLM.from_pretrained('bert-base-uncased')
        input_ids = tf.constant(tokenizer.encode("Hello, my dog is cute"))[None, :]  # Batch size 1
        outputs = model(input_ids)
        prediction_scores = outputs[0]

    """
    def __init__(self, config):
        super(TFBertForMaskedLM, self).__init__(config)

        # Shared BERT encoder followed by the masked-language-modeling head.
        self.bert = TFBertMainLayer(config, name='bert')
        self.cls_mlm = TFBertMLMHead(config, name='cls_mlm')

        # self.apply(self.init_weights)
        self.tie_weights()

    def tie_weights(self):
        """ Make sure we are sharing the input and output embeddings.
        """
        pass # TODO add weights tying

    def call(self, inputs, training=False):
        outputs = self.bert(inputs, training=training)

        sequence_output = outputs[0]
        prediction_scores = self.cls_mlm(sequence_output)

        # outputs[1] (the pooled output) is deliberately dropped; keep the
        # optional hidden states / attentions that follow it.
        outputs = (prediction_scores,) + outputs[2:]  # Add hidden states and attention if they are here
        # if masked_lm_labels is not None:
        #     loss_fct = CrossEntropyLoss(ignore_index=-1)
        #     masked_lm_loss = loss_fct(prediction_scores.view(-1, self.config.vocab_size), masked_lm_labels.view(-1))
        #     outputs = (masked_lm_loss,) + outputs
        # TODO example with losses

        return outputs  # prediction_scores, (hidden_states), (attentions)
<add>
<add>
@add_start_docstrings("""Bert Model with a `next sentence prediction (classification)` head on top. """,
    BERT_START_DOCSTRING, BERT_INPUTS_DOCSTRING)
class TFBertForNextSentencePrediction(TFBertPreTrainedModel):
    r"""
    Outputs: `Tuple` comprising various elements depending on the configuration (config) and inputs:
        **seq_relationship_scores**: ``tf.Tensor``
            Logits of the next sequence prediction (classification) head
            (scores of True/False continuation, before SoftMax).
        **hidden_states**: (`optional`, returned when ``config.output_hidden_states=True``)
            list of ``tf.Tensor`` (one for the output of each layer + the output of the embeddings)
            of shape ``(batch_size, sequence_length, hidden_size)``:
            Hidden-states of the model at the output of each layer plus the initial embedding outputs.
        **attentions**: (`optional`, returned when ``config.output_attentions=True``)
            list of ``tf.Tensor`` (one for each layer) of shape ``(batch_size, num_heads, sequence_length, sequence_length)``:
            Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads.

    Examples::

        tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')
        model = TFBertForNextSentencePrediction.from_pretrained('bert-base-uncased')
        input_ids = tf.constant(tokenizer.encode("Hello, my dog is cute"))[None, :]  # Batch size 1
        outputs = model(input_ids)
        seq_relationship_scores = outputs[0]

    """
    def __init__(self, config):
        super(TFBertForNextSentencePrediction, self).__init__(config)

        # Shared BERT encoder followed by the binary NSP classification head.
        self.bert = TFBertMainLayer(config, name='bert')
        self.cls_nsp = TFBertNSPHead(config, name='cls_nsp')

    def call(self, inputs, training=False):
        # Run the encoder; index 1 of its outputs is the pooled [CLS] representation.
        bert_outputs = self.bert(inputs, training=training)
        relationship_logits = self.cls_nsp(bert_outputs[1])

        # Prepend the NSP logits to any optional hidden-states / attentions
        # (the sequence output and pooled output themselves are dropped).
        return (relationship_logits,) + bert_outputs[2:]
<ide><path>pytorch_transformers/modeling_tf_utils.py
<add># coding=utf-8
<add># Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team.
<add># Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
<add>#
<add># Licensed under the Apache License, Version 2.0 (the "License");
<add># you may not use this file except in compliance with the License.
<add># You may obtain a copy of the License at
<add>#
<add># http://www.apache.org/licenses/LICENSE-2.0
<add>#
<add># Unless required by applicable law or agreed to in writing, software
<add># distributed under the License is distributed on an "AS IS" BASIS,
<add># WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<add># See the License for the specific language governing permissions and
<add># limitations under the License.
<add>"""TF general model utils."""
<add>
<add>from __future__ import (absolute_import, division, print_function,
<add> unicode_literals)
<add>
<add>import logging
<add>import os
<add>
<add>import tensorflow as tf
<add>
<add>from .configuration_utils import PretrainedConfig
<add>from .file_utils import cached_path, WEIGHTS_NAME, TF_WEIGHTS_NAME
<add>
<add>logger = logging.getLogger(__name__)
<add>
<add>
class TFPreTrainedModel(tf.keras.Model):
    r""" Base class for all TF models.

        :class:`~pytorch_transformers.TFPreTrainedModel` takes care of storing the configuration of the models and handles methods for loading/downloading/saving models
        as well as a few methods commons to all models to (i) resize the input embeddings and (ii) prune heads in the self-attention heads.

        Class attributes (overridden by derived classes):
            - ``config_class``: a class derived from :class:`~pytorch_transformers.PretrainedConfig` to use as configuration class for this model architecture.
            - ``pretrained_model_archive_map``: a python ``dict`` of with `short-cut-names` (string) as keys and `url` (string) of associated pretrained weights as values.
            - ``load_pt_weights``: a python ``method`` for loading a PyTorch checkpoint in a TF 2.0 model, taking as arguments:

                - ``model``: an instance of the relevant subclass of :class:`~pytorch_transformers.TFPreTrainedModel`,
                - ``config``: an instance of the relevant subclass of :class:`~pytorch_transformers.PretrainedConfig`,
                - ``path``: a path (string) to the PyTorch checkpoint.

            - ``base_model_prefix``: a string indicating the attribute associated to the base model in derived classes of the same architecture adding modules on top of the base model.
    """
    config_class = None
    pretrained_model_archive_map = {}
    # Default no-op hook; subclasses/conversion utilities override this to load
    # PyTorch weights into the TF model when `from_pt=True`.
    load_pt_weights = lambda model, config, path: None
    base_model_prefix = ""

    def __init__(self, config, *inputs, **kwargs):
        super(TFPreTrainedModel, self).__init__()
        if not isinstance(config, PretrainedConfig):
            raise ValueError(
                "Parameter config in `{}(config)` should be an instance of class `PretrainedConfig`. "
                "To create a model from a pretrained model use "
                "`model = {}.from_pretrained(PRETRAINED_MODEL_NAME)`".format(
                    self.__class__.__name__, self.__class__.__name__
                ))
        # Save config in model so subclasses and `save_pretrained` can access it.
        self.config = config

    def _get_resized_embeddings(self, old_embeddings, new_num_tokens=None):
        """ Build a resized Embedding variable from a provided token Embedding module.
            Increasing the size will add newly initialized vectors at the end
            Reducing the size will remove vectors from the end

        Args:
            new_num_tokens: (`optional`) int
                New number of tokens in the embedding matrix.
                Increasing the size will add newly initialized vectors at the end
                Reducing the size will remove vectors from the end
                If not provided or None: return the provided token Embedding Module.
        Return:
            Pointer to the resized Embedding Module or the old Embedding Module if new_num_tokens is None

        Not implemented yet for TF 2.0 models.
        """
        raise NotImplementedError

    def _tie_or_clone_weights(self, first_module, second_module):
        """ Tie or clone module weights (not implemented yet for TF 2.0 models).
        """
        raise NotImplementedError

    def resize_token_embeddings(self, new_num_tokens=None):
        """ Resize input token embeddings matrix of the model if new_num_tokens != config.vocab_size.
            Take care of tying weights embeddings afterwards if the model class has a `tie_weights()` method.

        Arguments:

            new_num_tokens: (`optional`) int:
                New number of tokens in the embedding matrix. Increasing the size will add newly initialized vectors at the end. Reducing the size will remove vectors from the end.
                If not provided or None: does nothing and just returns a pointer to the input tokens embeddings of the model.

        Not implemented yet for TF 2.0 models.
        """
        raise NotImplementedError

    def prune_heads(self, heads_to_prune):
        """ Prunes heads of the base model.

        Arguments:

            heads_to_prune: dict with keys being selected layer indices (`int`) and associated values being the list of heads to prune in said layer (list of `int`).

        Not implemented yet for TF 2.0 models.
        """
        raise NotImplementedError

    def save_pretrained(self, save_directory):
        """ Save a model and its configuration file to a directory, so that it
            can be re-loaded using the `:func:`~pytorch_transformers.PreTrainedModel.from_pretrained`` class method.

        Not implemented yet for TF 2.0 models.
        """
        raise NotImplementedError

    @classmethod
    def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs):
        r"""Instantiate a pretrained TF 2.0 model from a pre-trained model configuration.

        The warning ``Weights from XXX not initialized from pretrained model`` means that the weights of XXX do not come pre-trained with the rest of the model.
        It is up to you to train those weights with a downstream fine-tuning task.

        The warning ``Weights from XXX not used in YYY`` means that the layer XXX is not used by YYY, therefore those weights are discarded.

        Parameters:
            pretrained_model_name_or_path: either:

                - a string with the `shortcut name` of a pre-trained model to load from cache or download, e.g.: ``bert-base-uncased``.
                - a path to a `directory` containing model weights saved using :func:`~pytorch_transformers.PreTrainedModel.save_pretrained`, e.g.: ``./my_model_directory/``.
                - a path or url to a `PyTorch state_dict save file` (e.g. `./pt_model/pytorch_model.bin`). In this case, ``from_pt`` should be set to True and a configuration object should be provided as ``config`` argument. This loading path is slower than converting the PyTorch checkpoint in a TensorFlow model using the provided conversion scripts and loading the TensorFlow model afterwards.

            model_args: (`optional`) Sequence of positional arguments:
                All remaining positional arguments will be passed to the underlying model's ``__init__`` method

            config: (`optional`) instance of a class derived from :class:`~pytorch_transformers.PretrainedConfig`:
                Configuration for the model to use instead of an automatically loaded configuration. Configuration can be automatically loaded when:

                - the model is a model provided by the library (loaded with the ``shortcut-name`` string of a pretrained model), or
                - the model was saved using :func:`~pytorch_transformers.PreTrainedModel.save_pretrained` and is reloaded by supplying the save directory.
                - the model is loaded by supplying a local directory as ``pretrained_model_name_or_path`` and a configuration JSON file named `config.json` is found in the directory.

            from_pt: (`optional`) boolean, default False:
                Load the model weights from a PyTorch state_dict save file (see docstring of pretrained_model_name_or_path argument).

            cache_dir: (`optional`) string:
                Path to a directory in which a downloaded pre-trained model
                configuration should be cached if the standard cache should not be used.

            force_download: (`optional`) boolean, default False:
                Force to (re-)download the model weights and configuration files and override the cached versions if they exists.

            proxies: (`optional`) dict, default None:
                A dictionary of proxy servers to use by protocol or endpoint, e.g.: {'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}.
                The proxies are used on each request.

            output_loading_info: (`optional`) boolean:
                Set to ``True`` to also return a dictionary containing missing keys, unexpected keys and error messages.

            kwargs: (`optional`) Remaining dictionary of keyword arguments:
                Can be used to update the configuration object (after it being loaded) and initiate the model. (e.g. ``output_attention=True``). Behave differently depending on whether a `config` is provided or automatically loaded:

                - If a configuration is provided with ``config``, ``**kwargs`` will be directly passed to the underlying model's ``__init__`` method (we assume all relevant updates to the configuration have already been done)
                - If a configuration is not provided, ``kwargs`` will be first passed to the configuration class initialization function (:func:`~pytorch_transformers.PretrainedConfig.from_pretrained`). Each key of ``kwargs`` that corresponds to a configuration attribute will be used to override said attribute with the supplied ``kwargs`` value. Remaining keys that do not correspond to any configuration attribute will be passed to the underlying model's ``__init__`` function.

        Examples::

            model = BertModel.from_pretrained('bert-base-uncased')    # Download model and configuration from S3 and cache.
            model = BertModel.from_pretrained('./test/saved_model/')  # E.g. model was saved using `save_pretrained('./test/saved_model/')`
            model = BertModel.from_pretrained('bert-base-uncased', output_attention=True)  # Update configuration during loading
            assert model.config.output_attention == True
            # Loading from a PyTorch checkpoint file instead of a TF 2.0 model (slower)
            config = BertConfig.from_json_file('./pt_model/my_pt_model_config.json')
            model = BertModel.from_pretrained('./pt_model/pytorch_model.bin', from_pt=True, config=config)

        """
        config = kwargs.pop('config', None)
        cache_dir = kwargs.pop('cache_dir', None)
        from_pt = kwargs.pop('from_pt', False)
        force_download = kwargs.pop('force_download', False)
        proxies = kwargs.pop('proxies', None)
        output_loading_info = kwargs.pop('output_loading_info', False)

        # Load config, forwarding the download options so the configuration
        # fetch honors them as well (the original code dropped `proxies` here).
        if config is None:
            config, model_kwargs = cls.config_class.from_pretrained(
                pretrained_model_name_or_path, *model_args,
                cache_dir=cache_dir, return_unused_kwargs=True,
                force_download=force_download,
                proxies=proxies,
                **kwargs
            )
        else:
            model_kwargs = kwargs

        # Resolve the weights file: shortcut name -> archive url,
        # directory -> standard filename inside it, otherwise a raw path/url.
        if pretrained_model_name_or_path in cls.pretrained_model_archive_map:
            archive_file = cls.pretrained_model_archive_map[pretrained_model_name_or_path]
        elif os.path.isdir(pretrained_model_name_or_path):
            if from_pt:
                # Load from a PyTorch checkpoint
                archive_file = os.path.join(pretrained_model_name_or_path, WEIGHTS_NAME)
            else:
                archive_file = os.path.join(pretrained_model_name_or_path, TF_WEIGHTS_NAME)
        else:
            archive_file = pretrained_model_name_or_path
        # redirect to the cache, if necessary
        try:
            resolved_archive_file = cached_path(archive_file, cache_dir=cache_dir, force_download=force_download, proxies=proxies)
        except EnvironmentError:
            if pretrained_model_name_or_path in cls.pretrained_model_archive_map:
                logger.error(
                    "Couldn't reach server at '{}' to download pretrained weights.".format(
                        archive_file))
            else:
                logger.error(
                    "Model name '{}' was not found in model name list ({}). "
                    "We assumed '{}' was a path or url but couldn't find any file "
                    "associated to this path or url.".format(
                        pretrained_model_name_or_path,
                        ', '.join(cls.pretrained_model_archive_map.keys()),
                        archive_file))
            return None
        if resolved_archive_file == archive_file:
            logger.info("loading weights file {}".format(archive_file))
        else:
            logger.info("loading weights file {} from cache at {}".format(
                archive_file, resolved_archive_file))

        # Instantiate model.
        model = cls(config, *model_args, **model_kwargs)

        if from_pt:
            # Load from a PyTorch checkpoint
            return cls.load_pt_weights(model, config, resolved_archive_file)

        # Build the network once with dummy inputs so that all variables exist
        # before Keras tries to restore them from the checkpoint.
        inputs = tf.constant([[7, 6, 0, 0, 1], [1, 2, 3, 0, 0], [0, 0, 0, 4, 5]])
        ret = model(inputs, training=False)  # build the network with dummy inputs

        # 'by_name' allow us to do transfer learning by skipping/adding layers
        # see https://github.com/tensorflow/tensorflow/blob/00fad90125b18b80fe054de1055770cfb8fe4ba3/tensorflow/python/keras/engine/network.py#L1339-L1357
        model.load_weights(resolved_archive_file, by_name=True)

        ret = model(inputs, training=False)  # Make sure restore ops are run

        # if hasattr(model, 'tie_weights'):
        #     model.tie_weights()  # TODO make sure word embedding weights are still tied

        if output_loading_info:
            # Keras `load_weights(..., by_name=True)` does not report which layers
            # were skipped or unexpected, so no detailed diagnostics are available
            # here yet. (The original code referenced undefined `missing_keys`/
            # `unexpected_keys`/`error_msgs` variables and raised a NameError.)
            loading_info = {"missing_keys": [], "unexpected_keys": [], "error_msgs": []}
            return model, loading_info

        return model
<ide><path>pytorch_transformers/tests/modeling_tf_common_test.py
<add># coding=utf-8
<add># Copyright 2019 HuggingFace Inc.
<add>#
<add># Licensed under the Apache License, Version 2.0 (the "License");
<add># you may not use this file except in compliance with the License.
<add># You may obtain a copy of the License at
<add>#
<add># http://www.apache.org/licenses/LICENSE-2.0
<add>#
<add># Unless required by applicable law or agreed to in writing, software
<add># distributed under the License is distributed on an "AS IS" BASIS,
<add># WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<add># See the License for the specific language governing permissions and
<add># limitations under the License.
<add>from __future__ import absolute_import
<add>from __future__ import division
<add>from __future__ import print_function
<add>
<add>import copy
<add>import os
<add>import shutil
<add>import json
<add>import random
<add>import uuid
<add>
<add>import unittest
<add>import logging
<add>
<add>import tensorflow as tf
<add>
<add>from pytorch_transformers import TFPreTrainedModel
<add># from pytorch_transformers.modeling_bert import BertModel, BertConfig, BERT_PRETRAINED_MODEL_ARCHIVE_MAP
<add>
<add>
<add>def _config_zero_init(config):
<add> configs_no_init = copy.deepcopy(config)
<add> for key in configs_no_init.__dict__.keys():
<add> if '_range' in key or '_std' in key:
<add> setattr(configs_no_init, key, 0.0)
<add> return configs_no_init
<add>
<add>class TFCommonTestCases:
<add>
<add> class TFCommonModelTester(unittest.TestCase):
<add>
<add> model_tester = None
<add> all_model_classes = ()
<add> test_torchscript = True
<add> test_pruning = True
<add> test_resize_embeddings = True
<add>
<add> def test_initialization(self):
<add> pass
<add> # config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()
<add>
<add> # configs_no_init = _config_zero_init(config)
<add> # for model_class in self.all_model_classes:
<add> # model = model_class(config=configs_no_init)
<add> # for name, param in model.named_parameters():
<add> # if param.requires_grad:
<add> # self.assertIn(param.data.mean().item(), [0.0, 1.0],
<add> # msg="Parameter {} of model {} seems not properly initialized".format(name, model_class))
<add>
<add>
<add> def test_attention_outputs(self):
<add> pass
<add> # config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()
<add>
<add> # for model_class in self.all_model_classes:
<add> # config.output_attentions = True
<add> # config.output_hidden_states = False
<add> # model = model_class(config)
<add> # model.eval()
<add> # outputs = model(**inputs_dict)
<add> # attentions = outputs[-1]
<add> # self.assertEqual(model.config.output_attentions, True)
<add> # self.assertEqual(model.config.output_hidden_states, False)
<add> # self.assertEqual(len(attentions), self.model_tester.num_hidden_layers)
<add> # self.assertListEqual(
<add> # list(attentions[0].shape[-3:]),
<add> # [self.model_tester.num_attention_heads,
<add> # self.model_tester.seq_length,
<add> # self.model_tester.key_len if hasattr(self.model_tester, 'key_len') else self.model_tester.seq_length])
<add> # out_len = len(outputs)
<add>
<add> # # Check attention is always last and order is fine
<add> # config.output_attentions = True
<add> # config.output_hidden_states = True
<add> # model = model_class(config)
<add> # model.eval()
<add> # outputs = model(**inputs_dict)
<add> # self.assertEqual(out_len+1, len(outputs))
<add> # self.assertEqual(model.config.output_attentions, True)
<add> # self.assertEqual(model.config.output_hidden_states, True)
<add>
<add> # attentions = outputs[-1]
<add> # self.assertEqual(len(attentions), self.model_tester.num_hidden_layers)
<add> # self.assertListEqual(
<add> # list(attentions[0].shape[-3:]),
<add> # [self.model_tester.num_attention_heads,
<add> # self.model_tester.seq_length,
<add> # self.model_tester.key_len if hasattr(self.model_tester, 'key_len') else self.model_tester.seq_length])
<add>
<add>
<add> def test_headmasking(self):
<add> pass
<add> # config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()
<add>
<add> # config.output_attentions = True
<add> # config.output_hidden_states = True
<add> # configs_no_init = _config_zero_init(config) # To be sure we have no Nan
<add> # for model_class in self.all_model_classes:
<add> # model = model_class(config=configs_no_init)
<add> # model.eval()
<add>
<add> # # Prepare head_mask
<add> # # Set require_grad after having prepared the tensor to avoid error (leaf variable has been moved into the graph interior)
<add> # head_mask = torch.ones(self.model_tester.num_hidden_layers, self.model_tester.num_attention_heads)
<add> # head_mask[0, 0] = 0
<add> # head_mask[-1, :-1] = 0
<add> # head_mask.requires_grad_(requires_grad=True)
<add> # inputs = inputs_dict.copy()
<add> # inputs['head_mask'] = head_mask
<add>
<add> # outputs = model(**inputs)
<add>
<add> # # Test that we can get a gradient back for importance score computation
<add> # output = sum(t.sum() for t in outputs[0])
<add> # output = output.sum()
<add> # output.backward()
<add> # multihead_outputs = head_mask.grad
<add>
<add> # attentions = outputs[-1]
<add> # hidden_states = outputs[-2]
<add>
<add> # # Remove Nan
<add>
<add> # self.assertIsNotNone(multihead_outputs)
<add> # self.assertEqual(len(multihead_outputs), self.model_tester.num_hidden_layers)
<add> # self.assertAlmostEqual(
<add> # attentions[0][..., 0, :, :].flatten().sum().item(), 0.0)
<add> # self.assertNotEqual(
<add> # attentions[0][..., -1, :, :].flatten().sum().item(), 0.0)
<add> # self.assertNotEqual(
<add> # attentions[1][..., 0, :, :].flatten().sum().item(), 0.0)
<add> # self.assertAlmostEqual(
<add> # attentions[-1][..., -2, :, :].flatten().sum().item(), 0.0)
<add> # self.assertNotEqual(
<add> # attentions[-1][..., -1, :, :].flatten().sum().item(), 0.0)
<add>
<add>
<add> def test_head_pruning(self):
<add> pass
<add> # if not self.test_pruning:
<add> # return
<add>
<add> # config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()
<add>
<add> # for model_class in self.all_model_classes:
<add> # config.output_attentions = True
<add> # config.output_hidden_states = False
<add> # model = model_class(config=config)
<add> # model.eval()
<add> # heads_to_prune = {0: list(range(1, self.model_tester.num_attention_heads)),
<add> # -1: [0]}
<add> # model.prune_heads(heads_to_prune)
<add> # outputs = model(**inputs_dict)
<add>
<add> # attentions = outputs[-1]
<add>
<add> # self.assertEqual(
<add> # attentions[0].shape[-3], 1)
<add> # self.assertEqual(
<add> # attentions[1].shape[-3], self.model_tester.num_attention_heads)
<add> # self.assertEqual(
<add> # attentions[-1].shape[-3], self.model_tester.num_attention_heads - 1)
<add>
<add>
<add> def test_hidden_states_output(self):
<add> pass
<add> # config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()
<add>
<add> # for model_class in self.all_model_classes:
<add> # config.output_hidden_states = True
<add> # config.output_attentions = False
<add> # model = model_class(config)
<add> # model.eval()
<add> # outputs = model(**inputs_dict)
<add> # hidden_states = outputs[-1]
<add> # self.assertEqual(model.config.output_attentions, False)
<add> # self.assertEqual(model.config.output_hidden_states, True)
<add> # self.assertEqual(len(hidden_states), self.model_tester.num_hidden_layers + 1)
<add> # self.assertListEqual(
<add> # list(hidden_states[0].shape[-2:]),
<add> # [self.model_tester.seq_length, self.model_tester.hidden_size])
<add>
<add>
<add> def test_resize_tokens_embeddings(self):
<add> pass
<add> # original_config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()
<add> # if not self.test_resize_embeddings:
<add> # return
<add>
<add> # for model_class in self.all_model_classes:
<add> # config = copy.deepcopy(original_config)
<add> # model = model_class(config)
<add>
<add> # model_vocab_size = config.vocab_size
<add> # # Retrieve the embeddings and clone theme
<add> # model_embed = model.resize_token_embeddings(model_vocab_size)
<add> # cloned_embeddings = model_embed.weight.clone()
<add>
<add> # # Check that resizing the token embeddings with a larger vocab size increases the model's vocab size
<add> # model_embed = model.resize_token_embeddings(model_vocab_size + 10)
<add> # self.assertEqual(model.config.vocab_size, model_vocab_size + 10)
<add> # # Check that it actually resizes the embeddings matrix
<add> # self.assertEqual(model_embed.weight.shape[0], cloned_embeddings.shape[0] + 10)
<add>
<add> # # Check that resizing the token embeddings with a smaller vocab size decreases the model's vocab size
<add> # model_embed = model.resize_token_embeddings(model_vocab_size - 15)
<add> # self.assertEqual(model.config.vocab_size, model_vocab_size - 15)
<add> # # Check that it actually resizes the embeddings matrix
<add> # self.assertEqual(model_embed.weight.shape[0], cloned_embeddings.shape[0] - 15)
<add>
<add> # # Check that adding and removing tokens has not modified the first part of the embedding matrix.
<add> # models_equal = True
<add> # for p1, p2 in zip(cloned_embeddings, model_embed.weight):
<add> # if p1.data.ne(p2.data).sum() > 0:
<add> # models_equal = False
<add>
<add> # self.assertTrue(models_equal)
<add>
<add>
<add> def test_tie_model_weights(self):
<add> pass
<add> # config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()
<add>
<add> # def check_same_values(layer_1, layer_2):
<add> # equal = True
<add> # for p1, p2 in zip(layer_1.weight, layer_2.weight):
<add> # if p1.data.ne(p2.data).sum() > 0:
<add> # equal = False
<add> # return equal
<add>
<add> # for model_class in self.all_model_classes:
<add> # if not hasattr(model_class, 'tie_weights'):
<add> # continue
<add>
<add> # config.torchscript = True
<add> # model_not_tied = model_class(config)
<add> # params_not_tied = list(model_not_tied.parameters())
<add>
<add> # config_tied = copy.deepcopy(config)
<add> # config_tied.torchscript = False
<add> # model_tied = model_class(config_tied)
<add> # params_tied = list(model_tied.parameters())
<add>
<add> # # Check that the embedding layer and decoding layer are the same in size and in value
<add> # self.assertGreater(len(params_not_tied), len(params_tied))
<add>
<add> # # Check that after resize they remain tied.
<add> # model_tied.resize_token_embeddings(config.vocab_size + 10)
<add> # params_tied_2 = list(model_tied.parameters())
<add> # self.assertGreater(len(params_not_tied), len(params_tied))
<add> # self.assertEqual(len(params_tied_2), len(params_tied))
<add>
<add>
<add>def ids_tensor(shape, vocab_size, rng=None, name=None):
<add> """Creates a random int32 tensor of the shape within the vocab size."""
<add> if rng is None:
<add> rng = random.Random()
<add>
<add> total_dims = 1
<add> for dim in shape:
<add> total_dims *= dim
<add>
<add> values = []
<add> for _ in range(total_dims):
<add> values.append(rng.randint(0, vocab_size - 1))
<add>
<add> return tf.constant(values, shape=shape)
<add>
<add>
<add>class TFModelUtilsTest(unittest.TestCase):
<add> def test_model_from_pretrained(self):
<add> pass
<add> # logging.basicConfig(level=logging.INFO)
<add> # for model_name in list(BERT_PRETRAINED_MODEL_ARCHIVE_MAP.keys())[:1]:
<add> # config = BertConfig.from_pretrained(model_name)
<add> # self.assertIsNotNone(config)
<add> # self.assertIsInstance(config, PretrainedConfig)
<add>
<add> # model = BertModel.from_pretrained(model_name)
<add> # model, loading_info = BertModel.from_pretrained(model_name, output_loading_info=True)
<add> # self.assertIsNotNone(model)
<add> # self.assertIsInstance(model, PreTrainedModel)
<add> # for value in loading_info.values():
<add> # self.assertEqual(len(value), 0)
<add>
<add> # config = BertConfig.from_pretrained(model_name, output_attentions=True, output_hidden_states=True)
<add> # model = BertModel.from_pretrained(model_name, output_attentions=True, output_hidden_states=True)
<add> # self.assertEqual(model.config.output_attentions, True)
<add> # self.assertEqual(model.config.output_hidden_states, True)
<add> # self.assertEqual(model.config, config)
<add>
<add>
<add>if __name__ == "__main__":
<add> unittest.main()
<ide><path>pytorch_transformers/tests/modeling_tf_test.py
<add># coding=utf-8
<add># Copyright 2018 The Google AI Language Team Authors.
<add>#
<add># Licensed under the Apache License, Version 2.0 (the "License");
<add># you may not use this file except in compliance with the License.
<add># You may obtain a copy of the License at
<add>#
<add># http://www.apache.org/licenses/LICENSE-2.0
<add>#
<add># Unless required by applicable law or agreed to in writing, software
<add># distributed under the License is distributed on an "AS IS" BASIS,
<add># WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<add># See the License for the specific language governing permissions and
<add># limitations under the License.
<add>from __future__ import absolute_import
<add>from __future__ import division
<add>from __future__ import print_function
<add>
<add>import unittest
<add>import shutil
<add>import pytest
<add>
<add>import tensorflow as tf
<add>
<add>from pytorch_transformers import (BertConfig)
<add>from pytorch_transformers.modeling_tf_bert import TFBertModel, TF_BERT_PRETRAINED_MODEL_ARCHIVE_MAP
<add>
<add>from .modeling_tf_common_test import (TFCommonTestCases, ids_tensor)
<add>from .configuration_common_test import ConfigTester
<add>
<add>
<add>class TFBertModelTest(TFCommonTestCases.TFCommonModelTester):
<add>
<add> all_model_classes = (TFBertModel,)
<add> # BertForMaskedLM, BertForNextSentencePrediction,
<add> # BertForPreTraining, BertForQuestionAnswering, BertForSequenceClassification,
<add> # BertForTokenClassification)
<add>
<add> class TFBertModelTester(object):
<add>
<add> def __init__(self,
<add> parent,
<add> batch_size=13,
<add> seq_length=7,
<add> is_training=True,
<add> use_input_mask=True,
<add> use_token_type_ids=True,
<add> use_labels=True,
<add> vocab_size=99,
<add> hidden_size=32,
<add> num_hidden_layers=5,
<add> num_attention_heads=4,
<add> intermediate_size=37,
<add> hidden_act="gelu",
<add> hidden_dropout_prob=0.1,
<add> attention_probs_dropout_prob=0.1,
<add> max_position_embeddings=512,
<add> type_vocab_size=16,
<add> type_sequence_label_size=2,
<add> initializer_range=0.02,
<add> num_labels=3,
<add> num_choices=4,
<add> scope=None,
<add> ):
<add> self.parent = parent
<add> self.batch_size = batch_size
<add> self.seq_length = seq_length
<add> self.is_training = is_training
<add> self.use_input_mask = use_input_mask
<add> self.use_token_type_ids = use_token_type_ids
<add> self.use_labels = use_labels
<add> self.vocab_size = vocab_size
<add> self.hidden_size = hidden_size
<add> self.num_hidden_layers = num_hidden_layers
<add> self.num_attention_heads = num_attention_heads
<add> self.intermediate_size = intermediate_size
<add> self.hidden_act = hidden_act
<add> self.hidden_dropout_prob = hidden_dropout_prob
<add> self.attention_probs_dropout_prob = attention_probs_dropout_prob
<add> self.max_position_embeddings = max_position_embeddings
<add> self.type_vocab_size = type_vocab_size
<add> self.type_sequence_label_size = type_sequence_label_size
<add> self.initializer_range = initializer_range
<add> self.num_labels = num_labels
<add> self.num_choices = num_choices
<add> self.scope = scope
<add>
<add> def prepare_config_and_inputs(self):
<add> input_ids = ids_tensor([self.batch_size, self.seq_length], self.vocab_size)
<add>
<add> input_mask = None
<add> if self.use_input_mask:
<add> input_mask = ids_tensor([self.batch_size, self.seq_length], vocab_size=2)
<add>
<add> token_type_ids = None
<add> if self.use_token_type_ids:
<add> token_type_ids = ids_tensor([self.batch_size, self.seq_length], self.type_vocab_size)
<add>
<add> sequence_labels = None
<add> token_labels = None
<add> choice_labels = None
<add> if self.use_labels:
<add> sequence_labels = ids_tensor([self.batch_size], self.type_sequence_label_size)
<add> token_labels = ids_tensor([self.batch_size, self.seq_length], self.num_labels)
<add> choice_labels = ids_tensor([self.batch_size], self.num_choices)
<add>
<add> config = BertConfig(
<add> vocab_size_or_config_json_file=self.vocab_size,
<add> hidden_size=self.hidden_size,
<add> num_hidden_layers=self.num_hidden_layers,
<add> num_attention_heads=self.num_attention_heads,
<add> intermediate_size=self.intermediate_size,
<add> hidden_act=self.hidden_act,
<add> hidden_dropout_prob=self.hidden_dropout_prob,
<add> attention_probs_dropout_prob=self.attention_probs_dropout_prob,
<add> max_position_embeddings=self.max_position_embeddings,
<add> type_vocab_size=self.type_vocab_size,
<add> initializer_range=self.initializer_range)
<add>
<add> return config, input_ids, token_type_ids, input_mask, sequence_labels, token_labels, choice_labels
<add>
<add> def check_loss_output(self, result):
<add> self.parent.assertListEqual(
<add> list(result["loss"].size()),
<add> [])
<add>
<add> def create_and_check_bert_model(self, config, input_ids, token_type_ids, input_mask, sequence_labels, token_labels, choice_labels):
<add> model = TFBertModel(config=config)
<add> # model.eval()
<add> inputs = {'input_ids': input_ids,
<add> 'attention_mask': input_mask,
<add> 'token_type_ids': token_type_ids}
<add> sequence_output, pooled_output = model(inputs)
<add>
<add> inputs = [input_ids, input_mask]
<add> sequence_output, pooled_output = model(inputs)
<add>
<add> sequence_output, pooled_output = model(input_ids)
<add>
<add> result = {
<add> "sequence_output": sequence_output.numpy(),
<add> "pooled_output": pooled_output.numpy(),
<add> }
<add> self.parent.assertListEqual(
<add> list(result["sequence_output"].shape),
<add> [self.batch_size, self.seq_length, self.hidden_size])
<add> self.parent.assertListEqual(list(result["pooled_output"].shape), [self.batch_size, self.hidden_size])
<add>
<add>
<add> def create_and_check_bert_for_masked_lm(self, config, input_ids, token_type_ids, input_mask, sequence_labels, token_labels, choice_labels):
<add> pass
<add> # model = BertForMaskedLM(config=config)
<add> # model.eval()
<add> # loss, prediction_scores = model(input_ids, token_type_ids, input_mask, token_labels)
<add> # result = {
<add> # "loss": loss,
<add> # "prediction_scores": prediction_scores,
<add> # }
<add> # self.parent.assertListEqual(
<add> # list(result["prediction_scores"].size()),
<add> # [self.batch_size, self.seq_length, self.vocab_size])
<add> # self.check_loss_output(result)
<add>
<add>
<add> def create_and_check_bert_for_next_sequence_prediction(self, config, input_ids, token_type_ids, input_mask, sequence_labels, token_labels, choice_labels):
<add> pass
<add> # model = BertForNextSentencePrediction(config=config)
<add> # model.eval()
<add> # loss, seq_relationship_score = model(input_ids, token_type_ids, input_mask, sequence_labels)
<add> # result = {
<add> # "loss": loss,
<add> # "seq_relationship_score": seq_relationship_score,
<add> # }
<add> # self.parent.assertListEqual(
<add> # list(result["seq_relationship_score"].size()),
<add> # [self.batch_size, 2])
<add> # self.check_loss_output(result)
<add>
<add>
<add> def create_and_check_bert_for_pretraining(self, config, input_ids, token_type_ids, input_mask, sequence_labels, token_labels, choice_labels):
<add> pass
<add> # model = BertForPreTraining(config=config)
<add> # model.eval()
<add> # loss, prediction_scores, seq_relationship_score = model(input_ids, token_type_ids, input_mask, token_labels, sequence_labels)
<add> # result = {
<add> # "loss": loss,
<add> # "prediction_scores": prediction_scores,
<add> # "seq_relationship_score": seq_relationship_score,
<add> # }
<add> # self.parent.assertListEqual(
<add> # list(result["prediction_scores"].size()),
<add> # [self.batch_size, self.seq_length, self.vocab_size])
<add> # self.parent.assertListEqual(
<add> # list(result["seq_relationship_score"].size()),
<add> # [self.batch_size, 2])
<add> # self.check_loss_output(result)
<add>
<add>
<add> def create_and_check_bert_for_question_answering(self, config, input_ids, token_type_ids, input_mask, sequence_labels, token_labels, choice_labels):
<add> pass
<add> # model = BertForQuestionAnswering(config=config)
<add> # model.eval()
<add> # loss, start_logits, end_logits = model(input_ids, token_type_ids, input_mask, sequence_labels, sequence_labels)
<add> # result = {
<add> # "loss": loss,
<add> # "start_logits": start_logits,
<add> # "end_logits": end_logits,
<add> # }
<add> # self.parent.assertListEqual(
<add> # list(result["start_logits"].size()),
<add> # [self.batch_size, self.seq_length])
<add> # self.parent.assertListEqual(
<add> # list(result["end_logits"].size()),
<add> # [self.batch_size, self.seq_length])
<add> # self.check_loss_output(result)
<add>
<add>
<add> def create_and_check_bert_for_sequence_classification(self, config, input_ids, token_type_ids, input_mask, sequence_labels, token_labels, choice_labels):
<add> pass
<add> # config.num_labels = self.num_labels
<add> # model = BertForSequenceClassification(config)
<add> # model.eval()
<add> # loss, logits = model(input_ids, token_type_ids, input_mask, sequence_labels)
<add> # result = {
<add> # "loss": loss,
<add> # "logits": logits,
<add> # }
<add> # self.parent.assertListEqual(
<add> # list(result["logits"].size()),
<add> # [self.batch_size, self.num_labels])
<add> # self.check_loss_output(result)
<add>
<add>
<add> def create_and_check_bert_for_token_classification(self, config, input_ids, token_type_ids, input_mask, sequence_labels, token_labels, choice_labels):
<add> pass
<add> # config.num_labels = self.num_labels
<add> # model = BertForTokenClassification(config=config)
<add> # model.eval()
<add> # loss, logits = model(input_ids, token_type_ids, input_mask, token_labels)
<add> # result = {
<add> # "loss": loss,
<add> # "logits": logits,
<add> # }
<add> # self.parent.assertListEqual(
<add> # list(result["logits"].size()),
<add> # [self.batch_size, self.seq_length, self.num_labels])
<add> # self.check_loss_output(result)
<add>
<add>
<add> def create_and_check_bert_for_multiple_choice(self, config, input_ids, token_type_ids, input_mask, sequence_labels, token_labels, choice_labels):
<add> pass
<add> # config.num_choices = self.num_choices
<add> # model = BertForMultipleChoice(config=config)
<add> # model.eval()
<add> # multiple_choice_inputs_ids = input_ids.unsqueeze(1).expand(-1, self.num_choices, -1).contiguous()
<add> # multiple_choice_token_type_ids = token_type_ids.unsqueeze(1).expand(-1, self.num_choices, -1).contiguous()
<add> # multiple_choice_input_mask = input_mask.unsqueeze(1).expand(-1, self.num_choices, -1).contiguous()
<add> # loss, logits = model(multiple_choice_inputs_ids,
<add> # multiple_choice_token_type_ids,
<add> # multiple_choice_input_mask,
<add> # choice_labels)
<add> # result = {
<add> # "loss": loss,
<add> # "logits": logits,
<add> # }
<add> # self.parent.assertListEqual(
<add> # list(result["logits"].size()),
<add> # [self.batch_size, self.num_choices])
<add> # self.check_loss_output(result)
<add>
<add>
<add> def prepare_config_and_inputs_for_common(self):
<add> config_and_inputs = self.prepare_config_and_inputs()
<add> (config, input_ids, token_type_ids, input_mask,
<add> sequence_labels, token_labels, choice_labels) = config_and_inputs
<add> inputs_dict = {'input_ids': input_ids, 'token_type_ids': token_type_ids, 'attention_mask': input_mask}
<add> return config, inputs_dict
<add>
<add> def setUp(self):
<add> self.model_tester = TFBertModelTest.TFBertModelTester(self)
<add> self.config_tester = ConfigTester(self, config_class=BertConfig, hidden_size=37)
<add>
<add> def test_config(self):
<add> self.config_tester.run_common_tests()
<add>
<add> def test_bert_model(self):
<add> config_and_inputs = self.model_tester.prepare_config_and_inputs()
<add> self.model_tester.create_and_check_bert_model(*config_and_inputs)
<add>
<add> def test_for_masked_lm(self):
<add> config_and_inputs = self.model_tester.prepare_config_and_inputs()
<add> self.model_tester.create_and_check_bert_for_masked_lm(*config_and_inputs)
<add>
<add> def test_for_multiple_choice(self):
<add> config_and_inputs = self.model_tester.prepare_config_and_inputs()
<add> self.model_tester.create_and_check_bert_for_multiple_choice(*config_and_inputs)
<add>
<add> def test_for_next_sequence_prediction(self):
<add> config_and_inputs = self.model_tester.prepare_config_and_inputs()
<add> self.model_tester.create_and_check_bert_for_next_sequence_prediction(*config_and_inputs)
<add>
<add> def test_for_pretraining(self):
<add> config_and_inputs = self.model_tester.prepare_config_and_inputs()
<add> self.model_tester.create_and_check_bert_for_pretraining(*config_and_inputs)
<add>
<add> def test_for_question_answering(self):
<add> config_and_inputs = self.model_tester.prepare_config_and_inputs()
<add> self.model_tester.create_and_check_bert_for_question_answering(*config_and_inputs)
<add>
<add> def test_for_sequence_classification(self):
<add> config_and_inputs = self.model_tester.prepare_config_and_inputs()
<add> self.model_tester.create_and_check_bert_for_sequence_classification(*config_and_inputs)
<add>
<add> def test_for_token_classification(self):
<add> config_and_inputs = self.model_tester.prepare_config_and_inputs()
<add> self.model_tester.create_and_check_bert_for_token_classification(*config_and_inputs)
<add>
<add> @pytest.mark.slow
<add> def test_model_from_pretrained(self):
<add> cache_dir = "/tmp/pytorch_transformers_test/"
<add> for model_name in list(TF_BERT_PRETRAINED_MODEL_ARCHIVE_MAP.keys())[:1]:
<add> model = TFBertModel.from_pretrained(model_name, cache_dir=cache_dir)
<add> shutil.rmtree(cache_dir)
<add> self.assertIsNotNone(model)
<add>
<add>if __name__ == "__main__":
<add> unittest.main() | 5 |
PHP | PHP | add tests for single set + accessible properties | bc8f1ced98859f22a957ce85c0f18a94805bd5e7 | <ide><path>Cake/Test/TestCase/ORM/EntityTest.php
<ide> public function testSetWithAccessibleWithArray() {
<ide> $this->assertEquals(5, $entity->get('foo'));
<ide> }
<ide>
<add>/**
<add> * Test that accessible() and single property setting works.
<add> *
<add> * @return
<add> */
<add> public function testSetWithAccessibleSingleProperty() {
<add> $entity = new Entity(['foo' => 1, 'bar' => 2]);
<add> $entity->accessible('title', true);
<add>
<add> $entity->set(['title' => 'test', 'body' => 'Nope']);
<add> $this->assertEquals('test', $entity->title);
<add> $this->assertNull($entity->body);
<add>
<add> $entity->body = 'Yep';
<add> $this->assertEquals('Yep', $entity->body, 'Single set should bypass guards.');
<add>
<add> $entity->set('body', 'Yes');
<add> $this->assertEquals('Yes', $entity->body, 'Single set should bypass guards.');
<add> }
<add>
<ide> } | 1 |
Mixed | Go | add support for syslog over tls | 4b98193beab00bc6cf48762858570a1bd418c9ef | <ide><path>daemon/logger/syslog/syslog.go
<ide> package syslog
<ide>
<ide> import (
<add> "crypto/tls"
<ide> "errors"
<ide> "fmt"
<del> "log/syslog"
<ide> "net"
<ide> "net/url"
<ide> "os"
<ide> "path"
<ide> "strconv"
<ide> "strings"
<ide>
<add> syslog "github.com/RackSec/srslog"
<add>
<ide> "github.com/Sirupsen/logrus"
<ide> "github.com/docker/docker/daemon/logger"
<ide> "github.com/docker/docker/daemon/logger/loggerutils"
<ide> "github.com/docker/docker/pkg/urlutil"
<add> "github.com/docker/go-connections/tlsconfig"
<ide> )
<ide>
<del>const name = "syslog"
<add>const (
<add> name = "syslog"
<add> secureProto = "tcp+tls"
<add>)
<ide>
<ide> var facilities = map[string]syslog.Priority{
<ide> "kern": syslog.LOG_KERN,
<ide> func New(ctx logger.Context) (logger.Logger, error) {
<ide> return nil, err
<ide> }
<ide>
<del> log, err := syslog.Dial(
<del> proto,
<del> address,
<del> facility,
<del> path.Base(os.Args[0])+"/"+tag,
<del> )
<add> logTag := path.Base(os.Args[0]) + "/" + tag
<add>
<add> var log *syslog.Writer
<add> if proto == secureProto {
<add> tlsConfig, tlsErr := parseTLSConfig(ctx.Config)
<add> if tlsErr != nil {
<add> return nil, tlsErr
<add> }
<add> log, err = syslog.DialWithTLSConfig(proto, address, facility, logTag, tlsConfig)
<add> } else {
<add> log, err = syslog.Dial(proto, address, facility, logTag)
<add> }
<add>
<ide> if err != nil {
<ide> return nil, err
<ide> }
<ide> func ValidateLogOpt(cfg map[string]string) error {
<ide> case "syslog-address":
<ide> case "syslog-facility":
<ide> case "syslog-tag":
<add> case "syslog-tls-ca-cert":
<add> case "syslog-tls-cert":
<add> case "syslog-tls-key":
<add> case "syslog-tls-skip-verify":
<ide> case "tag":
<ide> default:
<ide> return fmt.Errorf("unknown log opt '%s' for syslog log driver", key)
<ide> func parseFacility(facility string) (syslog.Priority, error) {
<ide>
<ide> return syslog.Priority(0), errors.New("invalid syslog facility")
<ide> }
<add>
<add>func parseTLSConfig(cfg map[string]string) (*tls.Config, error) {
<add> _, skipVerify := cfg["syslog-tls-skip-verify"]
<add>
<add> opts := tlsconfig.Options{
<add> CAFile: cfg["syslog-tls-ca-cert"],
<add> CertFile: cfg["syslog-tls-cert"],
<add> KeyFile: cfg["syslog-tls-key"],
<add> InsecureSkipVerify: skipVerify,
<add> }
<add>
<add> return tlsconfig.Client(opts)
<add>}
<ide><path>docs/reference/logging/overview.md
<ide> If `max-size` and `max-file` are set, `docker logs` only returns the log lines f
<ide>
<ide> The following logging options are supported for the `syslog` logging driver:
<ide>
<del> --log-opt syslog-address=[tcp|udp]://host:port
<add> --log-opt syslog-address=[tcp|udp|tcp+tls]://host:port
<ide> --log-opt syslog-address=unix://path
<ide> --log-opt syslog-facility=daemon
<add> --log-opt syslog-tls-ca-cert=/etc/ca-certificates/custom/ca.pem
<add> --log-opt syslog-tls-cert=/etc/ca-certificates/custom/cert.pem
<add> --log-opt syslog-tls-key=/etc/ca-certificates/custom/key.pem
<add> --log-opt syslog-tls-skip-verify=true
<ide> --log-opt tag="mailer"
<ide>
<ide> `syslog-address` specifies the remote syslog server address where the driver connects to.
<ide> the following named facilities:
<ide> * `local6`
<ide> * `local7`
<ide>
<add>`syslog-tls-ca-cert` specifies the absolute path to the trust certificates
<add>signed by the CA. This option is ignored if the address protocol is not `tcp+tls`.
<add>
<add>`syslog-tls-cert` specifies the absolute path to the TLS certificate file.
<add>This option is ignored if the address protocol is not `tcp+tls`.
<add>
<add>`syslog-tls-key` specifies the absolute path to the TLS key file.
<add>This option is ignored if the address protocol is not `tcp+tls`.
<add>
<add>`syslog-tls-skip-verify` configures the TLS verification.
<add>This verification is enabled by default, but it can be overriden by setting
<add>this option to `true`. This option is ignored if the address protocol is not `tcp+tls`.
<add>
<ide> By default, Docker uses the first 12 characters of the container ID to tag log messages.
<ide> Refer to the [log tag option documentation](log_tags.md) for customizing
<ide> the log tag format. | 2 |
Javascript | Javascript | call setstate callbacks enqueued in cwm | ef99e7e096cb661de0e72f93a442fbfeaaa07b1c | <ide><path>src/renderers/dom/stack/client/ReactMount.js
<ide> var ReactMount = {
<ide> nextElement,
<ide> container,
<ide> shouldReuseMarkup,
<del> context
<add> context,
<add> callback
<ide> ) {
<ide> // Various parts of our code (such as ReactCompositeComponent's
<ide> // _renderValidatedComponent) assume that calls to render aren't nested;
<ide> var ReactMount = {
<ide> ReactBrowserEventEmitter.ensureScrollValueMonitoring();
<ide> var componentInstance = instantiateReactComponent(nextElement, false);
<ide>
<add> if (callback) {
<add> componentInstance._pendingCallbacks = [function() {
<add> callback.call(componentInstance._renderedComponent.getPublicInstance());
<add> }];
<add> }
<add>
<ide> // The initial render is synchronous but any updates that happen during
<ide> // rendering, in componentWillMount or componentDidMount, will be batched
<ide> // according to the current batching strategy.
<ide> var ReactMount = {
<ide> nextWrappedElement,
<ide> container,
<ide> shouldReuseMarkup,
<del> nextContext
<add> nextContext,
<add> callback
<ide> )._renderedComponent.getPublicInstance();
<del> if (callback) {
<del> callback.call(component);
<del> }
<ide> return component;
<ide> },
<ide>
<ide><path>src/renderers/native/ReactNativeMount.js
<ide> var ReactNativeMount = {
<ide> var instance = instantiateReactComponent(nextWrappedElement, false);
<ide> ReactNativeMount._instancesByContainerID[containerTag] = instance;
<ide>
<add> if (callback) {
<add> instance._pendingCallbacks = [function() {
<add> callback.call(instance._renderedComponent.getPublicInstance());
<add> }];
<add> }
<add>
<ide> // The initial render is synchronous but any updates that happen during
<ide> // rendering, in componentWillMount or componentDidMount, will be batched
<ide> // according to the current batching strategy.
<ide> var ReactNativeMount = {
<ide> instance,
<ide> containerTag
<ide> );
<del> var component = instance.getPublicInstance();
<del> if (callback) {
<del> callback.call(component);
<del> }
<add> var component = instance._renderedComponent.getPublicInstance();
<ide> return component;
<ide> },
<ide>
<ide><path>src/renderers/native/__tests__/ReactNativeMount-test.js
<ide> describe('ReactNative', () => {
<ide> expect(UIManager.updateView).toBeCalledWith(3, 'View', { foo: 'bar' });
<ide> });
<ide>
<add> it('should be able to create and update a native component', () => {
<add> var View = createReactNativeComponentClass({
<add> validAttributes: { foo: true },
<add> uiViewClassName: 'View',
<add> });
<add>
<add> var a;
<add> var b;
<add> var c = ReactNative.render(
<add> <View foo="foo" ref={(v) => a = v} />,
<add> 11,
<add> function() {
<add> b = this;
<add> }
<add> );
<add>
<add> expect(a).toBeTruthy();
<add> expect(a).toBe(b);
<add> expect(a).toBe(c);
<add> });
<add>
<ide> });
<ide><path>src/renderers/shared/stack/reconciler/ReactCompositeComponent.js
<ide> var ReactCompositeComponent = {
<ide> }
<ide> }
<ide>
<add> // setState callbacks during willMount should end up here
<add> const callbacks = this._pendingCallbacks;
<add> if (callbacks) {
<add> this._pendingCallbacks = null;
<add> for (let i = 0; i < callbacks.length; i++) {
<add> transaction.getReactMountReady().enqueue(
<add> callbacks[i],
<add> inst
<add> );
<add> }
<add> }
<add>
<ide> return markup;
<ide> },
<ide>
<ide><path>src/renderers/shared/stack/reconciler/__tests__/ReactCompositeComponentState-test.js
<ide>
<ide> 'use strict';
<ide>
<del>var ReactDOMFeatureFlags = require('ReactDOMFeatureFlags');
<del>
<ide> var React;
<ide> var ReactDOM;
<ide>
<ide> describe('ReactCompositeComponent-state', () => {
<ide> ['componentDidMount-end', 'orange'],
<ide> ];
<ide>
<del> if (ReactDOMFeatureFlags.useFiber) {
<del> // The setState callbacks in componentWillMount, and the initial callback
<del> // passed to ReactDOM.render, should be flushed right after component
<del> // did mount:
<del> expected.push(
<del> ['setState-sunrise', 'orange'], // 1
<del> ['setState-orange', 'orange'], // 2
<del> ['initial-callback', 'orange'], // 3
<del> ['shouldComponentUpdate-currentState', 'orange'],
<del> ['shouldComponentUpdate-nextState', 'yellow'],
<del> ['componentWillUpdate-currentState', 'orange'],
<del> ['componentWillUpdate-nextState', 'yellow'],
<del> ['render', 'yellow'],
<del> ['componentDidUpdate-currentState', 'yellow'],
<del> ['componentDidUpdate-prevState', 'orange'],
<del> ['setState-yellow', 'yellow'],
<del> );
<del> } else {
<del> // There is a bug in the stack reconciler where those callbacks are
<del> // enqueued, but aren't called until the next flush.
<del> expected.push(
<del> ['shouldComponentUpdate-currentState', 'orange'],
<del> ['shouldComponentUpdate-nextState', 'yellow'],
<del> ['componentWillUpdate-currentState', 'orange'],
<del> ['componentWillUpdate-nextState', 'yellow'],
<del> ['render', 'yellow'],
<del> ['componentDidUpdate-currentState', 'yellow'],
<del> ['componentDidUpdate-prevState', 'orange'],
<del> ['setState-sunrise', 'yellow'], // 1
<del> ['setState-orange', 'yellow'], // 2
<del> ['setState-yellow', 'yellow'],
<del> ['initial-callback', 'yellow'] // 3
<del> );
<del> }
<add> // The setState callbacks in componentWillMount, and the initial callback
<add> // passed to ReactDOM.render, should be flushed right after component
<add> // did mount:
<add> expected.push(
<add> ['setState-sunrise', 'orange'], // 1
<add> ['setState-orange', 'orange'], // 2
<add> ['initial-callback', 'orange'], // 3
<add> ['shouldComponentUpdate-currentState', 'orange'],
<add> ['shouldComponentUpdate-nextState', 'yellow'],
<add> ['componentWillUpdate-currentState', 'orange'],
<add> ['componentWillUpdate-nextState', 'yellow'],
<add> ['render', 'yellow'],
<add> ['componentDidUpdate-currentState', 'yellow'],
<add> ['componentDidUpdate-prevState', 'orange'],
<add> ['setState-yellow', 'yellow'],
<add> );
<ide>
<ide> expected.push(
<ide> ['componentWillReceiveProps-start', 'yellow'], | 5 |
Go | Go | add debug messages while testing devicemapper | 11d695a2973d67906145e7f0972b6e693bdaa3f9 | <ide><path>devmapper/deviceset_devmapper.go
<ide> func (devices *DeviceSetDM) saveMetadata() error {
<ide> }
<ide>
<ide> func (devices *DeviceSetDM) registerDevice(id int, hash string, size uint64) (*DevInfo, error) {
<add> utils.Debugf("registerDevice(%v, %v)", id, hash)
<ide> info := &DevInfo{
<ide> Hash: hash,
<ide> DeviceId: id,
<ide> func (devices *DeviceSetDM) registerDevice(id int, hash string, size uint64) (*D
<ide> }
<ide>
<ide> func (devices *DeviceSetDM) activateDeviceIfNeeded(hash string) error {
<del> utils.Debugf("activateDeviceIfNeeded()")
<add> utils.Debugf("activateDeviceIfNeeded(%v)", hash)
<ide> info := devices.Devices[hash]
<ide> if info == nil {
<ide> return fmt.Errorf("Unknown device %s", hash)
<ide> func (devices *DeviceSetDM) DeactivateDevice(hash string) error {
<ide>
<ide> func (devices *DeviceSetDM) Shutdown() error {
<ide> devices.Lock()
<add> utils.Debugf("[devmapper] Shutting down DeviceSet: %s", devices.root)
<ide> defer devices.Unlock()
<ide>
<ide> if !devices.initialized {
<ide><path>devmapper/devmapper.go
<ide> func resumeDevice(name string) error {
<ide> }
<ide>
<ide> func createDevice(poolName string, deviceId int) error {
<add> utils.Debugf("[devmapper] createDevice(poolName=%v, deviceId=%v)", poolName, deviceId)
<ide> task, err := createTask(DeviceTargetMsg, poolName)
<ide> if task == nil {
<ide> return err
<ide><path>runtime_test.go
<ide> func removeDev(name string) error {
<ide> }
<ide>
<ide> func cleanupDevMapper() error {
<add> utils.Debugf("[devmapper cleanup] starting")
<add> defer utils.Debugf("[devmapper cleanup] done")
<ide> filter := "docker-" + path.Base(unitTestStoreBase)
<ide> utils.Debugf("Filtering out %s\n", filter)
<ide> // Unmount any leftover mounts from previous unit test runs
<ide><path>utils_test.go
<ide> type Fataler interface {
<ide> Fatal(args ...interface{})
<ide> }
<ide>
<del>func newTestRuntime() (*Runtime, error) {
<add>func newTestRuntime() (runtime *Runtime, err error) {
<add> utils.Debugf("newTestRuntime start")
<ide> root, err := ioutil.TempDir("", "docker-test")
<add> defer func() {
<add> utils.Debugf("newTestRuntime: %s", root)
<add> }()
<ide> if err != nil {
<ide> return nil, err
<ide> }
<ide> if err := os.Remove(root); err != nil {
<ide> return nil, err
<ide> }
<add> utils.Debugf("Copying %s to %s", unitTestStoreBase, root)
<ide> if err := utils.CopyDirectory(unitTestStoreBase, root); err != nil {
<add> utils.Debugf("ERROR: Copying %s to %s returned %s", unitTestStoreBase, root, err)
<ide> return nil, err
<ide> }
<ide>
<del> runtime, err := NewRuntimeFromDirectory(root, false)
<add> runtime, err = NewRuntimeFromDirectory(root, false)
<ide> if err != nil {
<ide> return nil, err
<ide> } | 4 |
Javascript | Javascript | fix run-ios when specifying a scheme as cli arg | c58e19ef3397db89ba0a8705d3fcafa17cf4aad7 | <ide><path>local-cli/runIOS/runIOS.js
<ide> function runIOS(argv, config, args) {
<ide> } else if (args.udid) {
<ide> return runOnDeviceByUdid(args, scheme, xcodeProject, devices);
<ide> } else {
<del> return runOnSimulator(xcodeProject, args, inferredSchemeName, scheme);
<add> return runOnSimulator(xcodeProject, args, scheme);
<ide> }
<ide> }
<ide>
<ide> function runOnDeviceByUdid(args, scheme, xcodeProject, devices) {
<ide> }
<ide> }
<ide>
<del>function runOnSimulator(xcodeProject, args, inferredSchemeName, scheme){
<add>function runOnSimulator(xcodeProject, args, scheme){
<ide> return new Promise((resolve) => {
<ide> try {
<ide> var simulators = JSON.parse(
<ide> function runOnSimulator(xcodeProject, args, inferredSchemeName, scheme){
<ide> .then((udid) => buildProject(xcodeProject, udid, scheme, args.configuration, args.packager))
<ide> .then((appName) => {
<ide> if (!appName) {
<del> appName = inferredSchemeName;
<add> appName = scheme;
<ide> }
<ide> let appPath = getBuildPath(args.configuration, appName);
<ide> console.log(`Installing ${appPath}`); | 1 |
PHP | PHP | make striplinks a bit more thorough | 7a59b6bb6365b856850a11ca232a5623b6a73fc5 | <ide><path>src/Utility/Text.php
<ide> public static function highlight($text, $phrase, array $options = [])
<ide> /**
<ide> * Strips given text of all links (<a href=....).
<ide> *
<add> * *Warning* This method is not an robust solution in preventing XSS
<add> * or malicious HTML.
<add> *
<ide> * @param string $text Text
<ide> * @return string The text without links
<add> * @deprecated 3.2.12 This method will be removed in 4.0.0
<ide> */
<ide> public static function stripLinks($text)
<ide> {
<del> return preg_replace('|<a\s+[^>]+>|im', '', preg_replace('|<\/a>|im', '', $text));
<add> do {
<add> $text = preg_replace('#</?a([/\s][^>]*)?(>|$)#i', '', $text, -1, $count);
<add> } while ($count);
<add> return $text;
<ide> }
<ide>
<ide> /**
<ide><path>tests/TestCase/Utility/TextTest.php
<ide> public function testStripLinks()
<ide> $expected = 'This <strong>is</strong> a test and <abbr>some</abbr> other text';
<ide> $result = $this->Text->stripLinks($text);
<ide> $this->assertEquals($expected, $result);
<add>
<add> $text = '<a<a h> href=\'bla\'>test</a</a>>';
<add> $this->assertEquals('test', $this->Text->stripLinks($text));
<add>
<add> $text = '<a/href="#">test</a/>';
<add> $this->assertEquals('test', $this->Text->stripLinks($text));
<add>
<add> $text = '<a href="#"';
<add> $this->assertEquals('', $this->Text->stripLinks($text));
<ide> }
<ide>
<ide> /** | 2 |
Python | Python | replace codecs.open with io.open | 1a93d7f725d7ca2131942f748a9953dbac8ed026 | <ide><path>bin/get_freqs.py
<ide> import os
<ide> import bz2
<ide> import ujson
<del>import codecs
<ide> from preshed.counter import PreshCounter
<ide> from joblib import Parallel, delayed
<ide> import io
<ide> def count_freqs(input_loc, output_loc):
<ide> doc = tokenizer(json_comment['body'])
<ide> doc.count_by(ORTH, counts=counts)
<ide>
<del> with codecs.open(output_loc, 'w', 'utf8') as file_:
<add> with io.open(output_loc, 'w', 'utf8') as file_:
<ide> for orth, freq in counts:
<ide> string = tokenizer.vocab.strings[orth]
<ide> if not string.isspace(): | 1 |
PHP | PHP | unify new lines at the end of the file | 881127ef4d74545ee16802fc06a52e1cd96a9626 | <ide><path>lib/Cake/Cache/Cache.php
<ide> public static function settings($name = 'default') {
<ide> }
<ide>
<ide> }
<del>
<ide><path>lib/Cake/Controller/Component/CookieComponent.php
<ide> protected function _explode($string) {
<ide> return $array;
<ide> }
<ide> }
<del>
<ide><path>lib/Cake/Model/Aco.php
<ide> class Aco extends AclNode {
<ide> * @var array
<ide> */
<ide> public $hasAndBelongsToMany = array('Aro' => array('with' => 'Permission'));
<del>}
<ide>\ No newline at end of file
<add>}
<ide><path>lib/Cake/Model/AcoAction.php
<ide> class AcoAction extends AppModel {
<ide> * @var array
<ide> */
<ide> public $belongsTo = array('Aco');
<del>}
<ide>\ No newline at end of file
<add>}
<ide><path>lib/Cake/Model/ModelBehavior.php
<ide> protected function _addToWhitelist(Model $model, $field) {
<ide> }
<ide>
<ide> }
<del>
<ide><path>lib/Cake/Routing/Filter/AssetDispatcher.php
<ide> protected function _deliverAsset(CakeResponse $response, $assetFile, $ext) {
<ide> }
<ide> }
<ide>
<del>}
<ide>\ No newline at end of file
<add>}
<ide><path>lib/Cake/Test/Case/AllConsoleTest.php
<ide> public static function suite() {
<ide> $suite->addTestFile($path . 'AllShellsTest.php');
<ide> return $suite;
<ide> }
<del>}
<ide>\ No newline at end of file
<add>}
<ide><path>lib/Cake/Test/Case/AllControllerTest.php
<ide> public static function suite() {
<ide> $suite->addTestFile(CORE_TEST_CASES . DS . 'Controller' . DS . 'ControllerMergeVarsTest.php');
<ide> return $suite;
<ide> }
<del>}
<ide>\ No newline at end of file
<add>}
<ide><path>lib/Cake/Test/Case/AllCoreTest.php
<ide> public static function suite() {
<ide> return $suite;
<ide> }
<ide> }
<del>
<ide><path>lib/Cake/Test/Case/AllEventTest.php
<ide> public static function suite() {
<ide> return $suite;
<ide> }
<ide> }
<del>
<ide><path>lib/Cake/Test/Case/AllLogTest.php
<ide> public static function suite() {
<ide> return $suite;
<ide> }
<ide> }
<del>
<ide><path>lib/Cake/Test/Case/AllTestSuiteTest.php
<ide> public static function suite() {
<ide> $suite->addTestDirectory(CORE_TEST_CASES . DS . 'TestSuite');
<ide> return $suite;
<ide> }
<del>}
<ide>\ No newline at end of file
<add>}
<ide><path>lib/Cake/Test/Case/Console/AllConsoleLibsTest.php
<ide> public static function suite() {
<ide> }
<ide> return $suite;
<ide> }
<del>}
<ide>\ No newline at end of file
<add>}
<ide><path>lib/Cake/Test/Case/Console/AllShellsTest.php
<ide> public static function suite() {
<ide> $suite->addTestDirectory($path);
<ide> return $suite;
<ide> }
<del>}
<ide>\ No newline at end of file
<add>}
<ide><path>lib/Cake/Test/Case/Console/AllTasksTest.php
<ide> public static function suite() {
<ide> return $suite;
<ide> }
<ide> }
<del>
<ide><path>lib/Cake/Test/Case/Controller/Component/Acl/IniAclTest.php
<ide> public function testCheckArray() {
<ide> $this->assertTrue($Ini->check($user, 'posts'));
<ide> }
<ide> }
<del>
<ide><path>lib/Cake/Test/Case/Event/CakeEventTest.php
<ide> public function testEventDirectPropertyAccess() {
<ide> $this->assertEquals($this, $event->subject);
<ide> $this->assertEquals('fake.event', $event->name);
<ide> }
<del>}
<ide>\ No newline at end of file
<add>}
<ide><path>lib/Cake/Test/Case/Model/Behavior/TreeBehaviorAfterTest.php
<ide> public function testAftersaveCallback() {
<ide> $this->assertEquals($expected, $result[7]);
<ide> }
<ide> }
<del>
<del>
<ide><path>lib/Cake/Test/Case/Model/Datasource/Session/CacheSessionTest.php
<ide> public function testDestroy() {
<ide> $this->assertFalse(Cache::read('test_one', 'session_test'), 'Value stuck around.');
<ide> }
<ide>
<del>}
<ide>\ No newline at end of file
<add>}
<ide><path>lib/Cake/Test/Case/Network/Email/DebugTransportTest.php
<ide> public function testSend() {
<ide> $this->assertEquals($data, $result['message']);
<ide> }
<ide>
<del>}
<ide>\ No newline at end of file
<add>}
<ide><path>lib/Cake/Test/Case/Network/Email/MailTransportTest.php
<ide> public function testSendData() {
<ide> }
<ide>
<ide> }
<del>
<ide><path>lib/Cake/Test/Case/View/Helper/TextHelperTest.php
<ide> public function testAutoLinkEmailInvalid() {
<ide> }
<ide>
<ide> }
<del>
<ide><path>lib/Cake/TestSuite/Fixture/CakeTestModel.php
<ide> public function save($data = null, $validate = true, $fieldList = array()) {
<ide> return parent::save($data, $validate, $fieldList);
<ide> }
<ide>
<del>}
<ide>\ No newline at end of file
<add>}
<ide><path>lib/Cake/TestSuite/templates/footer.php
<ide> */
<ide> ?> </div>
<ide> </div>
<del> <div id="footer">
<add> <div id="footer">
<ide> <p>
<del> <!--PLEASE USE ONE OF THE POWERED BY CAKEPHP LOGO-->
<del> <a href="http://www.cakephp.org/" target="_blank">
<add> <!--PLEASE USE ONE OF THE POWERED BY CAKEPHP LOGO-->
<add> <a href="http://www.cakephp.org/" target="_blank">
<ide> <img src="<?php echo $baseDir; ?>img/cake.power.gif" alt="CakePHP(tm) :: Rapid Development Framework" /></a>
<ide> </p>
<del> </div>
<add> </div>
<ide> <?php
<ide> App::uses('View', 'View');
<ide> $null = null;
<ide> ?>
<ide> </div>
<ide> </body>
<del></html>
<ide>\ No newline at end of file
<add></html>
<ide><path>lib/Cake/TestSuite/templates/xdebug.php
<ide> </div>
<ide> <?php
<ide> include dirname(__FILE__) . DS . 'footer.php';
<del>
<ide><path>lib/Cake/bootstrap.php
<ide> function mb_encode_mimeheader($str, $charset = 'UTF-8', $transferEncoding = 'B',
<ide> }
<ide>
<ide> }
<del> | 26 |
Javascript | Javascript | fix edge case when parsing top-level this on iife | da6f869c1ef39abd0f1e512b07f41f6bbecd4b88 | <ide><path>lib/Parser.js
<ide> class Parser extends Tapable {
<ide> this.walkClass(expression);
<ide> }
<ide>
<del> walkCallExpression(expression) {
<del> let result;
<del>
<del> const walkIIFE = (functionExpression, options, currentThis) => {
<del> const renameArgOrThis = argOrThis => {
<del> const renameIdentifier = this.getRenameIdentifier(argOrThis);
<del> if (renameIdentifier) {
<del> const hook = this.hooks.canRename.get(renameIdentifier);
<del> if (hook !== undefined && hook.call(argOrThis)) {
<del> const hook = this.hooks.rename.get(renameIdentifier);
<del> if (hook === undefined || !hook.call(argOrThis))
<del> return renameIdentifier;
<del> }
<del> }
<del> this.walkExpression(argOrThis);
<del> };
<del> const wasTopLevel = this.scope.topLevelScope;
<del> this.scope.topLevelScope = false;
<del> const params = functionExpression.params;
<del> const renameThis = currentThis ? renameArgOrThis(currentThis) : null;
<del> const args = options.map(renameArgOrThis);
<del> this.inScope(params.filter((identifier, idx) => !args[idx]), () => {
<del> if (renameThis) {
<del> this.scope.renames.set("this", renameThis);
<add> _walkIIFE(functionExpression, options, currentThis) {
<add> const renameArgOrThis = argOrThis => {
<add> const renameIdentifier = this.getRenameIdentifier(argOrThis);
<add> if (renameIdentifier) {
<add> const hook = this.hooks.canRename.get(renameIdentifier);
<add> if (hook !== undefined && hook.call(argOrThis)) {
<add> const hook = this.hooks.rename.get(renameIdentifier);
<add> if (hook === undefined || !hook.call(argOrThis))
<add> return renameIdentifier;
<ide> }
<del> for (let i = 0; i < args.length; i++) {
<del> const param = args[i];
<del> if (!param) continue;
<del> if (!params[i] || params[i].type !== "Identifier") continue;
<del> this.scope.renames.set(params[i].name, param);
<del> }
<del> if (functionExpression.body.type === "BlockStatement") {
<del> this.prewalkStatement(functionExpression.body);
<del> this.walkStatement(functionExpression.body);
<del> } else this.walkExpression(functionExpression.body);
<del> });
<del> this.scope.topLevelScope = wasTopLevel;
<add> }
<add> this.walkExpression(argOrThis);
<ide> };
<add> const params = functionExpression.params;
<add> const renameThis = currentThis ? renameArgOrThis(currentThis) : null;
<add> const args = options.map(renameArgOrThis);
<add> const wasTopLevel = this.scope.topLevelScope;
<add> this.scope.topLevelScope = false;
<add> this.inScope(params.filter((identifier, idx) => !args[idx]), () => {
<add> if (renameThis) {
<add> this.scope.renames.set("this", renameThis);
<add> }
<add> for (let i = 0; i < args.length; i++) {
<add> const param = args[i];
<add> if (!param) continue;
<add> if (!params[i] || params[i].type !== "Identifier") continue;
<add> this.scope.renames.set(params[i].name, param);
<add> }
<add> if (functionExpression.body.type === "BlockStatement") {
<add> this.prewalkStatement(functionExpression.body);
<add> this.walkStatement(functionExpression.body);
<add> } else this.walkExpression(functionExpression.body);
<add> });
<add> this.scope.topLevelScope = wasTopLevel;
<add> }
<add>
<add> walkCallExpression(expression) {
<ide> if (
<ide> expression.callee.type === "MemberExpression" &&
<ide> expression.callee.object.type === "FunctionExpression" &&
<ide> !expression.callee.computed &&
<ide> (expression.callee.property.name === "call" ||
<ide> expression.callee.property.name === "bind") &&
<del> expression.arguments &&
<ide> expression.arguments.length > 0
<ide> ) {
<ide> // (function(…) { }.call/bind(?, …))
<del> walkIIFE(
<add> this._walkIIFE(
<ide> expression.callee.object,
<ide> expression.arguments.slice(1),
<ide> expression.arguments[0]
<ide> );
<del> } else if (
<del> expression.callee.type === "FunctionExpression" &&
<del> expression.arguments
<del> ) {
<add> } else if (expression.callee.type === "FunctionExpression") {
<ide> // (function(…) { }(…))
<del> walkIIFE(expression.callee, expression.arguments, null);
<add> this._walkIIFE(expression.callee, expression.arguments, null);
<ide> } else if (expression.callee.type === "Import") {
<del> result = this.hooks.importCall.call(expression);
<add> let result = this.hooks.importCall.call(expression);
<ide> if (result === true) return;
<ide>
<ide> if (expression.arguments) this.walkExpressions(expression.arguments);
<ide> class Parser extends Tapable {
<ide> if (callee.isIdentifier()) {
<ide> const callHook = this.hooks.call.get(callee.identifier);
<ide> if (callHook !== undefined) {
<del> result = callHook.call(expression);
<add> let result = callHook.call(expression);
<ide> if (result === true) return;
<ide> }
<ide> let identifier = callee.identifier.replace(/\.[^.]+$/, "");
<ide> if (identifier !== callee.identifier) {
<ide> const callAnyHook = this.hooks.callAnyMember.get(identifier);
<ide> if (callAnyHook !== undefined) {
<del> result = callAnyHook.call(expression);
<add> let result = callAnyHook.call(expression);
<ide> if (result === true) return;
<ide> }
<ide> }
<ide><path>test/configCases/parsing/harmony-this/index.js
<ide>
<ide> import {extendThisClass, returnThisArrow, returnThisMember, that} from "./abc";
<ide> import d, {a, b as B, C as _C, D as _D, E, F, f1, f2, f3, G} from "./abc";
<add>import {bindThis, callThis, applyThis} from "./issue-7213";
<ide>
<ide> import * as abc from "./abc";
<ide>
<del>it("should have this = undefined on harmony modules", function() {
<add>it("should have this = undefined on harmony modules", () => {
<ide> expect((typeof that)).toBe("undefined");
<ide> expect((typeof abc.that)).toBe("undefined");
<ide> expect((typeof returnThisArrow())).toBe("undefined");
<ide> it("should have this = undefined on harmony modules", function() {
<ide> }).toThrowError();
<ide> });
<ide>
<del>it("should not break classes and functions", function() {
<add>it("should not break classes and functions", () => {
<ide> expect((new _C).foo()).toBe("bar");
<ide> expect((new _C).bar()).toBe("bar");
<ide> expect((new _D).prop()).toBe("ok");
<ide> it("should not break classes and functions", function() {
<ide> expect((new G("ok")).getX()).toBe("ok");
<ide> });
<ide>
<del>function x() { throw new Error("should not be executed"); }
<del>it("should have this = undefined on imported non-strict functions", function() {
<add>function x() {
<add> throw new Error("should not be executed");
<add>}
<add>
<add>it("should have this = undefined on imported non-strict functions", () => {
<ide> x
<ide> expect(d()).toBe("undefined");
<ide> x
<ide> import C2, { C } from "./new";
<ide>
<ide> import * as New from "./new";
<ide>
<del>it("should be possible to use new correctly", function() {
<add>it("should be possible to use new correctly", () => {
<ide> x
<ide> expect(new C()).toEqual({ok: true});
<ide> x
<ide> expect(new C2()).toEqual({ok: true});
<ide> x
<ide> expect(new New.C()).toEqual({ok: true});
<ide> });
<add>
<add>it("should not break Babel arrow function transform", () => {
<add> expect(bindThis()).toBe(undefined);
<add> expect(callThis).toBe(undefined);
<add> expect(applyThis).toBe(undefined);
<add>});
<ide><path>test/configCases/parsing/harmony-this/issue-7213.js
<add>// This helper is taken from Babel
<add>function _newArrowCheck(innerThis, boundThis) {
<add> if (innerThis !== boundThis) {
<add> throw new TypeError("Cannot instantiate an arrow function");
<add> }
<add>}
<add>
<add>let _this = this;
<add>export let bindThis = function() {
<add> _newArrowCheck(this, _this);
<add> return this
<add>}.bind(this);
<add>
<add>export let callThis = function() {
<add> return this
<add>}.call(this)
<add>
<add>export let applyThis = function() {
<add> return this
<add>}.apply(this) | 3 |
Javascript | Javascript | use only polling on travis/sl | c64a9853074a392ba83a0f790b28b3be96c1144a | <ide><path>karma-shared.conf.js
<ide> module.exports = function(config) {
<ide> }
<ide> }
<ide> });
<add>
<add>
<add> // TODO(vojta): remove once SauceLabs supports websockets.
<add> // This speeds up the capturing a bit, as browsers don't even try to use websocket.
<add> if (process.env.TRAVIS) {
<add> config.transports = ['xhr-polling'];
<add> }
<ide> }; | 1 |
PHP | PHP | add test for | a40fa5cae95119c81fdda9541f90c9071c7d6d35 | <ide><path>lib/Cake/Test/Case/Utility/HashTest.php
<ide> public function testGet() {
<ide> $this->assertEquals($data[1]['Article'], $result);
<ide> }
<ide>
<add>/**
<add> * Test get() with an invalid path
<add> *
<add> * @expectedException InvalidArgumentException
<add> * @return void
<add> */
<add> public function testGetInvalidPath() {
<add> Hash::get(array('one' => 'two'), true);
<add> }
<add>
<ide> /**
<ide> * Test dimensions.
<ide> * | 1 |
PHP | PHP | use field instead of property for entity | b54e93730665cfb818226f1058bb737f7c9c6a20 | <ide><path>src/Auth/BaseAuthenticate.php
<ide> protected function _findUser(string $username, ?string $password = null)
<ide> }
<ide>
<ide> $this->_needsPasswordRehash = $hasher->needsRehash($hashedPassword);
<del> $result->unsetProperty($passwordField);
<add> $result->unsetField($passwordField);
<ide> }
<ide> $hidden = $result->getHidden();
<ide> if ($password === null && in_array($passwordField, $hidden)) {
<ide><path>src/Datasource/EntityInterface.php
<ide> interface EntityInterface extends ArrayAccess, JsonSerializable
<ide> {
<ide> /**
<del> * Sets hidden properties.
<add> * Sets hidden fields.
<ide> *
<del> * @param array $properties An array of properties to hide from array exports.
<del> * @param bool $merge Merge the new properties with the existing. By default false.
<add> * @param array $fields An array of fields to hide from array exports.
<add> * @param bool $merge Merge the new fields with the existing. By default false.
<ide> * @return $this
<ide> */
<del> public function setHidden(array $properties, bool $merge = false);
<add> public function setHidden(array $fields, bool $merge = false);
<ide>
<ide> /**
<del> * Gets the hidden properties.
<add> * Gets the hidden fields.
<ide> *
<ide> * @return array
<ide> */
<ide> public function getHidden(): array;
<ide>
<ide> /**
<del> * Sets the virtual properties on this entity.
<add> * Sets the virtual fields on this entity.
<ide> *
<del> * @param array $properties An array of properties to treat as virtual.
<del> * @param bool $merge Merge the new properties with the existing. By default false.
<add> * @param array $fields An array of fields to treat as virtual.
<add> * @param bool $merge Merge the new fields with the existing. By default false.
<ide> * @return $this
<ide> */
<del> public function setVirtual(array $properties, bool $merge = false);
<add> public function setVirtual(array $fields, bool $merge = false);
<ide>
<ide> /**
<del> * Gets the virtual properties on this entity.
<add> * Gets the virtual fields on this entity.
<ide> *
<ide> * @return array
<ide> */
<ide> public function getVirtual(): array;
<ide>
<ide> /**
<del> * Sets the dirty status of a single property.
<add> * Sets the dirty status of a single field.
<ide> *
<del> * @param string $property the field to set or check status for
<del> * @param bool $isDirty true means the property was changed, false means
<add> * @param string $field the field to set or check status for
<add> * @param bool $isDirty true means the field was changed, false means
<ide> * it was not changed
<ide> * @return $this
<ide> */
<del> public function setDirty(string $property, bool $isDirty);
<add> public function setDirty(string $field, bool $isDirty);
<ide>
<ide> /**
<del> * Checks if the entity is dirty or if a single property of it is dirty.
<add> * Checks if the entity is dirty or if a single field of it is dirty.
<ide> *
<del> * @param string|null $property The field to check the status for. Null for the whole entity.
<del> * @return bool Whether the property was changed or not
<add> * @param string|null $field The field to check the status for. Null for the whole entity.
<add> * @return bool Whether the field was changed or not
<ide> */
<del> public function isDirty(?string $property = null): bool;
<add> public function isDirty(?string $field = null): bool;
<ide>
<ide> /**
<del> * Gets the dirty properties.
<add> * Gets the dirty fields.
<ide> *
<ide> * @return array
<ide> */
<ide> public function setErrors(array $fields, bool $overwrite = false);
<ide> public function setError($field, $errors, bool $overwrite = false);
<ide>
<ide> /**
<del> * Stores whether or not a property value can be changed or set in this entity.
<add> * Stores whether or not a field value can be changed or set in this entity.
<ide> *
<del> * @param string|array $property single or list of properties to change its accessibility
<del> * @param bool $set true marks the property as accessible, false will
<add> * @param string|array $field single or list of fields to change its accessibility
<add> * @param bool $set true marks the field as accessible, false will
<ide> * mark it as protected.
<ide> * @return $this
<ide> */
<del> public function setAccess($property, bool $set);
<add> public function setAccess($field, bool $set);
<ide>
<ide> /**
<del> * Checks if a property is accessible
<add> * Checks if a field is accessible
<ide> *
<del> * @param string $property Property name to check
<add> * @param string $field Field name to check
<ide> * @return bool
<ide> */
<del> public function isAccessible(string $property): bool;
<add> public function isAccessible(string $field): bool;
<ide>
<ide> /**
<ide> * Sets the source alias
<ide> public function setSource(string $alias);
<ide> public function getSource(): string;
<ide>
<ide> /**
<del> * Returns an array with the requested original properties
<del> * stored in this entity, indexed by property name.
<add> * Returns an array with the requested original fields
<add> * stored in this entity, indexed by field name.
<ide> *
<del> * @param array $properties List of properties to be returned
<add> * @param array $field List of fields to be returned
<ide> * @return array
<ide> */
<del> public function extractOriginal(array $properties);
<add> public function extractOriginal(array $field);
<ide>
<ide> /**
<del> * Returns an array with only the original properties
<del> * stored in this entity, indexed by property name.
<add> * Returns an array with only the original fields
<add> * stored in this entity, indexed by field name.
<ide> *
<del> * @param array $properties List of properties to be returned
<add> * @param array $fields List of fields to be returned
<ide> * @return array
<ide> */
<del> public function extractOriginalChanged(array $properties);
<add> public function extractOriginalChanged(array $fields);
<ide>
<ide> /**
<del> * Sets one or multiple properties to the specified value
<add> * Sets one or multiple fields to the specified value
<ide> *
<del> * @param string|array $property the name of property to set or a list of
<del> * properties with their respective values
<del> * @param mixed $value The value to set to the property or an array if the
<add> * @param string|array $field the name of field to set or a list of
<add> * fields with their respective values
<add> * @param mixed $value The value to set to the field or an array if the
<ide> * first argument is also an array, in which case will be treated as $options
<del> * @param array $options options to be used for setting the property. Allowed option
<add> * @param array $options options to be used for setting the field. Allowed option
<ide> * keys are `setter` and `guard`
<ide> * @return \Cake\Datasource\EntityInterface
<ide> */
<del> public function set($property, $value = null, array $options = []);
<add> public function set($field, $value = null, array $options = []);
<ide>
<ide> /**
<del> * Returns the value of a property by name
<add> * Returns the value of a field by name
<ide> *
<del> * @param string $property the name of the property to retrieve
<add> * @param string $field the name of the field to retrieve
<ide> * @return mixed
<ide> */
<del> public function &get($property);
<add> public function &get($field);
<ide>
<ide> /**
<del> * Returns whether this entity contains a property named $property
<add> * Returns whether this entity contains a field named $field
<ide> * regardless of if it is empty.
<ide> *
<del> * @param string|array $property The property to check.
<add> * @param string|array $field The field to check.
<ide> * @return bool
<ide> */
<del> public function has($property);
<add> public function has($field);
<ide>
<ide> /**
<del> * Removes a property or list of properties from this entity
<add> * Removes a field or list of fields from this entity
<ide> *
<del> * @param string|array $property The property to unset.
<add> * @param string|array $field The field to unset.
<ide> * @return \Cake\Datasource\EntityInterface
<ide> */
<del> public function unsetProperty($property);
<add> public function unsetField($field);
<ide>
<ide> /**
<del> * Get the list of visible properties.
<add> * Get the list of visible fields.
<ide> *
<del> * @return array A list of properties that are 'visible' in all representations.
<add> * @return array A list of fields that are 'visible' in all representations.
<ide> */
<del> public function visibleProperties();
<add> public function getVisible(): array;
<ide>
<ide> /**
<del> * Returns an array with all the visible properties set in this entity.
<add> * Returns an array with all the visible fields set in this entity.
<ide> *
<del> * *Note* hidden properties are not visible, and will not be output
<add> * *Note* hidden fields are not visible, and will not be output
<ide> * by toArray().
<ide> *
<ide> * @return array
<ide> */
<ide> public function toArray();
<ide>
<ide> /**
<del> * Returns an array with the requested properties
<del> * stored in this entity, indexed by property name
<add> * Returns an array with the requested fields
<add> * stored in this entity, indexed by field name
<ide> *
<del> * @param array $properties list of properties to be returned
<del> * @param bool $onlyDirty Return the requested property only if it is dirty
<add> * @param array $fields list of fields to be returned
<add> * @param bool $onlyDirty Return the requested field only if it is dirty
<ide> * @return array
<ide> */
<del> public function extract(array $properties, $onlyDirty = false);
<add> public function extract(array $fields, $onlyDirty = false);
<ide>
<ide> /**
<ide> * Sets the entire entity as clean, which means that it will appear as
<del> * no properties being modified or added at all. This is an useful call
<add> * no fields being modified or added at all. This is an useful call
<ide> * for an initial object hydration
<ide> *
<ide> * @return void
<ide><path>src/Datasource/EntityTrait.php
<ide> trait EntityTrait
<ide> {
<ide> /**
<del> * Holds all properties and their values for this entity
<add> * Holds all fields and their values for this entity.
<ide> *
<ide> * @var array
<ide> */
<del> protected $_properties = [];
<add> protected $_fields = [];
<ide>
<ide> /**
<del> * Holds all properties that have been changed and their original values for this entity
<add> * Holds all fields that have been changed and their original values for this entity.
<ide> *
<ide> * @var array
<ide> */
<ide> protected $_original = [];
<ide>
<ide> /**
<del> * List of property names that should **not** be included in JSON or Array
<add> * List of field names that should **not** be included in JSON or Array
<ide> * representations of this Entity.
<ide> *
<ide> * @var array
<ide> trait EntityTrait
<ide> protected $_new = true;
<ide>
<ide> /**
<del> * List of errors per field as stored in this object
<add> * List of errors per field as stored in this object.
<ide> *
<ide> * @var array
<ide> */
<ide> protected $_errors = [];
<ide>
<ide> /**
<del> * List of invalid fields and their data for errors upon validation/patching
<add> * List of invalid fields and their data for errors upon validation/patching.
<ide> *
<ide> * @var array
<ide> */
<ide> protected $_invalid = [];
<ide>
<ide> /**
<ide> * Map of properties in this entity that can be safely assigned, each
<del> * property name points to a boolean indicating its status. An empty array
<add> * field name points to a boolean indicating its status. An empty array
<ide> * means no properties are accessible
<ide> *
<del> * The special property '\*' can also be mapped, meaning that any other property
<add> * The special field '\*' can also be mapped, meaning that any other field
<ide> * not defined in the map will take its value. For example, `'\*' => true`
<del> * means that any property not defined in the map will be accessible by default
<add> * means that any field not defined in the map will be accessible by default
<ide> *
<ide> * @var array
<ide> */
<ide> trait EntityTrait
<ide> /**
<ide> * Magic getter to access properties that have been set in this entity
<ide> *
<del> * @param string $property Name of the property to access
<add> * @param string $field Name of the field to access
<ide> * @return mixed
<ide> */
<del> public function &__get($property)
<add> public function &__get($field)
<ide> {
<del> return $this->get($property);
<add> return $this->get($field);
<ide> }
<ide>
<ide> /**
<ide> * Magic setter to add or edit a property in this entity
<ide> *
<del> * @param string $property The name of the property to set
<add> * @param string $field The name of the property to set
<ide> * @param mixed $value The value to set to the property
<ide> * @return void
<ide> */
<del> public function __set($property, $value)
<add> public function __set($field, $value)
<ide> {
<del> $this->set($property, $value);
<add> $this->set($field, $value);
<ide> }
<ide>
<ide> /**
<ide> * Returns whether this entity contains a property named $property
<ide> * regardless of if it is empty.
<ide> *
<del> * @param string $property The property to check.
<add> * @param string $field The property to check.
<ide> * @return bool
<ide> * @see \Cake\ORM\Entity::has()
<ide> */
<del> public function __isset($property)
<add> public function __isset($field)
<ide> {
<del> return $this->has($property);
<add> return $this->has($field);
<ide> }
<ide>
<ide> /**
<ide> * Removes a property from this entity
<ide> *
<del> * @param string $property The property to unset
<add> * @param string $field The property to unset
<ide> * @return void
<ide> */
<del> public function __unset($property)
<add> public function __unset($field)
<ide> {
<del> $this->unsetProperty($property);
<add> $this->unsetField($field);
<ide> }
<ide>
<ide> /**
<ide> public function __unset($property)
<ide> * $entity->set('name', 'Andrew');
<ide> * ```
<ide> *
<del> * @param string|array $property the name of property to set or a list of
<add> * @param string|array $field the name of property to set or a list of
<ide> * properties with their respective values
<ide> * @param mixed $value The value to set to the property or an array if the
<ide> * first argument is also an array, in which case will be treated as $options
<ide> public function __unset($property)
<ide> * @return $this
<ide> * @throws \InvalidArgumentException
<ide> */
<del> public function set($property, $value = null, array $options = [])
<add> public function set($field, $value = null, array $options = [])
<ide> {
<del> if (is_string($property) && $property !== '') {
<add> if (is_string($field) && $field !== '') {
<ide> $guard = false;
<del> $property = [$property => $value];
<add> $field = [$field => $value];
<ide> } else {
<ide> $guard = true;
<ide> $options = (array)$value;
<ide> }
<ide>
<del> if (!is_array($property)) {
<add> if (!is_array($field)) {
<ide> throw new InvalidArgumentException('Cannot set an empty property');
<ide> }
<ide> $options += ['setter' => true, 'guard' => $guard];
<ide>
<del> foreach ($property as $p => $value) {
<del> if ($options['guard'] === true && !$this->isAccessible($p)) {
<add> foreach ($field as $name => $value) {
<add> if ($options['guard'] === true && !$this->isAccessible($name)) {
<ide> continue;
<ide> }
<ide>
<del> $this->setDirty($p, true);
<add> $this->setDirty($name, true);
<ide>
<del> if (!array_key_exists($p, $this->_original) &&
<del> array_key_exists($p, $this->_properties) &&
<del> $this->_properties[$p] !== $value
<add> if (!array_key_exists($name, $this->_original) &&
<add> array_key_exists($name, $this->_fields) &&
<add> $this->_fields[$name] !== $value
<ide> ) {
<del> $this->_original[$p] = $this->_properties[$p];
<add> $this->_original[$name] = $this->_fields[$name];
<ide> }
<ide>
<ide> if (!$options['setter']) {
<del> $this->_properties[$p] = $value;
<add> $this->_fields[$name] = $value;
<ide> continue;
<ide> }
<ide>
<del> $setter = static::_accessor($p, 'set');
<add> $setter = static::_accessor($name, 'set');
<ide> if ($setter) {
<ide> $value = $this->{$setter}($value);
<ide> }
<del> $this->_properties[$p] = $value;
<add> $this->_fields[$name] = $value;
<ide> }
<ide>
<ide> return $this;
<ide> public function set($property, $value = null, array $options = [])
<ide> /**
<ide> * Returns the value of a property by name
<ide> *
<del> * @param string $property the name of the property to retrieve
<add> * @param string $field the name of the property to retrieve
<ide> * @return mixed
<ide> * @throws \InvalidArgumentException if an empty property name is passed
<ide> */
<del> public function &get($property)
<add> public function &get($field)
<ide> {
<del> if (!strlen((string)$property)) {
<add> if (!strlen((string)$field)) {
<ide> throw new InvalidArgumentException('Cannot get an empty property');
<ide> }
<ide>
<ide> $value = null;
<del> $method = static::_accessor($property, 'get');
<add> $method = static::_accessor($field, 'get');
<ide>
<del> if (isset($this->_properties[$property])) {
<del> $value =& $this->_properties[$property];
<add> if (isset($this->_fields[$field])) {
<add> $value =& $this->_fields[$field];
<ide> }
<ide>
<ide> if ($method) {
<ide> public function &get($property)
<ide> /**
<ide> * Returns the value of an original property by name
<ide> *
<del> * @param string $property the name of the property for which original value is retrieved.
<add> * @param string $field the name of the property for which original value is retrieved.
<ide> * @return mixed
<ide> * @throws \InvalidArgumentException if an empty property name is passed.
<ide> */
<del> public function getOriginal($property)
<add> public function getOriginal($field)
<ide> {
<del> if (!strlen((string)$property)) {
<add> if (!strlen((string)$field)) {
<ide> throw new InvalidArgumentException('Cannot get an empty property');
<ide> }
<del> if (array_key_exists($property, $this->_original)) {
<del> return $this->_original[$property];
<add> if (array_key_exists($field, $this->_original)) {
<add> return $this->_original[$field];
<ide> }
<ide>
<del> return $this->get($property);
<add> return $this->get($field);
<ide> }
<ide>
<ide> /**
<ide> public function getOriginalValues()
<ide> {
<ide> $originals = $this->_original;
<ide> $originalKeys = array_keys($originals);
<del> foreach ($this->_properties as $key => $value) {
<add> foreach ($this->_fields as $key => $value) {
<ide> if (!in_array($key, $originalKeys)) {
<ide> $originals[$key] = $value;
<ide> }
<ide> public function getOriginalValues()
<ide> * When checking multiple properties. All properties must not be null
<ide> * in order for true to be returned.
<ide> *
<del> * @param string|array $property The property or properties to check.
<add> * @param string|array $field The property or properties to check.
<ide> * @return bool
<ide> */
<del> public function has($property)
<add> public function has($field)
<ide> {
<del> foreach ((array)$property as $prop) {
<add> foreach ((array)$field as $prop) {
<ide> if ($this->get($prop) === null) {
<ide> return false;
<ide> }
<ide> public function has($property)
<ide> *
<ide> * and false in all other cases.
<ide> *
<del> * @param string $property The property to check.
<add> * @param string $field The property to check.
<ide> * @return bool
<ide> */
<del> public function isEmpty($property)
<add> public function isEmpty($field)
<ide> {
<del> $value = $this->get($property);
<add> $value = $this->get($field);
<ide> if ($value === null
<ide> || (is_array($value) && empty($value)
<ide> || (is_string($value) && empty($value)))
<ide> public function isEmpty($property)
<ide> *
<ide> * and false in all other cases.
<ide> *
<del> * @param string $property The property to check.
<add> * @param string $field The property to check.
<ide> * @return bool
<ide> */
<del> public function hasValue($property)
<add> public function hasValue($field)
<ide> {
<del> return !$this->isEmpty($property);
<add> return !$this->isEmpty($field);
<ide> }
<ide>
<ide> /**
<ide> public function hasValue($property)
<ide> * $entity->unsetProperty(['name', 'last_name']);
<ide> * ```
<ide> *
<del> * @param string|array $property The property to unset.
<add> * @param string|array $field The property to unset.
<ide> * @return $this
<ide> */
<del> public function unsetProperty($property)
<add> public function unsetField($field)
<ide> {
<del> $property = (array)$property;
<del> foreach ($property as $p) {
<del> unset($this->_properties[$p], $this->_dirty[$p]);
<add> $field = (array)$field;
<add> foreach ($field as $p) {
<add> unset($this->_fields[$p], $this->_dirty[$p]);
<ide> }
<ide>
<ide> return $this;
<ide> }
<ide>
<add> /**
<add> * Removes a property or list of properties from this entity
<add> *
<add> * @deprecated 4.0.0 Use unsetField() instead.
<add> * @param string|array $field The field to unset.
<add> * @return $this
<add> */
<add> public function unsetProperty($field) {
<add> return $this->unsetField($field);
<add> }
<add>
<ide> /**
<ide> * Sets hidden properties.
<ide> *
<del> * @param array $properties An array of properties to hide from array exports.
<add> * @param array $fields An array of properties to hide from array exports.
<ide> * @param bool $merge Merge the new properties with the existing. By default false.
<ide> * @return $this
<ide> */
<del> public function setHidden(array $properties, bool $merge = false)
<add> public function setHidden(array $fields, bool $merge = false)
<ide> {
<ide> if ($merge === false) {
<del> $this->_hidden = $properties;
<add> $this->_hidden = $fields;
<ide>
<ide> return $this;
<ide> }
<ide>
<del> $properties = array_merge($this->_hidden, $properties);
<del> $this->_hidden = array_unique($properties);
<add> $fields = array_merge($this->_hidden, $fields);
<add> $this->_hidden = array_unique($fields);
<ide>
<ide> return $this;
<ide> }
<ide> public function getHidden(): array
<ide> /**
<ide> * Sets the virtual properties on this entity.
<ide> *
<del> * @param array $properties An array of properties to treat as virtual.
<add> * @param array $fields An array of properties to treat as virtual.
<ide> * @param bool $merge Merge the new properties with the existing. By default false.
<ide> * @return $this
<ide> */
<del> public function setVirtual(array $properties, bool $merge = false)
<add> public function setVirtual(array $fields, bool $merge = false)
<ide> {
<ide> if ($merge === false) {
<del> $this->_virtual = $properties;
<add> $this->_virtual = $fields;
<ide>
<ide> return $this;
<ide> }
<ide>
<del> $properties = array_merge($this->_virtual, $properties);
<del> $this->_virtual = array_unique($properties);
<add> $fields = array_merge($this->_virtual, $fields);
<add> $this->_virtual = array_unique($fields);
<ide>
<ide> return $this;
<ide> }
<ide>
<ide> /**
<del> * Gets the virtual properties on this entity.
<add> * Gets the virtual fields on this entity.
<ide> *
<ide> * @return array
<ide> */
<ide> public function getVirtual(): array
<ide> }
<ide>
<ide> /**
<del> * Get the list of visible properties.
<add> * Gets the list of visible fields.
<ide> *
<del> * The list of visible properties is all standard properties
<del> * plus virtual properties minus hidden properties.
<add> * The list of visible fields is all standard fields
<add> * plus virtual fields minus hidden fields.
<ide> *
<del> * @return array A list of properties that are 'visible' in all
<add> * @return array A list of fields that are 'visible' in all
<ide> * representations.
<ide> */
<del> public function visibleProperties()
<add> public function getVisible(): array
<ide> {
<del> $properties = array_keys($this->_properties);
<add> $properties = array_keys($this->_fields);
<ide> $properties = array_merge($properties, $this->_virtual);
<ide>
<ide> return array_diff($properties, $this->_hidden);
<ide> }
<ide>
<ide> /**
<del> * Returns an array with all the properties that have been set
<add> * Gets the list of visible fields.
<add> *
<add> * @deprecated 4.0.0 Use getVisible() instead.
<add> * @return array
<add> */
<add> public function visibleProperties(): array
<add> {
<add> return $this->getVisible();
<add> }
<add>
<add> /**
<add> * Returns an array with all the fields that have been set
<ide> * to this entity
<ide> *
<del> * This method will recursively transform entities assigned to properties
<add> * This method will recursively transform entities assigned to fields
<ide> * into arrays as well.
<ide> *
<ide> * @return array
<ide> */
<ide> public function toArray()
<ide> {
<ide> $result = [];
<del> foreach ($this->visibleProperties() as $property) {
<del> $value = $this->get($property);
<add> foreach ($this->getVisible() as $field) {
<add> $value = $this->get($field);
<ide> if (is_array($value)) {
<del> $result[$property] = [];
<add> $result[$field] = [];
<ide> foreach ($value as $k => $entity) {
<ide> if ($entity instanceof EntityInterface) {
<del> $result[$property][$k] = $entity->toArray();
<add> $result[$field][$k] = $entity->toArray();
<ide> } else {
<del> $result[$property][$k] = $entity;
<add> $result[$field][$k] = $entity;
<ide> }
<ide> }
<ide> } elseif ($value instanceof EntityInterface) {
<del> $result[$property] = $value->toArray();
<add> $result[$field] = $value->toArray();
<ide> } else {
<del> $result[$property] = $value;
<add> $result[$field] = $value;
<ide> }
<ide> }
<ide>
<ide> public function toArray()
<ide> */
<ide> public function jsonSerialize()
<ide> {
<del> return $this->extract($this->visibleProperties());
<add> return $this->extract($this->getVisible());
<ide> }
<ide>
<ide> /**
<ide> public function offsetSet($offset, $value)
<ide> */
<ide> public function offsetUnset($offset)
<ide> {
<del> $this->unsetProperty($offset);
<add> $this->unsetField($offset);
<ide> }
<ide>
<ide> /**
<ide> protected static function _accessor($property, $type)
<ide> * Returns an array with the requested properties
<ide> * stored in this entity, indexed by property name
<ide> *
<del> * @param array $properties list of properties to be returned
<add> * @param array $fields list of properties to be returned
<ide> * @param bool $onlyDirty Return the requested property only if it is dirty
<ide> * @return array
<ide> */
<del> public function extract(array $properties, $onlyDirty = false)
<add> public function extract(array $fields, $onlyDirty = false)
<ide> {
<ide> $result = [];
<del> foreach ($properties as $property) {
<del> if (!$onlyDirty || $this->isDirty($property)) {
<del> $result[$property] = $this->get($property);
<add> foreach ($fields as $field) {
<add> if (!$onlyDirty || $this->isDirty($field)) {
<add> $result[$field] = $this->get($field);
<ide> }
<ide> }
<ide>
<ide> public function extract(array $properties, $onlyDirty = false)
<ide> * Properties that are unchanged from their original value will be included in the
<ide> * return of this method.
<ide> *
<del> * @param array $properties List of properties to be returned
<add> * @param array $fields List of properties to be returned
<ide> * @return array
<ide> */
<del> public function extractOriginal(array $properties)
<add> public function extractOriginal(array $fields)
<ide> {
<ide> $result = [];
<del> foreach ($properties as $property) {
<del> $result[$property] = $this->getOriginal($property);
<add> foreach ($fields as $field) {
<add> $result[$field] = $this->getOriginal($field);
<ide> }
<ide>
<ide> return $result;
<ide> public function extractOriginal(array $properties)
<ide> * This method will only return properties that have been modified since
<ide> * the entity was built. Unchanged properties will be omitted.
<ide> *
<del> * @param array $properties List of properties to be returned
<add> * @param array $fields List of properties to be returned
<ide> * @return array
<ide> */
<del> public function extractOriginalChanged(array $properties)
<add> public function extractOriginalChanged(array $fields)
<ide> {
<ide> $result = [];
<del> foreach ($properties as $property) {
<del> $original = $this->getOriginal($property);
<del> if ($original !== $this->get($property)) {
<del> $result[$property] = $original;
<add> foreach ($fields as $field) {
<add> $original = $this->getOriginal($field);
<add> if ($original !== $this->get($field)) {
<add> $result[$field] = $original;
<ide> }
<ide> }
<ide>
<ide> public function extractOriginalChanged(array $properties)
<ide> /**
<ide> * Sets the dirty status of a single property.
<ide> *
<del> * @param string $property the field to set or check status for
<add> * @param string $field the field to set or check status for
<ide> * @param bool $isDirty true means the property was changed, false means
<ide> * it was not changed. Defaults to true.
<ide> * @return $this
<ide> */
<del> public function setDirty(string $property, bool $isDirty = true)
<add> public function setDirty(string $field, bool $isDirty = true)
<ide> {
<ide> if ($isDirty === false) {
<del> unset($this->_dirty[$property]);
<add> unset($this->_dirty[$field]);
<ide>
<ide> return $this;
<ide> }
<ide>
<del> $this->_dirty[$property] = true;
<del> unset($this->_errors[$property], $this->_invalid[$property]);
<add> $this->_dirty[$field] = true;
<add> unset($this->_errors[$field], $this->_invalid[$field]);
<ide>
<ide> return $this;
<ide> }
<ide>
<ide> /**
<ide> * Checks if the entity is dirty or if a single property of it is dirty.
<ide> *
<del> * @param string|null $property The field to check the status for. Null for the whole entity.
<add> * @param string|null $field The field to check the status for. Null for the whole entity.
<ide> * @return bool Whether the property was changed or not
<ide> */
<del> public function isDirty(?string $property = null): bool
<add> public function isDirty(?string $field = null): bool
<ide> {
<del> if ($property === null) {
<add> if ($field === null) {
<ide> return !empty($this->_dirty);
<ide> }
<ide>
<del> return isset($this->_dirty[$property]);
<add> return isset($this->_dirty[$field]);
<ide> }
<ide>
<ide> /**
<ide> public function isNew(?bool $new = null): bool
<ide> $new = (bool)$new;
<ide>
<ide> if ($new) {
<del> foreach ($this->_properties as $k => $p) {
<add> foreach ($this->_fields as $k => $p) {
<ide> $this->_dirty[$k] = true;
<ide> }
<ide> }
<ide> public function hasErrors(bool $includeNested = true): bool
<ide> return false;
<ide> }
<ide>
<del> foreach ($this->_properties as $property) {
<del> if ($this->_readHasErrors($property)) {
<add> foreach ($this->_fields as $field) {
<add> if ($this->_readHasErrors($field)) {
<ide> return true;
<ide> }
<ide> }
<ide> public function hasErrors(bool $includeNested = true): bool
<ide> */
<ide> public function getErrors(): array
<ide> {
<del> $diff = array_diff_key($this->_properties, $this->_errors);
<add> $diff = array_diff_key($this->_fields, $this->_errors);
<ide>
<ide> return $this->_errors + (new Collection($diff))
<ide> ->filter(function ($value) {
<ide> public function getInvalid(): array
<ide> */
<ide> public function getInvalidField(string $field)
<ide> {
<del> $value = $this->_invalid[$field] ?? null;
<del>
<del> return $value;
<add> return $this->_invalid[$field] ?? null;
<ide> }
<ide>
<ide> /**
<ide> public function setInvalidField(string $field, $value)
<ide> * $entity->setAccess('*', false); // Mark all properties as protected
<ide> * ```
<ide> *
<del> * @param string|array $property single or list of properties to change its accessibility
<add> * @param string|array $field single or list of properties to change its accessibility
<ide> * @param bool $set true marks the property as accessible, false will
<ide> * mark it as protected.
<ide> * @return $this
<ide> */
<del> public function setAccess($property, bool $set)
<add> public function setAccess($field, bool $set)
<ide> {
<del> if ($property === '*') {
<add> if ($field === '*') {
<ide> $this->_accessible = array_map(function ($p) use ($set) {
<ide> return (bool)$set;
<ide> }, $this->_accessible);
<ide> public function setAccess($property, bool $set)
<ide> return $this;
<ide> }
<ide>
<del> foreach ((array)$property as $prop) {
<add> foreach ((array)$field as $prop) {
<ide> $this->_accessible[$prop] = (bool)$set;
<ide> }
<ide>
<ide> public function setAccess($property, bool $set)
<ide> * $entity->isAccessible('id'); // Returns whether it can be set or not
<ide> * ```
<ide> *
<del> * @param string $property Property name to check
<add> * @param string $field Property name to check
<ide> * @return bool
<ide> */
<del> public function isAccessible(string $property): bool
<add> public function isAccessible(string $field): bool
<ide> {
<del> $value = $this->_accessible[$property] ??
<add> $value = $this->_accessible[$field] ??
<ide> null;
<ide>
<ide> return ($value === null && !empty($this->_accessible['*'])) || $value;
<ide> public function __toString(): string
<ide> */
<ide> public function __debugInfo(): array
<ide> {
<del> $properties = $this->_properties;
<add> $fields = $this->_fields;
<ide> foreach ($this->_virtual as $field) {
<del> $properties[$field] = $this->$field;
<add> $fields[$field] = $this->$field;
<ide> }
<ide>
<del> return $properties + [
<add> return $fields + [
<ide> '[new]' => $this->isNew(),
<ide> '[accessible]' => $this->_accessible,
<ide> '[dirty]' => $this->_dirty,
<ide><path>src/ORM/Association/BelongsToMany.php
<ide> protected function _saveLinks(EntityInterface $sourceEntity, array $targetEntiti
<ide> // or if we are updating an existing link.
<ide> if ($changedKeys) {
<ide> $joint->isNew(true);
<del> $joint->unsetProperty($junction->getPrimaryKey())
<add> $joint->unsetField($junction->getPrimaryKey())
<ide> ->set(array_merge($sourceKeys, $targetKeys), ['guard' => false]);
<ide> }
<ide> $saved = $junction->save($joint, $options);
<ide><path>src/ORM/Association/HasOne.php
<ide> public function saveAssociated(EntityInterface $entity, array $options = [])
<ide> $targetEntity->set($properties, ['guard' => false]);
<ide>
<ide> if (!$this->getTarget()->save($targetEntity, $options)) {
<del> $targetEntity->unsetProperty(array_keys($properties));
<add> $targetEntity->unsetField(array_keys($properties));
<ide>
<ide> return false;
<ide> }
<ide><path>src/ORM/Behavior/Translate/ShadowTableStrategy.php
<ide> protected function rowMapper($results, $locale)
<ide> return $row;
<ide> }
<ide>
<add> /** @var \Cake\ORM\Entity $translation|array */
<ide> $translation = $row['translation'];
<ide>
<del> $keys = $hydrated ? $translation->visibleProperties() : array_keys($translation);
<add> $keys = $hydrated ? $translation->getVisible() : array_keys($translation);
<ide>
<ide> foreach ($keys as $field) {
<ide> if ($field === 'locale') {
<ide><path>src/ORM/Behavior/Translate/TranslateStrategyTrait.php
<ide> public function getLocale(): string
<ide> */
<ide> protected function unsetEmptyFields($entity)
<ide> {
<add> /** @var \Cake\ORM\Entity[] $translations */
<ide> $translations = (array)$entity->get('_translations');
<ide> foreach ($translations as $locale => $translation) {
<ide> $fields = $translation->extract($this->_config['fields'], false);
<ide> foreach ($fields as $field => $value) {
<ide> if (strlen($value) === 0) {
<del> $translation->unsetProperty($field);
<add> $translation->unsetField($field);
<ide> }
<ide> }
<ide>
<ide> protected function unsetEmptyFields($entity)
<ide> // If now, the whole _translations property is empty,
<ide> // unset it completely and return
<ide> if (empty($entity->get('_translations'))) {
<del> $entity->unsetProperty('_translations');
<add> $entity->unsetField('_translations');
<ide> }
<ide> }
<ide>
<ide> public function buildMarshalMap(Marshaller $marshaller, array $map, array $optio
<ide> */
<ide> public function afterSave(EventInterface $event, EntityInterface $entity)
<ide> {
<del> $entity->unsetProperty('_i18n');
<add> $entity->unsetField('_i18n');
<ide> }
<ide> }
<ide><path>src/ORM/Entity.php
<ide> public function __construct(array $properties = [], array $options = [])
<ide> }
<ide>
<ide> if (!empty($properties) && $options['markClean'] && !$options['useSetters']) {
<del> $this->_properties = $properties;
<add> $this->_fields = $properties;
<ide>
<ide> return;
<ide> }
<ide><path>src/ORM/Marshaller.php
<ide> protected function _mergeJoinData(array $original, BelongsToMany $assoc, array $
<ide>
<ide> // Scalar data can't be handled
<ide> if (!is_array($value)) {
<del> $record->unsetProperty('_joinData');
<add> $record->unsetField('_joinData');
<ide> continue;
<ide> }
<ide>
<ide><path>src/ORM/Table.php
<ide> protected function _processSave(EntityInterface $entity, ArrayObject $options)
<ide> }
<ide>
<ide> if (!$success && $isNew) {
<del> $entity->unsetProperty($this->getPrimaryKey());
<add> $entity->unsetField($this->getPrimaryKey());
<ide> $entity->isNew(true);
<ide> }
<ide>
<ide> public function saveMany(iterable $entities, $options = [])
<ide> {
<ide> $isNew = [];
<ide> $cleanup = function ($entities) use (&$isNew): void {
<add> /** @var \Cake\Datasource\EntityInterface[] $entities */
<ide> foreach ($entities as $key => $entity) {
<ide> if (isset($isNew[$key]) && $isNew[$key]) {
<del> $entity->unsetProperty($this->getPrimaryKey());
<add> $entity->unsetField($this->getPrimaryKey());
<ide> $entity->isNew(true);
<ide> }
<ide> }
<ide><path>tests/TestCase/ORM/Behavior/TreeBehaviorTest.php
<ide> public function testChildCountNoTreeColumns()
<ide> {
<ide> $table = $this->table;
<ide> $node = $table->get(6);
<del> $node->unsetProperty('lft');
<del> $node->unsetProperty('rght');
<add> $node->unsetField('lft');
<add> $node->unsetField('rght');
<ide> $count = $this->table->childCount($node, false);
<ide> $this->assertEquals(4, $count);
<ide> }
<ide> public function testMoveNoTreeColumns()
<ide> $table = $this->getTableLocator()->get('MenuLinkTrees');
<ide> $table->addBehavior('Tree', ['scope' => ['menu' => 'main-menu']]);
<ide> $node = $table->get(8);
<del> $node->unsetProperty('lft');
<del> $node->unsetProperty('rght');
<add> $node->unsetField('lft');
<add> $node->unsetField('rght');
<ide> $node = $table->moveUp($node, true);
<ide> $this->assertEquals(['lft' => 1, 'rght' => 2], $node->extract(['lft', 'rght']));
<ide> $expected = [
<ide> public function testMoveDownNoTreeColumns()
<ide> $table = $this->getTableLocator()->get('MenuLinkTrees');
<ide> $table->addBehavior('Tree', ['scope' => ['menu' => 'main-menu']]);
<ide> $node = $table->get(1);
<del> $node->unsetProperty('lft');
<del> $node->unsetProperty('rght');
<add> $node->unsetField('lft');
<add> $node->unsetField('rght');
<ide> $node = $table->moveDown($node, true);
<ide> $this->assertEquals(['lft' => 7, 'rght' => 16], $node->extract(['lft', 'rght']));
<ide> $expected = [
<ide> public function testReParentNoTreeColumns()
<ide> {
<ide> $table = $this->table;
<ide> $entity = $table->get(6);
<del> $entity->unsetProperty('lft');
<del> $entity->unsetProperty('rght');
<add> $entity->unsetField('lft');
<add> $entity->unsetField('rght');
<ide> $entity->parent_id = 2;
<ide> $this->assertSame($entity, $table->save($entity));
<ide> $this->assertEquals(9, $entity->lft);
<ide> public function testRootingNoTreeColumns()
<ide> {
<ide> $table = $this->table;
<ide> $entity = $table->get(2);
<del> $entity->unsetProperty('lft');
<del> $entity->unsetProperty('rght');
<add> $entity->unsetField('lft');
<add> $entity->unsetField('rght');
<ide> $entity->parent_id = null;
<ide> $this->assertSame($entity, $table->save($entity));
<ide> $this->assertEquals(15, $entity->lft);
<ide> public function testDeleteRootNoTreeColumns()
<ide> {
<ide> $table = $this->table;
<ide> $entity = $table->get(1);
<del> $entity->unsetProperty('lft');
<del> $entity->unsetProperty('rght');
<add> $entity->unsetField('lft');
<add> $entity->unsetField('rght');
<ide> $this->assertTrue($table->delete($entity));
<ide>
<ide> $expected = [
<ide><path>tests/TestCase/ORM/EntityTest.php
<ide> public function testGetCustomGettersAfterSet()
<ide> */
<ide> public function testGetCacheClearedByUnset()
<ide> {
<add> /** @var \Cake\ORM\Entity|\PHPUnit\Framework\MockObject\MockObject $entity */
<ide> $entity = $this->getMockBuilder('Cake\ORM\Entity')
<ide> ->setMethods(['_getName'])
<ide> ->getMock();
<ide> public function testGetCacheClearedByUnset()
<ide> $entity->set('name', 'Jones');
<ide> $this->assertEquals('Dr. Jones', $entity->get('name'));
<ide>
<del> $entity->unsetProperty('name');
<add> $entity->unsetField('name');
<ide> $this->assertEquals('Dr. ', $entity->get('name'));
<ide> }
<ide>
<ide> public function testHas()
<ide> public function testUnset()
<ide> {
<ide> $entity = new Entity(['id' => 1, 'name' => 'bar']);
<del> $entity->unsetProperty('id');
<add> $entity->unsetField('id');
<ide> $this->assertFalse($entity->has('id'));
<ide> $this->assertTrue($entity->has('name'));
<del> $entity->unsetProperty('name');
<add> $entity->unsetField('name');
<ide> $this->assertFalse($entity->has('id'));
<ide> }
<ide>
<ide> public function testUnsetMakesClean()
<ide> {
<ide> $entity = new Entity(['id' => 1, 'name' => 'bar']);
<ide> $this->assertTrue($entity->isDirty('name'));
<del> $entity->unsetProperty('name');
<add> $entity->unsetField('name');
<ide> $this->assertFalse($entity->isDirty('name'), 'Removed properties are not dirty.');
<ide> }
<ide>
<ide> public function testUnsetMakesClean()
<ide> public function testUnsetMultiple()
<ide> {
<ide> $entity = new Entity(['id' => 1, 'name' => 'bar', 'thing' => 2]);
<del> $entity->unsetProperty(['id', 'thing']);
<add> $entity->unsetField(['id', 'thing']);
<ide> $this->assertFalse($entity->has('id'));
<ide> $this->assertTrue($entity->has('name'));
<ide> $this->assertFalse($entity->has('thing'));
<ide> public function testMagicIsset()
<ide> public function testMagicUnset()
<ide> {
<ide> $entity = $this->getMockBuilder('Cake\ORM\Entity')
<del> ->setMethods(['unsetProperty'])
<add> ->setMethods(['unsetField'])
<ide> ->getMock();
<ide> $entity->expects($this->at(0))
<del> ->method('unsetProperty')
<add> ->method('unsetField')
<ide> ->with('foo');
<ide> unset($entity->foo);
<ide> }
<ide> public function testSetArrayAccess()
<ide> public function testUnsetArrayAccess()
<ide> {
<ide> $entity = $this->getMockBuilder('Cake\ORM\Entity')
<del> ->setMethods(['unsetProperty'])
<add> ->setMethods(['unsetField'])
<ide> ->getMock();
<ide> $entity->expects($this->at(0))
<del> ->method('unsetProperty')
<add> ->method('unsetField')
<ide> ->with('foo');
<ide> unset($entity['foo']);
<ide> }
<ide><path>tests/TestCase/ORM/TableTest.php
<ide> public function testSaveEntityOnlySchemaFields()
<ide> $this->assertEquals($entity->id, self::$nextUserId);
<ide>
<ide> $row = $table->find('all')->where(['id' => self::$nextUserId])->first();
<del> $entity->unsetProperty('crazyness');
<add> $entity->unsetField('crazyness');
<ide> $this->assertEquals($entity->toArray(), $row->toArray());
<ide> }
<ide>
<ide> public function testSaveWithClonedEntity()
<ide> $article = $table->get(1);
<ide>
<ide> $cloned = clone $article;
<del> $cloned->unsetProperty('id');
<add> $cloned->unsetField('id');
<ide> $cloned->isNew(true);
<ide> $this->assertSame($cloned, $table->save($cloned));
<ide> $this->assertEquals( | 13 |
Javascript | Javascript | fix style errors | b031671138a6b997e934cfafa17f03247c036c71 | <ide><path>lib/readline.js
<ide> Interface.prototype._insertString = function(c) {
<ide> this.cursor += c.length;
<ide>
<ide> if (this._getCursorPos().cols === 0) {
<del> this._refreshLine();
<add> this._refreshLine();
<ide> } else {
<del> this.output.write(c);
<add> this.output.write(c);
<ide> }
<ide>
<ide> // a hack to get the line refreshed if it's needed
<ide><path>lib/tty.js
<ide> WriteStream.prototype._refreshSize = function() {
<ide> this.rows = newRows;
<ide> this.emit('resize');
<ide> }
<del>}
<add>};
<ide>
<ide>
<ide> // backwards-compat | 2 |
Ruby | Ruby | update docs on object#try | a7e0b2f843c4a6fdfe08a45c09d2ff44bcfe994e | <ide><path>activesupport/lib/active_support/core_ext/object/try.rb
<ide> class Object
<ide> # Without a method argument try will yield to the block unless the receiver is nil.
<ide> # @person.try { |p| "#{p.first_name} #{p.last_name}" }
<ide> #--
<del> # +try+ behaves like +Object#send+, unless called on +NilClass+.
<add> # +try+ behaves like +Object#public_send+, unless called on +NilClass+.
<ide> def try(*a, &b)
<ide> if a.empty? && block_given?
<ide> yield self | 1 |
Python | Python | use enumerate instead of range(len(...)) | 15e49b4e59ca20e706dbe8339345a35bd625857b | <ide><path>numpy/lib/index_tricks.py
<ide> def __getitem__(self, key):
<ide> arraytypes = []
<ide> scalartypes = []
<ide>
<del> for k in range(len(key)):
<add> for k, item in enumerate(key):
<ide> scalar = False
<del> if isinstance(key[k], slice):
<del> step = key[k].step
<del> start = key[k].start
<del> stop = key[k].stop
<add> if isinstance(item, slice):
<add> step = item.step
<add> start = item.start
<add> stop = item.stop
<ide> if start is None:
<ide> start = 0
<ide> if step is None:
<ide> def __getitem__(self, key):
<ide> newobj = array(newobj, copy=False, ndmin=ndmin)
<ide> if trans1d != -1:
<ide> newobj = newobj.swapaxes(-1, trans1d)
<del> elif isinstance(key[k], str):
<add> elif isinstance(item, str):
<ide> if k != 0:
<ide> raise ValueError("special directives must be the "
<ide> "first entry.")
<del> key0 = key[0]
<del> if key0 in 'rc':
<add> if item in 'rc':
<ide> matrix = True
<del> col = (key0 == 'c')
<add> col = (item == 'c')
<ide> continue
<del> if ',' in key0:
<del> vec = key0.split(',')
<add> if ',' in item:
<add> vec = item.split(',')
<ide> try:
<ide> axis, ndmin = [int(x) for x in vec[:2]]
<ide> if len(vec) == 3:
<ide> def __getitem__(self, key):
<ide> except:
<ide> raise ValueError("unknown special directive")
<ide> try:
<del> axis = int(key[k])
<add> axis = int(item)
<ide> continue
<ide> except (ValueError, TypeError):
<ide> raise ValueError("unknown special directive")
<del> elif type(key[k]) in ScalarType:
<del> newobj = array(key[k], ndmin=ndmin)
<add> elif type(item) in ScalarType:
<add> newobj = array(item, ndmin=ndmin)
<ide> scalars.append(k)
<ide> scalar = True
<ide> scalartypes.append(newobj.dtype)
<ide> else:
<del> newobj = key[k]
<add> newobj = item
<ide> if ndmin > 1:
<ide> tempobj = array(newobj, copy=False, subok=True)
<ide> newobj = array(newobj, copy=False, subok=True, | 1 |
Ruby | Ruby | fix railties tests that were order dependent | 655c2c8b50aa113ffbe5a8eaf1296b52d111ee4a | <ide><path>railties/test/generators_test.rb
<ide> def test_fallbacks_for_generators_on_find_by_namespace
<ide> klass = Rails::Generators.find_by_namespace(:plugin, :remarkable)
<ide> assert klass
<ide> assert_equal "test_unit:plugin", klass.namespace
<add> ensure
<add> Rails::Generators.fallbacks.delete(:remarkable)
<ide> end
<ide>
<ide> def test_fallbacks_for_generators_on_find_by_namespace_with_context
<ide> Rails::Generators.fallbacks[:remarkable] = :test_unit
<ide> klass = Rails::Generators.find_by_namespace(:remarkable, :rails, :plugin)
<ide> assert klass
<ide> assert_equal "test_unit:plugin", klass.namespace
<add> ensure
<add> Rails::Generators.fallbacks.delete(:remarkable)
<ide> end
<ide>
<ide> def test_fallbacks_for_generators_on_invoke
<ide> Rails::Generators.fallbacks[:shoulda] = :test_unit
<ide> TestUnit::Generators::ModelGenerator.expects(:start).with(["Account"], {})
<ide> Rails::Generators.invoke "shoulda:model", ["Account"]
<add> ensure
<add> Rails::Generators.fallbacks.delete(:shoulda)
<ide> end
<ide>
<ide> def test_nested_fallbacks_for_generators
<add> Rails::Generators.fallbacks[:shoulda] = :test_unit
<ide> Rails::Generators.fallbacks[:super_shoulda] = :shoulda
<ide> TestUnit::Generators::ModelGenerator.expects(:start).with(["Account"], {})
<ide> Rails::Generators.invoke "super_shoulda:model", ["Account"]
<add> ensure
<add> Rails::Generators.fallbacks.delete(:shoulda)
<add> Rails::Generators.fallbacks.delete(:super_shoulda)
<ide> end
<ide>
<ide> def test_developer_options_are_overwritten_by_user_options | 1 |
Go | Go | use containerd client `reconnect()` api | 2c682d5209d1d1cdd232ce3909fa25b6023f9a5b | <ide><path>libcontainerd/client_daemon.go
<ide> type client struct {
<ide> containers map[string]*container
<ide> }
<ide>
<add>func (c *client) reconnect() error {
<add> c.Lock()
<add> err := c.remote.Reconnect()
<add> c.Unlock()
<add> return err
<add>}
<add>
<ide> func (c *client) setRemote(remote *containerd.Client) {
<ide> c.Lock()
<ide> c.remote = remote
<ide><path>libcontainerd/remote_daemon.go
<ide> func (r *remote) monitorConnection(monitor *containerd.Client) {
<ide> }
<ide> <-r.daemonWaitCh
<ide>
<del> monitor.Close()
<ide> os.Remove(r.GRPC.Address)
<ide> if err := r.startContainerd(); err != nil {
<ide> r.logger.WithError(err).Error("failed restarting containerd")
<ide> continue
<ide> }
<ide>
<del> newMonitor, err := containerd.New(r.GRPC.Address)
<del> if err != nil {
<add> if err := monitor.Reconnect(); err != nil {
<ide> r.logger.WithError(err).Error("failed connect to containerd")
<ide> continue
<ide> }
<ide>
<del> monitor = newMonitor
<ide> var wg sync.WaitGroup
<ide>
<ide> for _, c := range r.clients {
<ide> func (r *remote) monitorConnection(monitor *containerd.Client) {
<ide> go func(c *client) {
<ide> defer wg.Done()
<ide> c.logger.WithField("namespace", c.namespace).Debug("creating new containerd remote client")
<del> c.remote.Close()
<del>
<del> remote, err := containerd.New(r.GRPC.Address, containerd.WithDefaultNamespace(c.namespace))
<del> if err != nil {
<add> if err := c.reconnect(); err != nil {
<ide> r.logger.WithError(err).Error("failed to connect to containerd")
<ide> // TODO: Better way to handle this?
<ide> // This *shouldn't* happen, but this could wind up where the daemon
<ide> // is not able to communicate with an eventually up containerd
<del> return
<ide> }
<del>
<del> c.setRemote(remote)
<ide> }(c)
<ide>
<ide> wg.Wait()
<ide><path>libcontainerd/remote_daemon_options_linux.go
<ide> func (o oomScore) Apply(r Remote) error {
<ide> }
<ide> return fmt.Errorf("WithOOMScore option not supported for this remote")
<ide> }
<del>
<del>// WithSubreaper sets whether containerd should register itself as a
<del>// subreaper
<del>func WithSubreaper(reap bool) RemoteOption {
<del> return subreaper(reap)
<del>}
<del>
<del>type subreaper bool
<del>
<del>func (s subreaper) Apply(r Remote) error {
<del> if remote, ok := r.(*remote); ok {
<del> remote.NoSubreaper = !bool(s)
<del> return nil
<del> }
<del> return fmt.Errorf("WithSubreaper option not supported for this remote")
<del>} | 3 |
Ruby | Ruby | add `extract_plist` strategy | a210b1a04ef52354c52ac3851582cc9ead805996 | <ide><path>Library/Homebrew/bundle_version.rb
<ide> class BundleVersion
<ide> sig { params(info_plist_path: Pathname).returns(T.nilable(T.attached_class)) }
<ide> def self.from_info_plist(info_plist_path)
<ide> plist = system_command!("plutil", args: ["-convert", "xml1", "-o", "-", info_plist_path]).plist
<add> from_info_plist_content(plist)
<add> end
<ide>
<add> sig { params(plist: T::Hash[String, T.untyped]).returns(T.nilable(T.attached_class)) }
<add> def self.from_info_plist_content(plist)
<ide> short_version = plist["CFBundleShortVersionString"].presence
<ide> version = plist["CFBundleVersion"].presence
<ide>
<ide><path>Library/Homebrew/livecheck/livecheck.rb
<ide> def latest_version(formula_or_cask, json: false, full_name: false, verbose: fals
<ide> regex_provided: livecheck_regex.present?,
<ide> block_provided: livecheck.strategy_block.present?,
<ide> )
<del> strategy = Strategy.from_symbol(livecheck_strategy)
<del> strategy ||= strategies.first
<add> strategy = Strategy.from_symbol(livecheck_strategy) || strategies.first
<ide> strategy_name = livecheck_strategy_names[strategy]
<ide>
<ide> if debug
<ide> def latest_version(formula_or_cask, json: false, full_name: false, verbose: fals
<ide> puts "Regex: #{livecheck_regex.inspect}" if livecheck_regex.present?
<ide> end
<ide>
<del> if livecheck_strategy == :page_match && (livecheck_regex.blank? && livecheck.strategy_block.blank?)
<del> odebug "#{strategy_name} strategy requires a regex or block"
<del> next
<del> end
<del>
<del> if livecheck_strategy.present? && livecheck_url.blank?
<del> odebug "#{strategy_name} strategy requires a URL"
<del> next
<del> end
<del>
<del> if livecheck_strategy.present? && strategies.exclude?(strategy)
<del> odebug "#{strategy_name} strategy does not apply to this URL"
<del> next
<add> if livecheck_strategy.present?
<add> if livecheck_strategy == :page_match && (livecheck_regex.blank? && livecheck.strategy_block.blank?)
<add> odebug "#{strategy_name} strategy requires a regex or block"
<add> next
<add> elsif livecheck_url.blank?
<add> odebug "#{strategy_name} strategy requires a URL"
<add> next
<add> elsif strategies.exclude?(strategy)
<add> odebug "#{strategy_name} strategy does not apply to this URL"
<add> next
<add> end
<ide> end
<ide>
<ide> next if strategy.blank?
<ide>
<del> strategy_data = strategy.find_versions(url, livecheck_regex, &livecheck.strategy_block)
<add> strategy_data = begin
<add> strategy.find_versions(url, livecheck_regex, cask: cask, &livecheck.strategy_block)
<add> rescue ArgumentError => e
<add> raise unless e.message.include?("unknown keyword: cask")
<add>
<add> odeprecated "`def self.find_versions` in `#{strategy}` without a `cask` parameter"
<add> strategy.find_versions(url, livecheck_regex, &livecheck.strategy_block)
<add> end
<ide> match_version_map = strategy_data[:matches]
<ide> regex = strategy_data[:regex]
<ide> messages = strategy_data[:messages]
<ide> def latest_version(formula_or_cask, json: false, full_name: false, verbose: fals
<ide> end
<ide> end
<ide>
<del> if debug && match_version_map.present?
<add> next if match_version_map.blank?
<add>
<add> if debug
<ide> puts
<ide> puts "Matched Versions:"
<ide>
<ide> def latest_version(formula_or_cask, json: false, full_name: false, verbose: fals
<ide> end
<ide> end
<ide>
<del> next if match_version_map.blank?
<del>
<ide> version_info = {
<ide> latest: Version.new(match_version_map.values.max_by { |v| LivecheckVersion.create(formula_or_cask, v) }),
<ide> }
<ide><path>Library/Homebrew/livecheck/strategy.rb
<ide> def self.page_content(url)
<ide> require_relative "strategy/bitbucket"
<ide> require_relative "strategy/cpan"
<ide> require_relative "strategy/electron_builder"
<add>require_relative "strategy/extract_plist"
<ide> require_relative "strategy/git"
<ide> require_relative "strategy/github_latest"
<ide> require_relative "strategy/gnome"
<ide><path>Library/Homebrew/livecheck/strategy/apache.rb
<ide> module Strategy
<ide> #
<ide> # @api public
<ide> class Apache
<add> extend T::Sig
<add>
<ide> # The `Regexp` used to determine if the strategy applies to the URL.
<ide> URL_MATCH_REGEX = %r{
<ide> ^https?://www\.apache\.org
<ide> def self.match?(url)
<ide> # @param url [String] the URL of the content to check
<ide> # @param regex [Regexp] a regex used for matching versions in content
<ide> # @return [Hash]
<del> def self.find_versions(url, regex = nil, &block)
<add> sig {
<add> params(
<add> url: String,
<add> regex: T.nilable(Regexp),
<add> cask: T.nilable(Cask::Cask),
<add> block: T.nilable(T.proc.params(arg0: String).returns(T.any(T::Array[String], String))),
<add> ).returns(T::Hash[Symbol, T.untyped])
<add> }
<add> def self.find_versions(url, regex, cask: nil, &block)
<ide> match = url.match(URL_MATCH_REGEX)
<ide>
<ide> # Use `\.t` instead of specific tarball extensions (e.g. .tar.gz)
<ide> def self.find_versions(url, regex = nil, &block)
<ide> # * `/href=["']?example-v?(\d+(?:\.\d+)+)-bin\.zip/i`
<ide> regex ||= /href=["']?#{Regexp.escape(match[:prefix])}v?(\d+(?:\.\d+)+)#{Regexp.escape(suffix)}/i
<ide>
<del> PageMatch.find_versions(page_url, regex, &block)
<add> PageMatch.find_versions(page_url, regex, cask: cask, &block)
<ide> end
<ide> end
<ide> end
<ide><path>Library/Homebrew/livecheck/strategy/bitbucket.rb
<ide> module Strategy
<ide> #
<ide> # @api public
<ide> class Bitbucket
<add> extend T::Sig
<add>
<ide> # The `Regexp` used to determine if the strategy applies to the URL.
<ide> URL_MATCH_REGEX = %r{
<ide> ^https?://bitbucket\.org
<ide> def self.match?(url)
<ide> # @param url [String] the URL of the content to check
<ide> # @param regex [Regexp] a regex used for matching versions in content
<ide> # @return [Hash]
<del> def self.find_versions(url, regex = nil, &block)
<add> sig {
<add> params(
<add> url: String,
<add> regex: T.nilable(Regexp),
<add> cask: T.nilable(Cask::Cask),
<add> block: T.nilable(T.proc.params(arg0: String).returns(T.any(T::Array[String], String))),
<add> ).returns(T::Hash[Symbol, T.untyped])
<add> }
<add> def self.find_versions(url, regex, cask: nil, &block)
<ide> match = url.match(URL_MATCH_REGEX)
<ide>
<ide> # Use `\.t` instead of specific tarball extensions (e.g. .tar.gz)
<ide> def self.find_versions(url, regex = nil, &block)
<ide> # * `/href=.*?example-v?(\d+(?:\.\d+)+)\.t/i`
<ide> regex ||= /href=.*?#{Regexp.escape(match[:prefix])}v?(\d+(?:\.\d+)+)#{Regexp.escape(suffix)}/i
<ide>
<del> PageMatch.find_versions(page_url, regex, &block)
<add> PageMatch.find_versions(page_url, regex, cask: cask, &block)
<ide> end
<ide> end
<ide> end
<ide><path>Library/Homebrew/livecheck/strategy/cpan.rb
<ide> module Strategy
<ide> #
<ide> # @api public
<ide> class Cpan
<add> extend T::Sig
<add>
<ide> NICE_NAME = "CPAN"
<ide>
<ide> # The `Regexp` used to determine if the strategy applies to the URL.
<ide> def self.match?(url)
<ide> # @param url [String] the URL of the content to check
<ide> # @param regex [Regexp] a regex used for matching versions in content
<ide> # @return [Hash]
<del> def self.find_versions(url, regex = nil, &block)
<add> sig {
<add> params(
<add> url: String,
<add> regex: T.nilable(Regexp),
<add> cask: T.nilable(Cask::Cask),
<add> block: T.nilable(T.proc.params(arg0: String).returns(T.any(T::Array[String], String))),
<add> ).returns(T::Hash[Symbol, T.untyped])
<add> }
<add> def self.find_versions(url, regex, cask: nil, &block)
<ide> match = url.match(URL_MATCH_REGEX)
<ide>
<ide> # Use `\.t` instead of specific tarball extensions (e.g. .tar.gz)
<ide> def self.find_versions(url, regex = nil, &block)
<ide> # Example regex: `/href=.*?Brew[._-]v?(\d+(?:\.\d+)*)\.t/i`
<ide> regex ||= /href=.*?#{match[:prefix]}[._-]v?(\d+(?:\.\d+)*)#{Regexp.escape(suffix)}/i
<ide>
<del> PageMatch.find_versions(page_url, regex, &block)
<add> PageMatch.find_versions(page_url, regex, cask: cask, &block)
<ide> end
<ide> end
<ide> end
<ide><path>Library/Homebrew/livecheck/strategy/electron_builder.rb
<ide> def self.version_from_content(content, &block)
<ide> params(
<ide> url: String,
<ide> regex: T.nilable(Regexp),
<add> cask: T.nilable(Cask::Cask),
<ide> block: T.nilable(T.proc.params(arg0: Hash).returns(String)),
<ide> ).returns(T::Hash[Symbol, T.untyped])
<ide> }
<del> def self.find_versions(url, regex = nil, &block)
<add> def self.find_versions(url, regex, cask: nil, &block)
<ide> raise ArgumentError, "The #{T.must(name).demodulize} strategy does not support a regex." if regex
<ide>
<ide> match_data = { matches: {}, regex: regex, url: url }
<ide><path>Library/Homebrew/livecheck/strategy/extract_plist.rb
<add># typed: true
<add># frozen_string_literal: true
<add>
<add>require "bundle_version"
<add>require "unversioned_cask_checker"
<add>require_relative "page_match"
<add>
<add>module Homebrew
<add> module Livecheck
<add> module Strategy
<add> # The {ExtractPlist} strategy downloads the file at a URL and
<add> # extracts versions from contained `.plist` files.
<add> #
<add> # @api private
<add> class ExtractPlist
<add> extend T::Sig
<add>
<add> # A priority of zero causes livecheck to skip the strategy. We only
<add> # apply {ExtractPlist} using `strategy :extract_plist` in a `livecheck` block,
<add> # as we can't automatically determine when this can be successfully
<add> # applied to a URL without fetching the content.
<add> PRIORITY = 0
<add>
<add> # The `Regexp` used to determine if the strategy applies to the URL.
<add> URL_MATCH_REGEX = %r{^https?://}i.freeze
<add>
<add> # Whether the strategy can be applied to the provided URL.
<add> # The strategy will technically match any HTTP URL but is
<add> # only usable with a `livecheck` block containing a regex
<add> # or block.
<add> sig { params(url: String).returns(T::Boolean) }
<add> def self.match?(url)
<add> URL_MATCH_REGEX.match?(url)
<add> end
<add>
<add> # @api private
<add> Item = Struct.new(
<add> # @api private
<add> :bundle_version,
<add> keyword_init: true,
<add> ) do
<add> extend T::Sig
<add>
<add> extend Forwardable
<add>
<add> # @api public
<add> delegate version: :bundle_version
<add>
<add> # @api public
<add> delegate short_version: :bundle_version
<add> end
<add>
<add> # Checks the content at the URL for new versions.
<add> sig {
<add> params(
<add> url: String,
<add> regex: T.nilable(Regexp),
<add> cask: Cask::Cask,
<add> block: T.nilable(T.proc.params(arg0: T::Hash[String, Item]).returns(String)),
<add> ).returns(T::Hash[Symbol, T.untyped])
<add> }
<add> def self.find_versions(url, regex, cask:, &block)
<add> raise ArgumentError, "The #{T.must(name).demodulize} strategy does not support a regex." if regex
<add> raise ArgumentError, "The #{T.must(name).demodulize} strategy only supports casks." unless T.unsafe(cask)
<add>
<add> match_data = { matches: {}, regex: regex, url: url }
<add>
<add> unversioned_cask_checker = UnversionedCaskChecker.new(cask)
<add> versions = unversioned_cask_checker.all_versions.transform_values { |v| Item.new(bundle_version: v) }
<add>
<add> if block
<add> match = block.call(versions)
<add>
<add> unless T.unsafe(match).is_a?(String)
<add> raise TypeError, "Return value of `strategy :extract_plist` block must be a string."
<add> end
<add>
<add> match_data[:matches][match] = Version.new(match) if match
<add> elsif versions.any?
<add> versions.each_value do |item|
<add> version = item.bundle_version.nice_version
<add> match_data[:matches][version] = Version.new(version)
<add> end
<add> end
<add>
<add> match_data
<add> end
<add> end
<add> end
<add> end
<add>end
<ide><path>Library/Homebrew/livecheck/strategy/git.rb
<ide> module Strategy
<ide> #
<ide> # @api public
<ide> class Git
<add> extend T::Sig
<add>
<ide> # The priority of the strategy on an informal scale of 1 to 10 (from
<ide> # lowest to highest).
<ide> PRIORITY = 8
<ide> def self.match?(url)
<ide> # @param url [String] the URL of the Git repository to check
<ide> # @param regex [Regexp] the regex to use for matching versions
<ide> # @return [Hash]
<del> def self.find_versions(url, regex = nil, &block)
<add> sig {
<add> params(
<add> url: String,
<add> regex: T.nilable(Regexp),
<add> cask: T.nilable(Cask::Cask),
<add> block: T.nilable(T.proc.params(arg0: T::Array[String])
<add> .returns(T.any(T::Array[String], String))),
<add> ).returns(T::Hash[Symbol, T.untyped])
<add> }
<add> def self.find_versions(url, regex, cask: nil, &block)
<ide> match_data = { matches: {}, regex: regex, url: url }
<ide>
<ide> tags_data = tag_info(url, regex)
<ide><path>Library/Homebrew/livecheck/strategy/github_latest.rb
<ide> module Strategy
<ide> #
<ide> # @api public
<ide> class GithubLatest
<add> extend T::Sig
<add>
<ide> NICE_NAME = "GitHub - Latest"
<ide>
<ide> # A priority of zero causes livecheck to skip the strategy. We do this
<ide> def self.match?(url)
<ide> # @param url [String] the URL of the content to check
<ide> # @param regex [Regexp] a regex used for matching versions in content
<ide> # @return [Hash]
<del> def self.find_versions(url, regex = nil, &block)
<add> sig {
<add> params(
<add> url: String,
<add> regex: T.nilable(Regexp),
<add> cask: T.nilable(Cask::Cask),
<add> block: T.nilable(T.proc.params(arg0: String).returns(T.any(T::Array[String], String))),
<add> ).returns(T::Hash[Symbol, T.untyped])
<add> }
<add> def self.find_versions(url, regex, cask: nil, &block)
<ide> match = url.sub(/\.git$/i, "").match(URL_MATCH_REGEX)
<ide>
<ide> # Example URL: `https://github.com/example/example/releases/latest`
<ide> def self.find_versions(url, regex = nil, &block)
<ide> # The default regex is the same for all URLs using this strategy
<ide> regex ||= %r{href=.*?/tag/v?(\d+(?:\.\d+)+)["' >]}i
<ide>
<del> PageMatch.find_versions(page_url, regex, &block)
<add> PageMatch.find_versions(page_url, regex, cask: cask, &block)
<ide> end
<ide> end
<ide> end
<ide><path>Library/Homebrew/livecheck/strategy/gnome.rb
<ide> module Strategy
<ide> #
<ide> # @api public
<ide> class Gnome
<add> extend T::Sig
<add>
<ide> NICE_NAME = "GNOME"
<ide>
<ide> # The `Regexp` used to determine if the strategy applies to the URL.
<ide> def self.match?(url)
<ide> # @param url [String] the URL of the content to check
<ide> # @param regex [Regexp] a regex used for matching versions in content
<ide> # @return [Hash]
<del> def self.find_versions(url, regex = nil, &block)
<add> sig {
<add> params(
<add> url: String,
<add> regex: T.nilable(Regexp),
<add> cask: T.nilable(Cask::Cask),
<add> block: T.nilable(T.proc.params(arg0: String).returns(T.any(T::Array[String], String))),
<add> ).returns(T::Hash[Symbol, T.untyped])
<add> }
<add> def self.find_versions(url, regex, cask: nil, &block)
<ide> match = url.match(URL_MATCH_REGEX)
<ide>
<ide> page_url = "https://download.gnome.org/sources/#{match[:package_name]}/cache.json"
<ide> def self.find_versions(url, regex = nil, &block)
<ide> # Example regex: `/example-(\d+\.([0-8]\d*?)?[02468](?:\.\d+)*?)\.t/i`
<ide> regex ||= /#{Regexp.escape(match[:package_name])}-(\d+\.([0-8]\d*?)?[02468](?:\.\d+)*?)\.t/i
<ide>
<del> PageMatch.find_versions(page_url, regex, &block)
<add> PageMatch.find_versions(page_url, regex, cask: cask, &block)
<ide> end
<ide> end
<ide> end
<ide><path>Library/Homebrew/livecheck/strategy/gnu.rb
<ide> module Strategy
<ide> #
<ide> # @api public
<ide> class Gnu
<add> extend T::Sig
<add>
<ide> NICE_NAME = "GNU"
<ide>
<ide> # The `Regexp` used to determine if the strategy applies to the URL.
<ide> def self.match?(url)
<ide> # @param url [String] the URL of the content to check
<ide> # @param regex [Regexp] a regex used for matching versions in content
<ide> # @return [Hash]
<del> def self.find_versions(url, regex = nil, &block)
<add> sig {
<add> params(
<add> url: String,
<add> regex: T.nilable(Regexp),
<add> cask: T.nilable(Cask::Cask),
<add> block: T.nilable(T.proc.params(arg0: String).returns(T.any(T::Array[String], String))),
<add> ).returns(T::Hash[Symbol, T.untyped])
<add> }
<add> def self.find_versions(url, regex, cask: nil, &block)
<ide> match = url.match(URL_MATCH_REGEX)
<ide>
<ide> # The directory listing page for the project's files
<ide> def self.find_versions(url, regex = nil, &block)
<ide> # Example regex: `%r{href=.*?example[._-]v?(\d+(?:\.\d+)*)(?:\.[a-z]+|/)}i`
<ide> regex ||= %r{href=.*?#{match[:project_name]}[._-]v?(\d+(?:\.\d+)*)(?:\.[a-z]+|/)}i
<ide>
<del> PageMatch.find_versions(page_url, regex, &block)
<add> PageMatch.find_versions(page_url, regex, cask: cask, &block)
<ide> end
<ide> end
<ide> end
<ide><path>Library/Homebrew/livecheck/strategy/hackage.rb
<ide> module Strategy
<ide> #
<ide> # @api public
<ide> class Hackage
<add> extend T::Sig
<add>
<ide> # A `Regexp` used in determining if the strategy applies to the URL and
<ide> # also as part of extracting the package name from the URL basename.
<ide> PACKAGE_NAME_REGEX = /(?<package_name>.+?)-\d+/i.freeze
<ide> def self.match?(url)
<ide> # @param url [String] the URL of the content to check
<ide> # @param regex [Regexp] a regex used for matching versions in content
<ide> # @return [Hash]
<del> def self.find_versions(url, regex = nil, &block)
<add> sig {
<add> params(
<add> url: String,
<add> regex: T.nilable(Regexp),
<add> cask: T.nilable(Cask::Cask),
<add> block: T.nilable(T.proc.params(arg0: String).returns(T.any(T::Array[String], String))),
<add> ).returns(T::Hash[Symbol, T.untyped])
<add> }
<add> def self.find_versions(url, regex, cask: nil, &block)
<ide> match = File.basename(url).match(FILENAME_REGEX)
<ide>
<ide> # A page containing a directory listing of the latest source tarball
<ide> def self.find_versions(url, regex = nil, &block)
<ide> # Example regex: `%r{<h3>example-(.*?)/?</h3>}i`
<ide> regex ||= %r{<h3>#{Regexp.escape(match[:package_name])}-(.*?)/?</h3>}i
<ide>
<del> PageMatch.find_versions(page_url, regex, &block)
<add> PageMatch.find_versions(page_url, regex, cask: cask, &block)
<ide> end
<ide> end
<ide> end
<ide><path>Library/Homebrew/livecheck/strategy/header_match.rb
<ide> def self.match?(url)
<ide>
<ide> # Checks the final URL for new versions after following all redirections,
<ide> # using the provided regex for matching.
<del> sig { params(url: String, regex: T.nilable(Regexp)).returns(T::Hash[Symbol, T.untyped]) }
<del> def self.find_versions(url, regex, &block)
<add> sig {
<add> params(
<add> url: String,
<add> regex: T.nilable(Regexp),
<add> cask: T.nilable(Cask::Cask),
<add> block: T.nilable(T.proc.params(arg0: T::Hash[String, String])
<add> .returns(T.any(T::Array[String], String))),
<add> ).returns(T::Hash[Symbol, T.untyped])
<add> }
<add> def self.find_versions(url, regex, cask: nil, &block)
<ide> match_data = { matches: {}, regex: regex, url: url }
<ide>
<ide> headers = Strategy.page_headers(url)
<ide> def self.find_versions(url, regex, &block)
<ide> merged_headers = headers.reduce(&:merge)
<ide>
<ide> if block
<del> match = block.call(merged_headers, regex)
<add> match = yield merged_headers, regex
<ide> else
<ide> match = nil
<ide>
<ide><path>Library/Homebrew/livecheck/strategy/launchpad.rb
<ide> module Strategy
<ide> #
<ide> # @api public
<ide> class Launchpad
<add> extend T::Sig
<add>
<ide> # The `Regexp` used to determine if the strategy applies to the URL.
<ide> URL_MATCH_REGEX = %r{
<ide> ^https?://(?:[^/]+?\.)*launchpad\.net
<ide> def self.match?(url)
<ide> # @param url [String] the URL of the content to check
<ide> # @param regex [Regexp] a regex used for matching versions in content
<ide> # @return [Hash]
<del> def self.find_versions(url, regex = nil, &block)
<add> sig {
<add> params(
<add> url: String,
<add> regex: T.nilable(Regexp),
<add> cask: T.nilable(Cask::Cask),
<add> block: T.nilable(T.proc.params(arg0: String).returns(T.any(T::Array[String], String))),
<add> ).returns(T::Hash[Symbol, T.untyped])
<add> }
<add> def self.find_versions(url, regex, cask: nil, &block)
<ide> match = url.match(URL_MATCH_REGEX)
<ide>
<ide> # The main page for the project on Launchpad
<ide> def self.find_versions(url, regex = nil, &block)
<ide> # The default regex is the same for all URLs using this strategy
<ide> regex ||= %r{class="[^"]*version[^"]*"[^>]*>\s*Latest version is (.+)\s*</}
<ide>
<del> PageMatch.find_versions(page_url, regex, &block)
<add> PageMatch.find_versions(page_url, regex, cask: cask, &block)
<ide> end
<ide> end
<ide> end
<ide><path>Library/Homebrew/livecheck/strategy/npm.rb
<ide> module Strategy
<ide> #
<ide> # @api public
<ide> class Npm
<add> extend T::Sig
<add>
<ide> NICE_NAME = "npm"
<ide>
<ide> # The `Regexp` used to determine if the strategy applies to the URL.
<ide> def self.match?(url)
<ide> # @param url [String] the URL of the content to check
<ide> # @param regex [Regexp] a regex used for matching versions in content
<ide> # @return [Hash]
<del> def self.find_versions(url, regex = nil, &block)
<add> sig {
<add> params(
<add> url: String,
<add> regex: T.nilable(Regexp),
<add> cask: T.nilable(Cask::Cask),
<add> block: T.nilable(T.proc.params(arg0: String).returns(T.any(T::Array[String], String))),
<add> ).returns(T::Hash[Symbol, T.untyped])
<add> }
<add> def self.find_versions(url, regex, cask: nil, &block)
<ide> match = url.match(URL_MATCH_REGEX)
<ide>
<ide> page_url = "https://www.npmjs.com/package/#{match[:package_name]}?activeTab=versions"
<ide> def self.find_versions(url, regex = nil, &block)
<ide> # * `%r{href=.*?/package/@example/example/v/(\d+(?:\.\d+)+)"}i`
<ide> regex ||= %r{href=.*?/package/#{Regexp.escape(match[:package_name])}/v/(\d+(?:\.\d+)+)"}i
<ide>
<del> PageMatch.find_versions(page_url, regex, &block)
<add> PageMatch.find_versions(page_url, regex, cask: cask, &block)
<ide> end
<ide> end
<ide> end
<ide><path>Library/Homebrew/livecheck/strategy/page_match.rb
<ide> def self.page_matches(content, regex, &block)
<ide> params(
<ide> url: String,
<ide> regex: T.nilable(Regexp),
<add> cask: T.nilable(Cask::Cask),
<ide> provided_content: T.nilable(String),
<ide> block: T.nilable(T.proc.params(arg0: String).returns(T.any(T::Array[String], String))),
<ide> ).returns(T::Hash[Symbol, T.untyped])
<ide> }
<del> def self.find_versions(url, regex, provided_content = nil, &block)
<add> def self.find_versions(url, regex, cask: nil, provided_content: nil, &block)
<ide> match_data = { matches: {}, regex: regex, url: url }
<ide>
<ide> content = if provided_content.is_a?(String)
<ide><path>Library/Homebrew/livecheck/strategy/pypi.rb
<ide> module Strategy
<ide> #
<ide> # @api public
<ide> class Pypi
<add> extend T::Sig
<add>
<ide> NICE_NAME = "PyPI"
<ide>
<ide> # The `Regexp` used to extract the package name and suffix (e.g., file
<ide> def self.match?(url)
<ide> # @param url [String] the URL of the content to check
<ide> # @param regex [Regexp] a regex used for matching versions in content
<ide> # @return [Hash]
<del> def self.find_versions(url, regex = nil, &block)
<add> sig {
<add> params(
<add> url: String,
<add> regex: T.nilable(Regexp),
<add> cask: T.nilable(Cask::Cask),
<add> block: T.nilable(T.proc.params(arg0: String).returns(T.any(T::Array[String], String))),
<add> ).returns(T::Hash[Symbol, T.untyped])
<add> }
<add> def self.find_versions(url, regex, cask: nil, &block)
<ide> match = File.basename(url).match(FILENAME_REGEX)
<ide>
<ide> # Use `\.t` instead of specific tarball extensions (e.g. .tar.gz)
<ide> def self.find_versions(url, regex = nil, &block)
<ide> re_suffix = Regexp.escape(suffix)
<ide> regex ||= %r{href=.*?/packages.*?/#{re_package_name}[._-]v?(\d+(?:\.\d+)*(?:[._-]post\d+)?)#{re_suffix}}i
<ide>
<del> PageMatch.find_versions(page_url, regex, &block)
<add> PageMatch.find_versions(page_url, regex, cask: cask, &block)
<ide> end
<ide> end
<ide> end
<ide><path>Library/Homebrew/livecheck/strategy/sourceforge.rb
<ide> module Strategy
<ide> #
<ide> # @api public
<ide> class Sourceforge
<add> extend T::Sig
<add>
<ide> NICE_NAME = "SourceForge"
<ide>
<ide> # The `Regexp` used to determine if the strategy applies to the URL.
<ide> def self.match?(url)
<ide> # @param url [String] the URL of the content to check
<ide> # @param regex [Regexp] a regex used for matching versions in content
<ide> # @return [Hash]
<del> def self.find_versions(url, regex = nil, &block)
<add> sig {
<add> params(
<add> url: String,
<add> regex: T.nilable(Regexp),
<add> cask: T.nilable(Cask::Cask),
<add> block: T.nilable(T.proc.params(arg0: String).returns(T.any(T::Array[String], String))),
<add> ).returns(T::Hash[Symbol, T.untyped])
<add> }
<add> def self.find_versions(url, regex, cask: nil, &block)
<ide> match = url.match(URL_MATCH_REGEX)
<ide>
<ide> page_url = "https://sourceforge.net/projects/#{match[:project_name]}/rss"
<ide> def self.find_versions(url, regex = nil, &block)
<ide> # create something that works for most URLs.
<ide> regex ||= %r{url=.*?/#{Regexp.escape(match[:project_name])}/files/.*?[-_/](\d+(?:[-.]\d+)+)[-_/%.]}i
<ide>
<del> PageMatch.find_versions(page_url, regex, &block)
<add> PageMatch.find_versions(page_url, regex, cask: cask, &block)
<ide> end
<ide> end
<ide> end
<ide><path>Library/Homebrew/livecheck/strategy/sparkle.rb
<ide> def self.match?(url)
<ide> URL_MATCH_REGEX.match?(url)
<ide> end
<ide>
<del> Item = Struct.new(:title, :url, :bundle_version, :short_version, :version, keyword_init: true) do
<add> # @api private
<add> Item = Struct.new(
<add> # @api public
<add> :title,
<add> # @api public
<add> :url,
<add> # @api private
<add> :bundle_version,
<add> keyword_init: true,
<add> ) do
<ide> extend T::Sig
<ide>
<ide> extend Forwardable
<ide>
<add> # @api public
<ide> delegate version: :bundle_version
<add>
<add> # @api public
<ide> delegate short_version: :bundle_version
<ide> end
<ide>
<ide> def self.item_from_content(content)
<ide> title: title,
<ide> url: url,
<ide> bundle_version: bundle_version,
<del> short_version: bundle_version&.short_version,
<del> version: bundle_version&.version,
<ide> }.compact
<ide>
<ide> Item.new(**data) unless data.empty?
<ide> def self.item_from_content(content)
<ide> end
<ide>
<ide> # Checks the content at the URL for new versions.
<del> sig { params(url: String, regex: T.nilable(Regexp)).returns(T::Hash[Symbol, T.untyped]) }
<del> def self.find_versions(url, regex, &block)
<add> sig {
<add> params(
<add> url: String,
<add> regex: T.nilable(Regexp),
<add> cask: T.nilable(Cask::Cask),
<add> block: T.nilable(T.proc.params(arg0: Item).returns(String)),
<add> ).returns(T::Hash[Symbol, T.untyped])
<add> }
<add> def self.find_versions(url, regex, cask: nil, &block)
<ide> raise ArgumentError, "The #{T.must(name).demodulize} strategy does not support a regex." if regex
<ide>
<ide> match_data = { matches: {}, regex: regex, url: url }
<ide> def self.find_versions(url, regex, &block)
<ide>
<ide> if (item = item_from_content(content))
<ide> match = if block
<del> block.call(item)&.to_s
<add> value = block.call(item)
<add>
<add> unless T.unsafe(value).is_a?(String)
<add> raise TypeError, "Return value of `strategy :sparkle` block must be a string."
<add> end
<add>
<add> value
<ide> else
<ide> item.bundle_version&.nice_version
<ide> end
<ide><path>Library/Homebrew/livecheck/strategy/xorg.rb
<ide> module Strategy
<ide> #
<ide> # @api public
<ide> class Xorg
<add> extend T::Sig
<add>
<ide> NICE_NAME = "X.Org"
<ide>
<ide> # A `Regexp` used in determining if the strategy applies to the URL and
<ide> def self.match?(url)
<ide> # @param url [String] the URL of the content to check
<ide> # @param regex [Regexp] a regex used for matching versions in content
<ide> # @return [Hash]
<del> def self.find_versions(url, regex = nil, &block)
<add> sig {
<add> params(
<add> url: String,
<add> regex: T.nilable(Regexp),
<add> cask: T.nilable(Cask::Cask),
<add> block: T.nilable(T.proc.params(arg0: String).returns(T.any(T::Array[String], String))),
<add> ).returns(T::Hash[Symbol, T.untyped])
<add> }
<add> def self.find_versions(url, regex, cask: nil, &block)
<ide> file_name = File.basename(url)
<ide> match = file_name.match(FILENAME_REGEX)
<ide>
<ide> def self.find_versions(url, regex = nil, &block)
<ide>
<ide> # Use the cached page content to avoid duplicate fetches
<ide> cached_content = @page_data[page_url]
<del> match_data = PageMatch.find_versions(page_url, regex, cached_content, &block)
<add> match_data = PageMatch.find_versions(page_url, regex, provided_content: cached_content, cask: cask, &block)
<ide>
<ide> # Cache any new page content
<ide> @page_data[page_url] = match_data[:content] if match_data[:content].present?
<ide><path>Library/Homebrew/test/livecheck/strategy/page_match_spec.rb
<ide>
<ide> describe "::find_versions?" do
<ide> it "finds versions in provided_content" do
<del> expect(page_match.find_versions(url, regex, page_content)).to eq(find_versions_cached_return_hash)
<add> expect(page_match.find_versions(url, regex, provided_content: page_content))
<add> .to eq(find_versions_cached_return_hash)
<ide> end
<ide> end
<ide> end
<ide><path>Library/Homebrew/test/livecheck/strategy/sparkle_spec.rb
<ide>
<ide> let(:appcast_data) {
<ide> {
<del> title: "Version 1.2.3",
<del> url: "https://www.example.com/example/example.tar.gz",
<del> bundle_version: Homebrew::BundleVersion.new("1.2.3", "1234"),
<del> short_version: "1.2.3",
<del> version: "1234",
<add> title: "Version 1.2.3",
<add> url: "https://www.example.com/example/example.tar.gz",
<add> short_version: "1.2.3",
<add> version: "1234",
<ide> }
<ide> }
<ide>
<del> let(:appcast_item) {
<del> Homebrew::Livecheck::Strategy::Sparkle::Item.new(
<del> {
<del> title: appcast_data[:title],
<del> url: appcast_data[:url],
<del> bundle_version: appcast_data[:bundle_version],
<del> short_version: appcast_data[:bundle_version]&.short_version,
<del> version: appcast_data[:bundle_version]&.version,
<del> },
<del> )
<del> }
<del>
<ide> let(:appcast_xml) {
<ide> <<~EOS
<ide> <?xml version="1.0" encoding="utf-8"?>
<ide>
<ide> it "returns an Item when given XML data" do
<ide> expect(item_from_appcast_xml).to be_a(Homebrew::Livecheck::Strategy::Sparkle::Item)
<del> expect(item_from_appcast_xml.title).to eq(appcast_item.title)
<del> expect(item_from_appcast_xml.url).to eq(appcast_item.url)
<del> expect(item_from_appcast_xml.bundle_version.short_version).to eq(appcast_item.bundle_version.short_version)
<del> expect(item_from_appcast_xml.bundle_version.version).to eq(appcast_item.bundle_version.version)
<add> expect(item_from_appcast_xml.title).to eq(appcast_data[:title])
<add> expect(item_from_appcast_xml.url).to eq(appcast_data[:url])
<add> expect(item_from_appcast_xml.short_version).to eq(appcast_data[:short_version])
<add> expect(item_from_appcast_xml.version).to eq(appcast_data[:version])
<ide> end
<ide> end
<ide> end
<ide><path>Library/Homebrew/unversioned_cask_checker.rb
<ide> def top_level_info_plists(paths)
<ide> end
<ide> end
<ide>
<add> sig { returns(T::Hash[String, BundleVersion]) }
<add> def all_versions
<add> versions = {}
<add>
<add> parse_info_plist = proc do |info_plist_path|
<add> plist = system_command!("plutil", args: ["-convert", "xml1", "-o", "-", info_plist_path]).plist
<add>
<add> id = plist["CFBundleIdentifier"]
<add> version = BundleVersion.from_info_plist_content(plist)
<add>
<add> versions[id] = version if id && version
<add> end
<add>
<add> Dir.mktmpdir do |dir|
<add> dir = Pathname(dir)
<add>
<add> installer.extract_primary_container(to: dir)
<add>
<add> info_plist_paths = apps.flat_map do |app|
<add> top_level_info_plists(Pathname.glob(dir/"**"/app.source.basename/"Contents"/"Info.plist")).sort
<add> end
<add>
<add> info_plist_paths.each(&parse_info_plist)
<add>
<add> pkg_paths = pkgs.flat_map do |pkg|
<add> Pathname.glob(dir/"**"/pkg.path.basename).sort
<add> end
<add>
<add> pkg_paths.each do |pkg_path|
<add> Dir.mktmpdir do |extract_dir|
<add> extract_dir = Pathname(extract_dir)
<add> FileUtils.rmdir extract_dir
<add>
<add> system_command! "pkgutil", args: ["--expand-full", pkg_path, extract_dir]
<add>
<add> top_level_info_plist_paths = top_level_info_plists(Pathname.glob(extract_dir/"**/Contents/Info.plist"))
<add>
<add> top_level_info_plist_paths.each(&parse_info_plist)
<add> ensure
<add> Cask::Utils.gain_permissions_remove(extract_dir)
<add> extract_dir.mkpath
<add> end
<add> end
<add>
<add> nil
<add> end
<add>
<add> versions
<add> end
<add>
<ide> sig { returns(T.nilable(String)) }
<ide> def guess_cask_version
<ide> if apps.empty? && pkgs.empty? | 24 |
Mixed | Go | implement build cache based on history array | 690882c2e79c3f3742c709cf158584e61594ba00 | <ide><path>api/server/router/build/build_routes.go
<ide> func newImageBuildOptions(ctx context.Context, r *http.Request) (*types.ImageBui
<ide> var buildUlimits = []*units.Ulimit{}
<ide> ulimitsJSON := r.FormValue("ulimits")
<ide> if ulimitsJSON != "" {
<del> if err := json.NewDecoder(strings.NewReader(ulimitsJSON)).Decode(&buildUlimits); err != nil {
<add> if err := json.Unmarshal([]byte(ulimitsJSON), &buildUlimits); err != nil {
<ide> return nil, err
<ide> }
<ide> options.Ulimits = buildUlimits
<ide> func newImageBuildOptions(ctx context.Context, r *http.Request) (*types.ImageBui
<ide> var buildArgs = map[string]string{}
<ide> buildArgsJSON := r.FormValue("buildargs")
<ide> if buildArgsJSON != "" {
<del> if err := json.NewDecoder(strings.NewReader(buildArgsJSON)).Decode(&buildArgs); err != nil {
<add> if err := json.Unmarshal([]byte(buildArgsJSON), &buildArgs); err != nil {
<ide> return nil, err
<ide> }
<ide> options.BuildArgs = buildArgs
<ide> }
<ide> var labels = map[string]string{}
<ide> labelsJSON := r.FormValue("labels")
<ide> if labelsJSON != "" {
<del> if err := json.NewDecoder(strings.NewReader(labelsJSON)).Decode(&labels); err != nil {
<add> if err := json.Unmarshal([]byte(labelsJSON), &labels); err != nil {
<ide> return nil, err
<ide> }
<ide> options.Labels = labels
<ide> }
<ide>
<add> var cacheFrom = []string{}
<add> cacheFromJSON := r.FormValue("cachefrom")
<add> if cacheFromJSON != "" {
<add> if err := json.Unmarshal([]byte(cacheFromJSON), &cacheFrom); err != nil {
<add> return nil, err
<add> }
<add> options.CacheFrom = cacheFrom
<add> }
<add>
<ide> return options, nil
<ide> }
<ide>
<ide><path>api/types/client.go
<ide> type ImageBuildOptions struct {
<ide> // preserves the original image and creates a new one from the parent with all
<ide> // the changes applied to a single layer
<ide> Squash bool
<add> // CacheFrom specifies images that are used for matching cache. Images
<add> // specified here do not need to have a valid parent chain to match cache.
<add> CacheFrom []string
<ide> }
<ide>
<ide> // ImageBuildResponse holds information
<ide><path>builder/builder.go
<ide> type Image interface {
<ide> RunConfig() *container.Config
<ide> }
<ide>
<del>// ImageCache abstracts an image cache store.
<add>// ImageCacheBuilder represents a generator for stateful image cache.
<add>type ImageCacheBuilder interface {
<add> // MakeImageCache creates a stateful image cache.
<add> MakeImageCache(cacheFrom []string) ImageCache
<add>}
<add>
<add>// ImageCache abstracts an image cache.
<ide> // (parent image, child runconfig) -> child image
<ide> type ImageCache interface {
<ide> // GetCachedImageOnBuild returns a reference to a cached image whose parent equals `parent`
<ide> // and runconfig equals `cfg`. A cache miss is expected to return an empty ID and a nil error.
<del> GetCachedImageOnBuild(parentID string, cfg *container.Config) (imageID string, err error)
<add> GetCache(parentID string, cfg *container.Config) (imageID string, err error)
<ide> }
<ide><path>builder/dockerfile/builder.go
<ide> type Builder struct {
<ide>
<ide> // TODO: remove once docker.Commit can receive a tag
<ide> id string
<add>
<add> imageCache builder.ImageCache
<ide> }
<ide>
<ide> // BuildManager implements builder.Backend and is shared across all Builder objects.
<ide> func NewBuilder(clientCtx context.Context, config *types.ImageBuildOptions, back
<ide> LookingForDirectives: true,
<ide> },
<ide> }
<add> if icb, ok := backend.(builder.ImageCacheBuilder); ok {
<add> b.imageCache = icb.MakeImageCache(config.CacheFrom)
<add> }
<add>
<ide> parser.SetEscapeToken(parser.DefaultEscapeToken, &b.directive) // Assume the default token for escape
<ide>
<ide> if dockerfile != nil {
<ide><path>builder/dockerfile/internals.go
<ide> func (b *Builder) processImageFrom(img builder.Image) error {
<ide> return nil
<ide> }
<ide>
<del>// probeCache checks if `b.docker` implements builder.ImageCache and image-caching
<del>// is enabled (`b.UseCache`).
<del>// If so attempts to look up the current `b.image` and `b.runConfig` pair with `b.docker`.
<add>// probeCache checks if cache match can be found for current build instruction.
<ide> // If an image is found, probeCache returns `(true, nil)`.
<ide> // If no image is found, it returns `(false, nil)`.
<ide> // If there is any error, it returns `(false, err)`.
<ide> func (b *Builder) probeCache() (bool, error) {
<del> c, ok := b.docker.(builder.ImageCache)
<del> if !ok || b.options.NoCache || b.cacheBusted {
<add> c := b.imageCache
<add> if c == nil || b.options.NoCache || b.cacheBusted {
<ide> return false, nil
<ide> }
<del> cache, err := c.GetCachedImageOnBuild(b.image, b.runConfig)
<add> cache, err := c.GetCache(b.image, b.runConfig)
<ide> if err != nil {
<ide> return false, err
<ide> }
<ide><path>cli/command/image/build.go
<ide> type buildOptions struct {
<ide> rm bool
<ide> forceRm bool
<ide> pull bool
<add> cacheFrom []string
<ide> }
<ide>
<ide> // NewBuildCommand creates a new `docker build` command
<ide> func NewBuildCommand(dockerCli *command.DockerCli) *cobra.Command {
<ide> flags.BoolVar(&options.forceRm, "force-rm", false, "Always remove intermediate containers")
<ide> flags.BoolVarP(&options.quiet, "quiet", "q", false, "Suppress the build output and print image ID on success")
<ide> flags.BoolVar(&options.pull, "pull", false, "Always attempt to pull a newer version of the image")
<add> flags.StringSliceVar(&options.cacheFrom, "cache-from", []string{}, "Images to consider as cache sources")
<ide>
<ide> command.AddTrustedFlags(flags, true)
<ide>
<ide> func runBuild(dockerCli *command.DockerCli, options buildOptions) error {
<ide> BuildArgs: runconfigopts.ConvertKVStringsToMap(options.buildArgs.GetAll()),
<ide> AuthConfigs: authConfig,
<ide> Labels: runconfigopts.ConvertKVStringsToMap(options.labels),
<add> CacheFrom: options.cacheFrom,
<ide> }
<ide>
<ide> response, err := dockerCli.Client().ImageBuild(ctx, body, buildOptions)
<ide><path>client/image_build.go
<ide> func imageBuildOptionsToQuery(options types.ImageBuildOptions) (url.Values, erro
<ide> return query, err
<ide> }
<ide> query.Set("labels", string(labelsJSON))
<add>
<add> cacheFromJSON, err := json.Marshal(options.CacheFrom)
<add> if err != nil {
<add> return query, err
<add> }
<add> query.Set("cachefrom", string(cacheFromJSON))
<add>
<ide> return query, nil
<ide> }
<ide>
<ide><path>daemon/cache.go
<add>package daemon
<add>
<add>import (
<add> "encoding/json"
<add> "fmt"
<add> "reflect"
<add> "strings"
<add>
<add> "github.com/Sirupsen/logrus"
<add> containertypes "github.com/docker/docker/api/types/container"
<add> "github.com/docker/docker/builder"
<add> "github.com/docker/docker/dockerversion"
<add> "github.com/docker/docker/image"
<add> "github.com/docker/docker/layer"
<add> "github.com/docker/docker/runconfig"
<add> "github.com/pkg/errors"
<add>)
<add>
<add>// getLocalCachedImage returns the most recent created image that is a child
<add>// of the image with imgID, that had the same config when it was
<add>// created. nil is returned if a child cannot be found. An error is
<add>// returned if the parent image cannot be found.
<add>func (daemon *Daemon) getLocalCachedImage(imgID image.ID, config *containertypes.Config) (*image.Image, error) {
<add> // Loop on the children of the given image and check the config
<add> getMatch := func(siblings []image.ID) (*image.Image, error) {
<add> var match *image.Image
<add> for _, id := range siblings {
<add> img, err := daemon.imageStore.Get(id)
<add> if err != nil {
<add> return nil, fmt.Errorf("unable to find image %q", id)
<add> }
<add>
<add> if runconfig.Compare(&img.ContainerConfig, config) {
<add> // check for the most up to date match
<add> if match == nil || match.Created.Before(img.Created) {
<add> match = img
<add> }
<add> }
<add> }
<add> return match, nil
<add> }
<add>
<add> // In this case, this is `FROM scratch`, which isn't an actual image.
<add> if imgID == "" {
<add> images := daemon.imageStore.Map()
<add> var siblings []image.ID
<add> for id, img := range images {
<add> if img.Parent == imgID {
<add> siblings = append(siblings, id)
<add> }
<add> }
<add> return getMatch(siblings)
<add> }
<add>
<add> // find match from child images
<add> siblings := daemon.imageStore.Children(imgID)
<add> return getMatch(siblings)
<add>}
<add>
<add>// MakeImageCache creates a stateful image cache.
<add>func (daemon *Daemon) MakeImageCache(sourceRefs []string) builder.ImageCache {
<add> if len(sourceRefs) == 0 {
<add> return &localImageCache{daemon}
<add> }
<add>
<add> cache := &imageCache{daemon: daemon, localImageCache: &localImageCache{daemon}}
<add>
<add> for _, ref := range sourceRefs {
<add> img, err := daemon.GetImage(ref)
<add> if err != nil {
<add> logrus.Warnf("Could not look up %s for cache resolution, skipping: %+v", ref, err)
<add> continue
<add> }
<add> cache.sources = append(cache.sources, img)
<add> }
<add>
<add> return cache
<add>}
<add>
<add>// localImageCache is cache based on parent chain.
<add>type localImageCache struct {
<add> daemon *Daemon
<add>}
<add>
<add>func (lic *localImageCache) GetCache(imgID string, config *containertypes.Config) (string, error) {
<add> return getImageIDAndError(lic.daemon.getLocalCachedImage(image.ID(imgID), config))
<add>}
<add>
<add>// imageCache is cache based on history objects. Requires initial set of images.
<add>type imageCache struct {
<add> sources []*image.Image
<add> daemon *Daemon
<add> localImageCache *localImageCache
<add>}
<add>
<add>func (ic *imageCache) restoreCachedImage(parent, target *image.Image, cfg *containertypes.Config) (image.ID, error) {
<add> var history []image.History
<add> rootFS := image.NewRootFS()
<add> lenHistory := 0
<add> if parent != nil {
<add> history = parent.History
<add> rootFS = parent.RootFS
<add> lenHistory = len(parent.History)
<add> }
<add> history = append(history, target.History[lenHistory])
<add> if layer := getLayerForHistoryIndex(target, lenHistory); layer != "" {
<add> rootFS.Append(layer)
<add> }
<add>
<add> config, err := json.Marshal(&image.Image{
<add> V1Image: image.V1Image{
<add> DockerVersion: dockerversion.Version,
<add> Config: cfg,
<add> Architecture: target.Architecture,
<add> OS: target.OS,
<add> Author: target.Author,
<add> Created: history[len(history)-1].Created,
<add> },
<add> RootFS: rootFS,
<add> History: history,
<add> OSFeatures: target.OSFeatures,
<add> OSVersion: target.OSVersion,
<add> })
<add> if err != nil {
<add> return "", errors.Wrap(err, "failed to marshal image config")
<add> }
<add>
<add> imgID, err := ic.daemon.imageStore.Create(config)
<add> if err != nil {
<add> return "", errors.Wrap(err, "failed to create cache image")
<add> }
<add>
<add> if parent != nil {
<add> if err := ic.daemon.imageStore.SetParent(imgID, parent.ID()); err != nil {
<add> return "", errors.Wrapf(err, "failed to set parent for %v to %v", target.ID(), parent.ID())
<add> }
<add> }
<add> return imgID, nil
<add>}
<add>
<add>func (ic *imageCache) isParent(imgID, parentID image.ID) bool {
<add> nextParent, err := ic.daemon.imageStore.GetParent(imgID)
<add> if err != nil {
<add> return false
<add> }
<add> if nextParent == parentID {
<add> return true
<add> }
<add> return ic.isParent(nextParent, parentID)
<add>}
<add>
<add>func (ic *imageCache) GetCache(parentID string, cfg *containertypes.Config) (string, error) {
<add> imgID, err := ic.localImageCache.GetCache(parentID, cfg)
<add> if err != nil {
<add> return "", err
<add> }
<add> if imgID != "" {
<add> for _, s := range ic.sources {
<add> if ic.isParent(s.ID(), image.ID(imgID)) {
<add> return imgID, nil
<add> }
<add> }
<add> }
<add>
<add> var parent *image.Image
<add> lenHistory := 0
<add> if parentID != "" {
<add> parent, err = ic.daemon.imageStore.Get(image.ID(parentID))
<add> if err != nil {
<add> return "", errors.Wrapf(err, "unable to find image %v", parentID)
<add> }
<add> lenHistory = len(parent.History)
<add> }
<add>
<add> for _, target := range ic.sources {
<add> if !isValidParent(target, parent) || !isValidConfig(cfg, target.History[lenHistory]) {
<add> continue
<add> }
<add>
<add> if len(target.History)-1 == lenHistory { // last
<add> if parent != nil {
<add> if err := ic.daemon.imageStore.SetParent(target.ID(), parent.ID()); err != nil {
<add> return "", errors.Wrapf(err, "failed to set parent for %v to %v", target.ID(), parent.ID())
<add> }
<add> }
<add> return target.ID().String(), nil
<add> }
<add>
<add> imgID, err := ic.restoreCachedImage(parent, target, cfg)
<add> if err != nil {
<add> return "", errors.Wrapf(err, "failed to restore cached image from %q to %v", parentID, target.ID())
<add> }
<add>
<add> ic.sources = []*image.Image{target} // avoid jumping to different target, tuned for safety atm
<add> return imgID.String(), nil
<add> }
<add>
<add> return "", nil
<add>}
<add>
<add>func getImageIDAndError(img *image.Image, err error) (string, error) {
<add> if img == nil || err != nil {
<add> return "", err
<add> }
<add> return img.ID().String(), nil
<add>}
<add>
<add>func isValidParent(img, parent *image.Image) bool {
<add> if len(img.History) == 0 {
<add> return false
<add> }
<add> if parent == nil || len(parent.History) == 0 && len(parent.RootFS.DiffIDs) == 0 {
<add> return true
<add> }
<add> if len(parent.History) >= len(img.History) {
<add> return false
<add> }
<add> if len(parent.RootFS.DiffIDs) >= len(img.RootFS.DiffIDs) {
<add> return false
<add> }
<add>
<add> for i, h := range parent.History {
<add> if !reflect.DeepEqual(h, img.History[i]) {
<add> return false
<add> }
<add> }
<add> for i, d := range parent.RootFS.DiffIDs {
<add> if d != img.RootFS.DiffIDs[i] {
<add> return false
<add> }
<add> }
<add> return true
<add>}
<add>
<add>func getLayerForHistoryIndex(image *image.Image, index int) layer.DiffID {
<add> layerIndex := 0
<add> for i, h := range image.History {
<add> if i == index {
<add> if h.EmptyLayer {
<add> return ""
<add> }
<add> break
<add> }
<add> if !h.EmptyLayer {
<add> layerIndex++
<add> }
<add> }
<add> return image.RootFS.DiffIDs[layerIndex] // validate?
<add>}
<add>
<add>func isValidConfig(cfg *containertypes.Config, h image.History) bool {
<add> // todo: make this format better than join that loses data
<add> return strings.Join(cfg.Cmd, " ") == h.CreatedBy
<add>}
<ide><path>daemon/image.go
<ide> package daemon
<ide> import (
<ide> "fmt"
<ide>
<del> containertypes "github.com/docker/docker/api/types/container"
<ide> "github.com/docker/docker/builder"
<ide> "github.com/docker/docker/image"
<ide> "github.com/docker/docker/reference"
<del> "github.com/docker/docker/runconfig"
<ide> )
<ide>
<ide> // ErrImageDoesNotExist is error returned when no image can be found for a reference.
<ide> func (daemon *Daemon) GetImageOnBuild(name string) (builder.Image, error) {
<ide> }
<ide> return img, nil
<ide> }
<del>
<del>// GetCachedImage returns the most recent created image that is a child
<del>// of the image with imgID, that had the same config when it was
<del>// created. nil is returned if a child cannot be found. An error is
<del>// returned if the parent image cannot be found.
<del>func (daemon *Daemon) GetCachedImage(imgID image.ID, config *containertypes.Config) (*image.Image, error) {
<del> // Loop on the children of the given image and check the config
<del> getMatch := func(siblings []image.ID) (*image.Image, error) {
<del> var match *image.Image
<del> for _, id := range siblings {
<del> img, err := daemon.imageStore.Get(id)
<del> if err != nil {
<del> return nil, fmt.Errorf("unable to find image %q", id)
<del> }
<del>
<del> if runconfig.Compare(&img.ContainerConfig, config) {
<del> // check for the most up to date match
<del> if match == nil || match.Created.Before(img.Created) {
<del> match = img
<del> }
<del> }
<del> }
<del> return match, nil
<del> }
<del>
<del> // In this case, this is `FROM scratch`, which isn't an actual image.
<del> if imgID == "" {
<del> images := daemon.imageStore.Map()
<del> var siblings []image.ID
<del> for id, img := range images {
<del> if img.Parent == imgID {
<del> siblings = append(siblings, id)
<del> }
<del> }
<del> return getMatch(siblings)
<del> }
<del>
<del> // find match from child images
<del> siblings := daemon.imageStore.Children(imgID)
<del> return getMatch(siblings)
<del>}
<del>
<del>// GetCachedImageOnBuild returns a reference to a cached image whose parent equals `parent`
<del>// and runconfig equals `cfg`. A cache miss is expected to return an empty ID and a nil error.
<del>func (daemon *Daemon) GetCachedImageOnBuild(imgID string, cfg *containertypes.Config) (string, error) {
<del> cache, err := daemon.GetCachedImage(image.ID(imgID), cfg)
<del> if cache == nil || err != nil {
<del> return "", err
<del> }
<del> return cache.ID().String(), nil
<del>}
<ide><path>docs/reference/api/docker_remote_api.md
<ide> This section lists each version from latest to oldest. Each listing includes a
<ide> * `POST /containers/create` now validates IPAMConfig in NetworkingConfig, and returns error for invalid IPv4 and IPv6 addresses (`--ip` and `--ip6` in `docker create/run`).
<ide> * `POST /containers/create` now takes a `Mounts` field in `HostConfig` which replaces `Binds` and `Volumes`. *note*: `Binds` and `Volumes` are still available but are exclusive with `Mounts`
<ide> * `POST /build` now performs a preliminary validation of the `Dockerfile` before starting the build, and returns an error if the syntax is incorrect. Note that this change is _unversioned_ and applied to all API versions.
<add>* `POST /build` accepts `cachefrom` parameter to specify images used for build cache.
<ide>
<ide> ### v1.24 API changes
<ide>
<ide><path>docs/reference/api/docker_remote_api_v1.25.md
<ide> or being killed.
<ide> there must be a file with the corresponding path inside the tarball.
<ide> - **q** – Suppress verbose build output.
<ide> - **nocache** – Do not use the cache when building the image.
<add>- **cachefrom** - JSON array of images used for build cache resolution.
<ide> - **pull** - Attempt to pull the image even if an older image exists locally.
<ide> - **rm** - Remove intermediate containers after a successful build (default behavior).
<ide> - **forcerm** - Always remove intermediate containers (includes `rm`).
<ide><path>docs/reference/builder.md
<ide> the `Using cache` message in the console output.
<ide> ---> 7ea8aef582cc
<ide> Successfully built 7ea8aef582cc
<ide>
<add>Build cache is only used from images that have a local parent chain. This means
<add>that these images were created by previous builds or the whole chain of images
<add>was loaded with `docker load`. If you wish to use build cache of a specific
<add>image you can specify it with `--cache-from` option. Images specified with
<add>`--cache-from` do not need to have a parent chain and may be pulled from other
<add>registries.
<add>
<ide> When you're done with your build, you're ready to look into [*Pushing a
<ide> repository to its registry*](../tutorials/dockerrepos.md#contributing-to-docker-hub).
<ide>
<ide><path>docs/reference/commandline/build.md
<ide> Build an image from a Dockerfile
<ide>
<ide> Options:
<ide> --build-arg value Set build-time variables (default [])
<add> --cache-from value Images to consider as cache sources (default [])
<ide> --cgroup-parent string Optional parent cgroup for the container
<ide> --cpu-period int Limit the CPU CFS (Completely Fair Scheduler) period
<ide> --cpu-quota int Limit the CPU CFS (Completely Fair Scheduler) quota | 13 |
Ruby | Ruby | add more tests for formulaaudit/miscellaneous cop | 626cb6ca91f99df6f7c646d95cb5e82d0dd6532d | <ide><path>Library/Homebrew/test/rubocops/lines_cop_spec.rb
<ide> class Foo < Formula
<ide> end
<ide> end
<ide>
<add> it "with build.universal? exempted formula" do
<add> source = <<-EOS.undent
<add> class Wine < Formula
<add> desc "foo"
<add> url 'http://example.com/foo-1.0.tgz'
<add> if build.universal?
<add> "foo"
<add> end
<add> end
<add> EOS
<add>
<add> inspect_source(cop, source, "/homebrew-core/Formula/wine.rb")
<add> expect(cop.offenses).to eq([])
<add> end
<add>
<ide> it "with ENV.universal_binary" do
<ide> source = <<-EOS.undent
<ide> class Foo < Formula
<ide> class Foo < Formula
<ide> end
<ide> end
<ide>
<add> it "with ruby-macho alternatives audit exempted formula" do
<add> source = <<-EOS.undent
<add> class Cctools < Formula
<add> desc "foo"
<add> url 'http://example.com/foo-1.0.tgz'
<add> system "install_name_tool", "-id"
<add> end
<add> EOS
<add>
<add> inspect_source(cop, source, "/homebrew-core/Formula/cctools.rb")
<add> expect(cop.offenses).to eq([])
<add> end
<add>
<ide> it "with npm install without language::Node args" do
<ide> source = <<-EOS.undent
<ide> class Foo < Formula
<ide> class Foo < Formula
<ide> expect_offense(expected, actual)
<ide> end
<ide> end
<add>
<add> it "with npm install without language::Node args in kibana" do
<add> source = <<-EOS.undent
<add> class KibanaAT44 < Formula
<add> desc "foo"
<add> url 'http://example.com/foo-1.0.tgz'
<add> system "npm", "install"
<add> end
<add> EOS
<add>
<add> inspect_source(cop, source, "/homebrew-core/Formula/[email protected]")
<add> expect(cop.offenses).to eq([])
<add> end
<ide> end
<ide> end | 1 |
PHP | PHP | refactor some code into overridable methods | 5326073c80367c243e523befc39271a9e03172cb | <ide><path>lib/Cake/Network/Email/SmtpTransport.php
<ide> protected function _auth() {
<ide> }
<ide>
<ide> /**
<del> * Send emails
<add> * Prepares the `MAIL FROM` SMTP command.
<ide> *
<del> * @return void
<del> * @throws SocketException
<add> * @param string $email The email address to send with the command.
<add> * @return string
<ide> */
<del> protected function _sendRcpt() {
<add> protected function _prepareFromCmd($email) {
<add> return 'MAIL FROM:<' . $email . '>';
<add> }
<add>
<add>/**
<add> * Prepares the `RCPT TO` SMTP command.
<add> *
<add> * @param string $email The email address to send with the command.
<add> * @return string
<add> */
<add> protected function _prepareRcptCmd($email) {
<add> return 'RCPT TO:<' . $email . '>';
<add> }
<add>
<add>/**
<add> * Prepares the `from` email address.
<add> *
<add> * @return array
<add> */
<add> protected function _prepareFromAddress() {
<ide> $from = $this->_cakeEmail->returnPath();
<ide> if (empty($from)) {
<ide> $from = $this->_cakeEmail->from();
<ide> }
<del> $this->_smtpSend('MAIL FROM:<' . key($from) . '>');
<add> return $from;
<add> }
<ide>
<add>/**
<add> * Prepares the recipient email addresses.
<add> *
<add> * @return array
<add> */
<add> protected function _prepareRecipientAddresses() {
<ide> $to = $this->_cakeEmail->to();
<ide> $cc = $this->_cakeEmail->cc();
<ide> $bcc = $this->_cakeEmail->bcc();
<del> $emails = array_merge(array_keys($to), array_keys($cc), array_keys($bcc));
<del> foreach ($emails as $email) {
<del> $this->_smtpSend('RCPT TO:<' . $email . '>');
<del> }
<add> return array_merge(array_keys($to), array_keys($cc), array_keys($bcc));
<ide> }
<ide>
<ide> /**
<del> * Send Data
<add> * Prepares the message headers.
<ide> *
<del> * @return void
<del> * @throws SocketException
<add> * @return array
<ide> */
<del> protected function _sendData() {
<del> $this->_smtpSend('DATA', '354');
<add> protected function _prepareMessageHeaders() {
<add> return $this->_cakeEmail->getHeaders(array('from', 'sender', 'replyTo', 'readReceipt', 'to', 'cc', 'subject'));
<add> }
<ide>
<del> $headers = $this->_cakeEmail->getHeaders(array('from', 'sender', 'replyTo', 'readReceipt', 'to', 'cc', 'subject'));
<del> $headers = $this->_headersToString($headers);
<add>/**
<add> * Prepares the message body.
<add> *
<add> * @return string
<add> */
<add> protected function _prepareMessage() {
<ide> $lines = $this->_cakeEmail->message();
<ide> $messages = array();
<ide> foreach ($lines as $line) {
<ide> protected function _sendData() {
<ide> $messages[] = $line;
<ide> }
<ide> }
<del> $message = implode("\r\n", $messages);
<add> return implode("\r\n", $messages);
<add> }
<add>
<add>/**
<add> * Send emails
<add> *
<add> * @return void
<add> * @throws SocketException
<add> */
<add> protected function _sendRcpt() {
<add> $from = $this->_prepareFromAddress();
<add> $this->_smtpSend($this->_prepareFromCmd(key($from)));
<add>
<add> $emails = $this->_prepareRecipientAddresses();
<add> foreach ($emails as $email) {
<add> $this->_smtpSend($this->_prepareRcptCmd($email));
<add> }
<add> }
<add>
<add>/**
<add> * Send Data
<add> *
<add> * @return void
<add> * @throws SocketException
<add> */
<add> protected function _sendData() {
<add> $this->_smtpSend('DATA', '354');
<add>
<add> $headers = $this->_headersToString($this->_prepareMessageHeaders());
<add> $message = $this->_prepareMessage();
<add>
<ide> $this->_smtpSend($headers . "\r\n\r\n" . $message . "\r\n\r\n\r\n.");
<ide> $this->_content = array('headers' => $headers, 'message' => $message);
<ide> } | 1 |
Java | Java | use beanutils to instantiate target type | c0269770af0f2f51cc7758aa096d8369907d439f | <ide><path>spring-beans/src/main/java/org/springframework/beans/BeanWrapperImpl.java
<ide> else if (Map.class.isAssignableFrom(type)) {
<ide> return CollectionFactory.createMap(type, (keyDesc != null ? keyDesc.getType() : null), 16);
<ide> }
<ide> else {
<del> return type.newInstance();
<add> return BeanUtils.instantiate(type);
<ide> }
<ide> }
<ide> catch (Exception ex) { | 1 |
Ruby | Ruby | fix return types for .can_load? | 4ffca8faa2fb055ec5a8329496fe0bfe532f223a | <ide><path>Library/Homebrew/cask/lib/hbc/cask_loader.rb
<ide> def cask(header_token, &block)
<ide>
<ide> class FromURILoader < FromPathLoader
<ide> def self.can_load?(ref)
<del> ref.to_s =~ ::URI.regexp
<add> ref.to_s.match?(::URI.regexp)
<ide> end
<ide>
<ide> def initialize(url)
<ide> def load
<ide>
<ide> class FromTapLoader < FromPathLoader
<ide> def self.can_load?(ref)
<del> ref.to_s =~ HOMEBREW_TAP_CASK_REGEX
<add> ref.to_s.match?(HOMEBREW_TAP_CASK_REGEX)
<ide> end
<ide>
<ide> def initialize(tapped_name) | 1 |
PHP | PHP | fix doc-blocks | 3913c10a54a967d1a6b4e0fc3b162da4054121fb | <ide><path>src/Illuminate/Translation/Translator.php
<ide> public function has($key, $locale = null, $fallback = true)
<ide> * @param string $key
<ide> * @param array $replace
<ide> * @param string $locale
<del> * @return string|array|null
<add> * @return string|array
<ide> */
<ide> public function trans($key, array $replace = [], $locale = null)
<ide> {
<ide> public function trans($key, array $replace = [], $locale = null)
<ide> * @param array $replace
<ide> * @param string|null $locale
<ide> * @param bool $fallback
<del> * @return string|array|null
<add> * @return string|array
<ide> */
<ide> public function get($key, array $replace = [], $locale = null, $fallback = true)
<ide> {
<ide> public function get($key, array $replace = [], $locale = null, $fallback = true)
<ide> * @param string $key
<ide> * @param array $replace
<ide> * @param string $locale
<del> * @return string|array|null
<add> * @return string|array
<ide> */
<ide> public function getFromJson($key, array $replace = [], $locale = null)
<ide> { | 1 |
PHP | PHP | add getrootname() to command | df32d8556589301e30290392fa5bff969dab09a1 | <ide><path>src/Console/Command.php
<ide> public function getName(): string
<ide> return $this->name;
<ide> }
<ide>
<add> /**
<add> * Get the root command name.
<add> *
<add> * @return string
<add> */
<add> public function getRootName(): string
<add> {
<add> [$root] = explode(' ', $this->name);
<add>
<add> return $root;
<add> }
<add>
<ide> /**
<ide> * Get the command name.
<ide> *
<ide><path>tests/TestCase/Console/CommandTest.php
<ide> public function testSetName()
<ide> $command = new Command();
<ide> $this->assertSame($command, $command->setName('routes show'));
<ide> $this->assertSame('routes show', $command->getName());
<add> $this->assertSame('routes', $command->getRootName());
<ide> }
<ide>
<ide> /** | 2 |
Text | Text | add a code format in 2015-02-24-react-v0.13-rc1.md | 1744977201a7206cd8ad48ecee5adc2a713c354a | <ide><path>docs/_posts/2015-02-24-react-v0.13-rc1.md
<ide> We've also published version `0.13.0-rc1` of the `react` and `react-tools` packa
<ide> * Support for using ES6 classes to build React components; see the [v0.13.0 beta 1 notes](http://facebook.github.io/react/blog/2015/01/27/react-v0.13.0-beta-1.html) for details
<ide> * Added new top-level API `React.findDOMNode(component)`, which should be used in place of `component.getDOMNode()`. The base class for ES6-based components will not have `getDOMNode`. This change will enable some more patterns moving forward.
<ide> * New `ref` style, allowing a callback to be used in place of a name: `<Photo ref={(c) => this._photo = c} />` allows you to reference the component with `this._photo` (as opposed to `ref="photo"` which gives `this.refs.photo`)
<del>* `this.setState()` can now take a function as the first argument for transactional state updates, such as `this.setState((state, props) => ({count: state.count + 1}));` -- this means that you no longer need to use this._pendingState, which is now gone.
<add>* `this.setState()` can now take a function as the first argument for transactional state updates, such as `this.setState((state, props) => ({count: state.count + 1}));` -- this means that you no longer need to use `this._pendingState`, which is now gone.
<ide> * Support for iterators and immutable-js sequences as children
<ide>
<ide> #### Deprecations | 1 |
Text | Text | remove misterdjules from the ctc members list | 342c5f9d4c2eb8686722388d2ff15e3ae1eb9943 | <ide><path>README.md
<ide> more information about the governance of the Node.js project, see
<ide> **Matteo Collina** <[email protected]> (he/him)
<ide> * [mhdawson](https://github.com/mhdawson) -
<ide> **Michael Dawson** <[email protected]> (he/him)
<del>* [misterdjules](https://github.com/misterdjules) -
<del>**Julien Gilli** <[email protected]>
<ide> * [mscdex](https://github.com/mscdex) -
<ide> **Brian White** <[email protected]>
<ide> * [MylesBorins](https://github.com/MylesBorins) - | 1 |
Javascript | Javascript | remove unused reject handlers | 070995d5866beb31ee2b64ece90b883674a95b40 | <ide><path>test/common/inspector-helper.js
<ide> class NodeInstance extends EventEmitter {
<ide> async connectInspectorSession() {
<ide> console.log('[test]', 'Connecting to a child Node process');
<ide> const upgradeRequest = await this.sendUpgradeRequest();
<del> return new Promise((resolve, reject) => {
<add> return new Promise((resolve) => {
<ide> upgradeRequest
<ide> .on('upgrade',
<ide> (message, socket) => resolve(new InspectorSession(socket, this)))
<ide> class NodeInstance extends EventEmitter {
<ide> async expectConnectionDeclined() {
<ide> console.log('[test]', 'Checking upgrade is not possible');
<ide> const upgradeRequest = await this.sendUpgradeRequest();
<del> return new Promise((resolve, reject) => {
<add> return new Promise((resolve) => {
<ide> upgradeRequest
<ide> .on('upgrade', common.mustNotCall('Upgrade was received'))
<ide> .on('response', (response) => | 1 |
PHP | PHP | make resource opt in | 8e6ac01b4a311ec4740ab2bced01fbdf6f1241c5 | <ide><path>src/Illuminate/Routing/Console/ControllerMakeCommand.php
<ide> class ControllerMakeCommand extends GeneratorCommand
<ide> *
<ide> * @var string
<ide> */
<del> protected $description = 'Create a new resource controller class';
<add> protected $description = 'Create a new controller class';
<ide>
<ide> /**
<ide> * The type of class being generated.
<ide> class ControllerMakeCommand extends GeneratorCommand
<ide> */
<ide> protected function getStub()
<ide> {
<del> if ($this->option('plain')) {
<del> return __DIR__.'/stubs/controller.plain.stub';
<add> if ($this->option('resource')) {
<add> return __DIR__.'/stubs/controller.stub';
<ide> }
<ide>
<del> return __DIR__.'/stubs/controller.stub';
<add> return __DIR__.'/stubs/controller.plain.stub';
<ide> }
<ide>
<ide> /**
<ide> protected function getDefaultNamespace($rootNamespace)
<ide> protected function getOptions()
<ide> {
<ide> return [
<del> ['plain', null, InputOption::VALUE_NONE, 'Generate an empty controller class.'],
<add> ['resource', null, InputOption::VALUE_NONE, 'Generate a resource controller class.'],
<ide> ];
<ide> }
<ide> } | 1 |
Python | Python | stop worker on sigterm | 3ae14215d0f9298b62961021b4635513c8938c20 | <ide><path>celery/bin/celeryd.py
<ide> def run_worker(self):
<ide> def install_worker_term_handler(worker):
<ide>
<ide> def _stop(signum, frame):
<add> worker.stop()
<ide> raise SystemExit()
<add>
<ide> platform.install_signal_handler("SIGTERM", _stop)
<ide>
<add>
<ide> def install_worker_restart_handler(worker):
<ide>
<ide> def restart_worker_sig_handler(signum, frame): | 1 |
Ruby | Ruby | set uid/gid, use libarchive on macos | 4a3fc2a8fc562418212b8ffea273ff9a3806c38e | <ide><path>Library/Homebrew/dev-cmd/bottle.rb
<ide> def sudo_purge
<ide> system "/usr/bin/sudo", "--non-interactive", "/usr/sbin/purge"
<ide> end
<ide>
<add> def setup_tar_owner_group_args!
<add> # Unset the owner/group for reproducible bottles.
<add> # Use gnu-tar on Linux
<add> return ["--owner", "0", "--group", "0"].freeze if OS.linux?
<add>
<add> bsdtar_args = ["--uid", "0", "--gid", "0"].freeze
<add>
<add> # System bsdtar is new enough on macOS Catalina and above.
<add> return bsdtar_args if OS.mac? && MacOS.version >= :catalina
<add>
<add> # Use newish libarchive on older macOS versions for reproducibility.
<add> begin
<add> libarchive = Formula["libarchive"]
<add> rescue FormulaUnavailableError
<add> return [].freeze
<add> end
<add>
<add> unless libarchive.installed?
<add> ohai "Installing `libarchive` for bottling..."
<add> safe_system HOMEBREW_BREW_FILE, "install", "--formula", libarchive.full_name
<add> end
<add>
<add> path = PATH.new(ENV["PATH"])
<add> path.prepend(libarchive.opt_bin.to_s)
<add> ENV["PATH"] = path
<add>
<add> bsdtar_args
<add> end
<add>
<ide> def bottle_formula(f, args:)
<ide> local_bottle_json = args.json? && f.local_bottle_path.present?
<ide>
<ide> def bottle_formula(f, args:)
<ide>
<ide> cd cellar do
<ide> sudo_purge
<del> # Unset the owner/group for reproducible bottles.
<ide> # Tar then gzip for reproducible bottles.
<del> safe_system "tar", "--create", "--numeric-owner", "--file", tar_path, "#{f.name}/#{f.pkg_version}"
<add> owner_group_args = setup_tar_owner_group_args!
<add> safe_system "tar", "--create", "--numeric-owner",
<add> *owner_group_args,
<add> "--file", tar_path, "#{f.name}/#{f.pkg_version}"
<ide> sudo_purge
<ide> # Set more times for reproducible bottles.
<ide> tar_path.utime(tab.source_modified_time, tab.source_modified_time) | 1 |
PHP | PHP | remove return types | fa6e2f025f61e813de537043f693ea02f661a3e4 | <ide><path>src/Illuminate/Filesystem/FilesystemAdapter.php
<ide> public function size($path)
<ide> *
<ide> * @throws UnableToProvideChecksum
<ide> */
<del> public function checksum(string $path, array $options = []): string|false
<add> public function checksum(string $path, array $options = [])
<ide> {
<ide> try {
<ide> return $this->driver->checksum($path, $options); | 1 |
Go | Go | prevent panic on update --container-label-add | e462b4507a7020ae96ec4f4806877bff3a0cae99 | <ide><path>api/client/service/update.go
<ide> func updatePlacement(flags *pflag.FlagSet, placement *swarm.Placement) {
<ide>
<ide> func updateContainerLabels(flags *pflag.FlagSet, field *map[string]string) {
<ide> if flags.Changed(flagContainerLabelAdd) {
<del> if field == nil {
<add> if *field == nil {
<ide> *field = map[string]string{}
<ide> }
<ide>
<ide> func updateContainerLabels(flags *pflag.FlagSet, field *map[string]string) {
<ide> }
<ide> }
<ide>
<del> if field != nil && flags.Changed(flagContainerLabelRemove) {
<add> if *field != nil && flags.Changed(flagContainerLabelRemove) {
<ide> toRemove := flags.Lookup(flagContainerLabelRemove).Value.(*opts.ListOpts).GetAll()
<ide> for _, label := range toRemove {
<ide> delete(*field, label) | 1 |
Text | Text | fix typo in pull_request_template.md | de3976a48655a248a2417fcf2d3a511be02e1996 | <ide><path>.github/PULL_REQUEST_TEMPLATE.md
<ide> Help reviewers and the release process by writing your own release notes
<ide> [----------] TYPE
<ide> [ CLI ] [-------------] LOCATION
<ide> [ DOCS ] [ BREAKING ] [-------------]
<del>[ GENERAl ] [ BUGFIX ] [-{Component}-]
<add>[ GENERAL ] [ BUGFIX ] [-{Component}-]
<ide> [ INTERNAL ] [ ENHANCEMENT ] [ {File} ]
<ide> [ IOS ] [ FEATURE ] [ {Directory} ] |-----------|
<ide> [ ANDROID ] [ MINOR ] [ {Framework} ] - | {Message} | | 1 |
Go | Go | reset restart timeout if execution longer than 10s | b6db56b5eba00c4e8ad7a6f6c5b018e15dc883eb | <ide><path>container/container.go
<ide> func copyEscapable(dst io.Writer, src io.ReadCloser, keys []byte) (written int64
<ide> // ShouldRestart decides whether the daemon should restart the container or not.
<ide> // This is based on the container's restart policy.
<ide> func (container *Container) ShouldRestart() bool {
<del> shouldRestart, _, _ := container.restartManager.ShouldRestart(uint32(container.ExitCode), container.HasBeenManuallyStopped)
<add> shouldRestart, _, _ := container.restartManager.ShouldRestart(uint32(container.ExitCode), container.HasBeenManuallyStopped, container.FinishedAt.Sub(container.StartedAt))
<ide> return shouldRestart
<ide> }
<ide>
<ide><path>libcontainerd/container.go
<ide> package libcontainerd
<ide>
<ide> import (
<ide> "fmt"
<add> "time"
<ide>
<ide> "github.com/docker/docker/restartmanager"
<ide> )
<ide> type containerCommon struct {
<ide> restartManager restartmanager.RestartManager
<ide> restarting bool
<ide> processes map[string]*process
<add> startedAt time.Time
<ide> }
<ide>
<ide> // WithRestartManager sets the restartmanager to be used with the container.
<ide><path>libcontainerd/container_linux.go
<ide> import (
<ide> "os"
<ide> "path/filepath"
<ide> "syscall"
<add> "time"
<ide>
<ide> "github.com/Sirupsen/logrus"
<ide> containerd "github.com/docker/containerd/api/grpc/types"
<ide> func (ctr *container) start() error {
<ide> ctr.closeFifos(iopipe)
<ide> return err
<ide> }
<add> ctr.startedAt = time.Now()
<ide>
<ide> if err := ctr.client.backend.AttachStreams(ctr.containerID, *iopipe); err != nil {
<ide> return err
<ide> func (ctr *container) handleEvent(e *containerd.Event) error {
<ide> st.State = StateExitProcess
<ide> }
<ide> if st.State == StateExit && ctr.restartManager != nil {
<del> restart, wait, err := ctr.restartManager.ShouldRestart(e.Status, false)
<add> restart, wait, err := ctr.restartManager.ShouldRestart(e.Status, false, time.Since(ctr.startedAt))
<ide> if err != nil {
<ide> logrus.Warnf("container %s %v", ctr.containerID, err)
<ide> } else if restart {
<ide><path>libcontainerd/container_windows.go
<ide> import (
<ide> "io"
<ide> "strings"
<ide> "syscall"
<add> "time"
<ide>
<ide> "github.com/Microsoft/hcsshim"
<ide> "github.com/Sirupsen/logrus"
<ide> func (ctr *container) start() error {
<ide> }
<ide> return err
<ide> }
<add> ctr.startedAt = time.Now()
<ide>
<ide> // Convert io.ReadClosers to io.Readers
<ide> if stdout != nil {
<ide> func (ctr *container) waitExit(pid uint32, processFriendlyName string, isFirstPr
<ide> }
<ide>
<ide> if si.State == StateExit && ctr.restartManager != nil {
<del> restart, wait, err := ctr.restartManager.ShouldRestart(uint32(exitCode), false)
<add> restart, wait, err := ctr.restartManager.ShouldRestart(uint32(exitCode), false, time.Since(ctr.startedAt))
<ide> if err != nil {
<ide> logrus.Error(err)
<ide> } else if restart {
<ide><path>restartmanager/restartmanager.go
<ide> const (
<ide> // RestartManager defines object that controls container restarting rules.
<ide> type RestartManager interface {
<ide> Cancel() error
<del> ShouldRestart(exitCode uint32, hasBeenManuallyStopped bool) (bool, chan error, error)
<add> ShouldRestart(exitCode uint32, hasBeenManuallyStopped bool, executionDuration time.Duration) (bool, chan error, error)
<ide> }
<ide>
<ide> type restartManager struct {
<ide> func (rm *restartManager) SetPolicy(policy container.RestartPolicy) {
<ide> rm.Unlock()
<ide> }
<ide>
<del>func (rm *restartManager) ShouldRestart(exitCode uint32, hasBeenManuallyStopped bool) (bool, chan error, error) {
<add>func (rm *restartManager) ShouldRestart(exitCode uint32, hasBeenManuallyStopped bool, executionDuration time.Duration) (bool, chan error, error) {
<ide> if rm.policy.IsNone() {
<ide> return false, nil, nil
<ide> }
<ide> func (rm *restartManager) ShouldRestart(exitCode uint32, hasBeenManuallyStopped
<ide> if rm.active {
<ide> return false, nil, fmt.Errorf("invalid call on active restartmanager")
<ide> }
<del>
<add> // if the container ran for more than 10s, reguardless of status and policy reset the
<add> // the timeout back to the default.
<add> if executionDuration.Seconds() >= 10 {
<add> rm.timeout = 0
<add> }
<ide> if rm.timeout == 0 {
<ide> rm.timeout = defaultTimeout
<ide> } else {
<ide><path>restartmanager/restartmanager_test.go
<ide> package restartmanager
<ide>
<del>// FIXME
<add>import (
<add> "testing"
<add> "time"
<add>
<add> "github.com/docker/engine-api/types/container"
<add>)
<add>
<add>func TestRestartManagerTimeout(t *testing.T) {
<add> rm := New(container.RestartPolicy{Name: "always"}, 0).(*restartManager)
<add> should, _, err := rm.ShouldRestart(0, false, 1*time.Second)
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add> if !should {
<add> t.Fatal("container should be restarted")
<add> }
<add> if rm.timeout != 100*time.Millisecond {
<add> t.Fatalf("restart manager should have a timeout of 100ms but has %s", rm.timeout)
<add> }
<add>}
<add>
<add>func TestRestartManagerTimeoutReset(t *testing.T) {
<add> rm := New(container.RestartPolicy{Name: "always"}, 0).(*restartManager)
<add> rm.timeout = 5 * time.Second
<add> _, _, err := rm.ShouldRestart(0, false, 10*time.Second)
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add> if rm.timeout != 100*time.Millisecond {
<add> t.Fatalf("restart manager should have a timeout of 100ms but has %s", rm.timeout)
<add> }
<add>} | 6 |
Javascript | Javascript | improve error message in async-wrap test | f315081dc7abdcc2e96c55b17ff89e4c76008e9b | <ide><path>test/parallel/test-async-wrap-pop-id-during-load.js
<ide> const ret = spawnSync(
<ide> process.execPath,
<ide> ['--stack_size=50', __filename, 'async']
<ide> );
<del>assert.strictEqual(ret.status, 0);
<add>assert.strictEqual(ret.status, 0,
<add> `EXIT CODE: ${ret.status}, STDERR:\n${ret.stderr}`);
<ide> const stderr = ret.stderr.toString('utf8', 0, 2048);
<ide> assert.ok(!/async.*hook/i.test(stderr));
<ide> assert.ok(stderr.includes('UnhandledPromiseRejectionWarning: Error'), stderr); | 1 |
Go | Go | fix "docker ps" with no containers regression | f57fc03e3b39c225a05edfe217bd7616949d0dd0 | <ide><path>api/client/ps/custom.go
<ide> func customFormat(ctx Context, containers []types.Container) {
<ide> }
<ide>
<ide> if table {
<add> if len(header) == 0 {
<add> // if we still don't have a header, we didn't have any containers so we need to fake it to get the right headers from the template
<add> containerCtx := &containerContext{}
<add> tmpl.Execute(bytes.NewBufferString(""), containerCtx)
<add> header = containerCtx.fullHeader()
<add> }
<add>
<ide> t := tabwriter.NewWriter(ctx.Output, 20, 1, 3, ' ', 0)
<ide> t.Write([]byte(header))
<ide> t.Write([]byte("\n"))
<ide><path>integration-cli/docker_cli_ps_test.go
<ide> func (s *DockerSuite) TestPsFormatMultiNames(c *check.C) {
<ide> }
<ide>
<ide> }
<add>
<add>func (s *DockerSuite) TestPsFormatHeaders(c *check.C) {
<add> // make sure no-container "docker ps" still prints the header row
<add> out, _ := dockerCmd(c, "ps", "--format", "table {{.ID}}")
<add> if out != "CONTAINER ID\n" {
<add> c.Fatalf(`Expected 'CONTAINER ID\n', got %v`, out)
<add> }
<add>
<add> // verify that "docker ps" with a container still prints the header row also
<add> dockerCmd(c, "run", "--name=test", "-d", "busybox", "top")
<add> out, _ = dockerCmd(c, "ps", "--format", "table {{.Names}}")
<add> if out != "NAMES\ntest\n" {
<add> c.Fatalf(`Expected 'NAMES\ntest\n', got %v`, out)
<add> }
<add>} | 2 |
Text | Text | add guidance on console output in tests | 4d89fcc6d6581dfbe798fd0dc76d83fda98c1b87 | <ide><path>doc/guides/writing-tests.md
<ide> assert.throws(
<ide> );
<ide> ```
<ide>
<add>### Console output
<add>
<add>Output written by tests to stdout or stderr, such as with `console.log()` or
<add>`console.error()`, can be useful when writing tests, as well as for debugging
<add>them during later maintenance. The output will be supressed by the test runner
<add>(`./tools/test.py`) unless the test fails, but will always be displayed when
<add>running tests directly with `node`. For failing tests, the test runner will
<add>include the output along with the failed test assertion in the test report.
<add>
<add>Some output can help debugging by giving context to test failures. For example,
<add>when troubleshooting tests that timeout in CI. With no log statements, we have
<add>no idea where the test got hung up. There have been cases where tests fail
<add>without `console.log()`, and then pass when its added, so be cautious about its
<add>use, particularly in tests of the I/O and streaming APIs.
<add>
<add>Excessive use of console output is discouraged as it can overwhelm the display,
<add>including the Jenkins console and test report displays. Be particularly
<add>cautious of output in loops, or other contexts where output may be repeated many
<add>times in the case of failure.
<add>
<add>In some tests, it can be unclear whether a `console.log()` statement is required
<add>as part of the test (message tests, tests that check output from child
<add>processes, etc.), or is there as a debug aide. If there is any chance of
<add>confusion, use comments to make the purpose clear.
<add>
<add>
<ide> ### ES.Next features
<ide>
<ide> For performance considerations, we only use a selected subset of ES.Next | 1 |
Javascript | Javascript | expose tech#resize event as player#resize | e176b56843b2084c8b0c985d9ea491b5de1da113 | <ide><path>src/js/component.js
<ide> class Component {
<ide> * The width that you want to set postfixed with '%', 'px' or nothing.
<ide> *
<ide> * @param {boolean} [skipListeners]
<del> * Skip the resize event trigger
<add> * Skip the componentresize event trigger
<ide> *
<ide> * @return {number|string}
<ide> * The width when getting, zero if there is no width. Can be a string
<ide> class Component {
<ide> * The height that you want to set postfixed with '%', 'px' or nothing.
<ide> *
<ide> * @param {boolean} [skipListeners]
<del> * Skip the resize event trigger
<add> * Skip the componentresize event trigger
<ide> *
<ide> * @return {number|string}
<ide> * The width when getting, zero if there is no width. Can be a string
<ide> class Component {
<ide> * Height to set the `Component`s element to.
<ide> */
<ide> dimensions(width, height) {
<del> // Skip resize listeners on width for optimization
<add> // Skip componentresize listeners on width for optimization
<ide> this.width(width, true);
<ide> this.height(height);
<ide> }
<ide> class Component {
<ide> * - If you want the computed style of the component, use {@link Component#currentWidth}
<ide> * and {@link {Component#currentHeight}
<ide> *
<del> * @fires Component#resize
<add> * @fires Component#componentresize
<ide> *
<ide> * @param {string} widthOrHeight
<ide> 8 'width' or 'height'
<ide> class Component {
<ide> 8 New dimension
<ide> *
<ide> * @param {boolean} [skipListeners]
<del> * Skip resize event trigger
<add> * Skip componentresize event trigger
<ide> *
<ide> * @return {number}
<ide> * The dimension when getting or 0 if unset
<ide> class Component {
<ide> /**
<ide> * Triggered when a component is resized.
<ide> *
<del> * @event Component#resize
<add> * @event Component#componentresize
<ide> * @type {EventTarget~Event}
<ide> */
<del> this.trigger('resize');
<add> this.trigger('componentresize');
<ide> }
<ide>
<ide> return;
<ide><path>src/js/player.js
<ide> const TECH_EVENTS_RETRIGGER = [
<ide> */
<ide> 'ratechange',
<ide>
<add> /**
<add> * Fires when the video's intrinsic dimensions change
<add> *
<add> * @event Player#resize
<add> * @type {event}
<add> */
<add> /**
<add> * Retrigger the `resize` event that was triggered by the {@link Tech}.
<add> *
<add> * @private
<add> * @method Player#handleTechResize_
<add> * @fires Player#resize
<add> * @listens Tech#resize
<add> */
<add> 'resize',
<add>
<ide> /**
<ide> * Fires when the volume has been changed
<ide> *
<ide><path>src/js/tech/html5.js
<ide> Html5.Events = [
<ide> 'play',
<ide> 'pause',
<ide> 'ratechange',
<add> 'resize',
<ide> 'volumechange'
<ide> ];
<ide> | 3 |
Python | Python | fix setup.py to work in maintenance branches | 9a37cd93d2e57bb8058921dbff52669365e2f3a8 | <ide><path>setup.py
<ide> import textwrap
<ide> import warnings
<ide> import builtins
<add>import re
<ide>
<ide>
<ide> # Python supported version checks. Keep right after stdlib imports to ensure we
<ide> # The version components are changed from ints to strings, but only VERSION
<ide> # seems to matter outside of this module and it was already a str.
<ide> FULLVERSION = versioneer.get_version()
<del>ISRELEASED = 'dev' not in FULLVERSION
<del>MAJOR, MINOR, MICRO = FULLVERSION.split('.')[:3]
<add>
<add># Capture the version string:
<add># 1.22.0.dev0+ ... -> ISRELEASED == False, VERSION == 1.22.0
<add># 1.22.0rc1+ ... -> ISRELEASED == False, VERSION == 1.22.0
<add># 1.22.0 ... -> ISRELEASED == True, VERSION == 1.22.0
<add># 1.22.0rc1 ... -> ISRELEASED == True, VERSION == 1.22.0
<add>ISRELEASED = re.search(r'(dev|\+)', FULLVERSION) is None
<add>MAJOR, MINOR, MICRO = re.match(r'(\d+)\.(\d+)\.(\d+)', FULLVERSION).groups()
<ide> VERSION = '{}.{}.{}'.format(MAJOR, MINOR, MICRO)
<ide>
<ide> # The first version not in the `Programming Language :: Python :: ...` classifiers above | 1 |
Python | Python | fix a2dd618 for mysql | 3b765029f04a43a2ba432936553e33d7c4a4ce29 | <ide><path>tests/custom_lookups/tests.py
<ide> def __call__(self, *args, **kwargs):
<ide> return SQLFunc(self.name, *args, **kwargs)
<ide>
<ide>
<del>class CustomField(models.Field):
<add>class CustomField(models.TextField):
<ide>
<ide> def get_lookup(self, lookup_name):
<ide> if lookup_name.startswith('lookupfunc_'): | 1 |
Python | Python | support py3 for configure.py | a582c6b07ccce44973acaba4c60c4549cbaa5c38 | <ide><path>configure.py
<ide> import shlex
<ide> import subprocess
<ide> import shutil
<del>import string
<ide> from distutils.spawn import find_executable as which
<ide>
<ide> # If not run from node/, cd to node/.
<ide> def print_verbose(x):
<ide>
<ide> def b(value):
<ide> """Returns the string 'true' if value is truthy, 'false' otherwise."""
<del> if value:
<del> return 'true'
<del> else:
<del> return 'false'
<add> return 'true' if value else 'false'
<ide>
<ide> def B(value):
<ide> """Returns 1 if value is truthy, 0 otherwise."""
<del> if value:
<del> return 1
<del> else:
<del> return 0
<add> return 1 if value else 0
<ide>
<add>def to_utf8(s):
<add> return s if isinstance(s, str) else s.decode("utf-8")
<ide>
<ide> def pkg_config(pkg):
<ide> """Run pkg-config on the specified package
<ide> def pkg_config(pkg):
<ide> try:
<ide> proc = subprocess.Popen(shlex.split(pkg_config) + args,
<ide> stdout=subprocess.PIPE)
<del> val = proc.communicate()[0].strip()
<add> val = to_utf8(proc.communicate()[0]).strip()
<ide> except OSError as e:
<ide> if e.errno != errno.ENOENT: raise e # Unexpected error.
<ide> return (None, None, None, None) # No pkg-config/pkgconf installed.
<ide> def try_check_compiler(cc, lang):
<ide> except OSError:
<ide> return (False, False, '', '')
<ide>
<del> proc.stdin.write('__clang__ __GNUC__ __GNUC_MINOR__ __GNUC_PATCHLEVEL__ '
<del> '__clang_major__ __clang_minor__ __clang_patchlevel__')
<add> proc.stdin.write(b'__clang__ __GNUC__ __GNUC_MINOR__ __GNUC_PATCHLEVEL__ '
<add> b'__clang_major__ __clang_minor__ __clang_patchlevel__')
<ide>
<del> values = (proc.communicate()[0].split() + ['0'] * 7)[0:7]
<add> values = (to_utf8(proc.communicate()[0]).split() + ['0'] * 7)[0:7]
<ide> is_clang = values[0] == '1'
<ide> gcc_version = tuple(map(int, values[1:1+3]))
<ide> clang_version = tuple(map(int, values[4:4+3])) if is_clang else None
<ide> def get_version_helper(cc, regexp):
<ide> consider adjusting the CC environment variable if you installed
<ide> it in a non-standard prefix.''')
<ide>
<del> match = re.search(regexp, proc.communicate()[1])
<add> match = re.search(regexp, to_utf8(proc.communicate()[1]))
<ide>
<ide> if match:
<ide> return match.group(2)
<ide> def get_nasm_version(asm):
<ide> return '0'
<ide>
<ide> match = re.match(r"NASM version ([2-9]\.[0-9][0-9]+)",
<del> proc.communicate()[0])
<add> to_utf8(proc.communicate()[0]))
<ide>
<ide> if match:
<ide> return match.group(1)
<ide> def get_gas_version(cc):
<ide> consider adjusting the CC environment variable if you installed
<ide> it in a non-standard prefix.''')
<ide>
<del> gas_ret = proc.communicate()[1]
<add> gas_ret = to_utf8(proc.communicate()[1])
<ide> match = re.match(r"GNU assembler version ([2-9]\.[0-9]+)", gas_ret)
<ide>
<ide> if match:
<ide> def cc_macros(cc=None):
<ide> consider adjusting the CC environment variable if you installed
<ide> it in a non-standard prefix.''')
<ide>
<del> p.stdin.write('\n')
<del> out = p.communicate()[0]
<del>
<del> out = str(out).split('\n')
<add> p.stdin.write(b'\n')
<add> out = to_utf8(p.communicate()[0]).split('\n')
<ide>
<ide> k = {}
<ide> for line in out:
<ide> def glob_to_var(dir_base, dir_sub, patch_dir):
<ide>
<ide> def configure_intl(o):
<ide> def icu_download(path):
<del> depFile = 'tools/icu/current_ver.dep';
<add> depFile = 'tools/icu/current_ver.dep'
<ide> with open(depFile) as f:
<ide> icus = json.load(f)
<ide> # download ICU, if needed
<ide> def write_config(data, name):
<ide> o['variables']['icu_small'] = b(True)
<ide> locs = set(options.with_icu_locales.split(','))
<ide> locs.add('root') # must have root
<del> o['variables']['icu_locales'] = string.join(locs,',')
<add> o['variables']['icu_locales'] = ','.join(str(loc) for loc in locs)
<ide> # We will check a bit later if we can use the canned deps/icu-small
<ide> elif with_intl == 'full-icu':
<ide> # full ICU
<ide> def write_config(data, name):
<ide> elif int(icu_ver_major) < icu_versions['minimum_icu']:
<ide> error('icu4c v%s.x is too old, v%d.x or later is required.' %
<ide> (icu_ver_major, icu_versions['minimum_icu']))
<del> icu_endianness = sys.byteorder[0];
<add> icu_endianness = sys.byteorder[0]
<ide> o['variables']['icu_ver_major'] = icu_ver_major
<ide> o['variables']['icu_endianness'] = icu_endianness
<ide> icu_data_file_l = 'icudt%s%s.dat' % (icu_ver_major, 'l') | 1 |
Python | Python | remove unused usage of logging module | 201d6e43f31f2816f4c60ccd225092dc9e6887bd | <ide><path>airflow/api_connexion/endpoints/dag_source_endpoint.py
<ide> # KIND, either express or implied. See the License for the
<ide> # specific language governing permissions and limitations
<ide> # under the License.
<del>import logging
<ide>
<ide> from flask import Response, current_app, request
<ide> from itsdangerous import BadSignature, URLSafeSerializer
<ide> from airflow.models.dagcode import DagCode
<ide> from airflow.security import permissions
<ide>
<del>log = logging.getLogger(__name__)
<del>
<ide>
<ide> @security.requires_access([(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG_CODE)])
<ide> def get_dag_source(file_token: str):
<ide><path>airflow/api_connexion/endpoints/version_endpoint.py
<ide> # specific language governing permissions and limitations
<ide> # under the License.
<ide>
<del>import logging
<ide> from typing import NamedTuple, Optional
<ide>
<ide> import airflow
<ide> from airflow.api_connexion.schemas.version_schema import version_info_schema
<ide> from airflow.utils.platform import get_airflow_git_version
<ide>
<del>log = logging.getLogger(__name__)
<del>
<ide>
<ide> class VersionInfo(NamedTuple):
<ide> """Version information""" | 2 |
Javascript | Javascript | improve landing example [ci skip] | dcd6e06c47f3804f50251dd82635625c4d83fb4c | <ide><path>website/src/widgets/landing.js
<ide> import Link from '../components/link'
<ide> import BenchmarksChoi from 'usage/_benchmarks-choi.md'
<ide>
<ide> const CODE_EXAMPLE = `# pip install spacy
<del># python -m spacy download en_core_web_md
<add># python -m spacy download en_core_web_sm
<ide>
<ide> import spacy
<ide>
<ide> # Load English tokenizer, tagger, parser, NER and word vectors
<del>nlp = spacy.load("en_core_web_md")
<add>nlp = spacy.load("en_core_web_sm")
<ide>
<ide> # Process whole documents
<del>text = (u"When Sebastian Thrun started working on self-driving cars at "
<del> u"Google in 2007, few people outside of the company took him "
<del> u"seriously. “I can tell you very senior CEOs of major American "
<del> u"car companies would shake my hand and turn away because I wasn’t "
<del> u"worth talking to,” said Thrun, now the co-founder and CEO of "
<del> u"online higher education startup Udacity, in an interview with "
<del> u"Recode earlier this week.")
<add>text = ("When Sebastian Thrun started working on self-driving cars at "
<add> "Google in 2007, few people outside of the company took him "
<add> "seriously. “I can tell you very senior CEOs of major American "
<add> "car companies would shake my hand and turn away because I wasn’t "
<add> "worth talking to,” said Thrun, in an interview with Recode earlier "
<add> "this week.")
<ide> doc = nlp(text)
<ide>
<add># Analyze syntax
<add>print("Noun phrases:", [chunk.text for chunk in doc.noun_chunks])
<add>print("Verbs:", [token.lemma_ for token in doc if token.pos_ == "VERB"])
<add>
<ide> # Find named entities, phrases and concepts
<ide> for entity in doc.ents:
<ide> print(entity.text, entity.label_)
<del>
<del># Determine semantic similarities
<del>doc1 = nlp(u"my fries were super gross")
<del>doc2 = nlp(u"such disgusting fries")
<del>similarity = doc1.similarity(doc2)
<del>print(doc1.text, doc2.text, similarity)
<ide> `
<ide>
<ide> /** | 1 |
Javascript | Javascript | reduce requestanimationframe polyfill | 4462a2c950b989aabf6d5c50b20b6b79a9e6a2ba | <ide><path>src/helpers/index.js
<ide> export default {
<ide>
<ide> return niceFraction * Math.pow(10, exponent);
<ide> },
<del> // Request animation polyfill - https://www.paulirish.com/2011/requestanimationframe-for-smart-animating/
<add> // Request animation polyfill
<ide> requestAnimFrame: (function() {
<ide> if (typeof window === 'undefined') {
<ide> return function(callback) {
<ide> callback();
<ide> };
<ide> }
<del> return window.requestAnimationFrame ||
<del> window.webkitRequestAnimationFrame ||
<del> window.mozRequestAnimationFrame ||
<del> window.oRequestAnimationFrame ||
<del> window.msRequestAnimationFrame ||
<del> function(callback) {
<del> return window.setTimeout(callback, 1000 / 60);
<del> };
<add> return window.requestAnimationFrame;
<ide> }()),
<ide> // -- Canvas methods
<ide> fontString: function(pixelSize, fontStyle, fontFamily) { | 1 |
Text | Text | harmonize yaml comments | 0aa2c5bec64db87945c66161bac030da20147fc2 | <ide><path>doc/api/buffer.md
<ide> added: v0.5.5
<ide> changes:
<ide> - version:
<ide> - v14.9.0
<del> - 12.19.0
<add> - v12.19.0
<ide> pr-url: https://github.com/nodejs/node/pull/34729
<ide> description: This function is also available as `buf.writeUint16LE()`.
<ide> - version: v10.0.0
<ide><path>doc/api/deprecations.md
<ide> changes:
<ide> pr-url: https://github.com/nodejs/node/pull/10116
<ide> description: A deprecation code has been assigned.
<ide> - version: v1.0.0
<del> pr-url: https://github.com/iojs/io.js/pull/166
<add> pr-url: https://github.com/nodejs/node/pull/166
<ide> description: Documentation-only deprecation.
<ide> -->
<ide>
<ide> The `crypto._toBuf()` function was not designed to be used by modules outside
<ide> of Node.js core and was removed.
<ide>
<ide> ### DEP0115: `crypto.prng()`, `crypto.pseudoRandomBytes()`, `crypto.rng()`
<add>
<add><!--lint disable nodejs-yaml-comments -->
<add>
<ide> <!-- YAML
<ide> changes:
<ide> - version: v11.0.0
<ide> changes:
<ide> with `--pending-deprecation` support.
<ide> -->
<ide>
<add><!--lint enable nodejs-yaml-comments -->
<add>
<ide> Type: Documentation-only (supports [`--pending-deprecation`][])
<ide>
<ide> In recent versions of Node.js, there is no difference between
<ide> purpose and is only available on CommonJS environment.
<ide> <!-- YAML
<ide> changes:
<ide> - version:
<del> - v12.19.0
<ide> - v14.0.0
<add> - v12.19.0
<ide> pr-url: https://github.com/nodejs/node/pull/32499
<ide> description: Documentation-only deprecation.
<ide> -->
<ide> The [`crypto.Certificate()` constructor][] is deprecated. Use
<ide> ### DEP0XXX: `fs.rmdir(path, { recursive: true })`
<ide> <!-- YAML
<ide> changes:
<del> - version: REPLACME
<add> - version: REPLACEME
<ide> pr-url: https://github.com/nodejs/node/pull/35579
<ide> description: Documentation-only deprecation.
<ide> -->
<ide><path>doc/api/errors.md
<ide> changes:
<ide> - version:
<ide> - v11.4.0
<ide> - v10.15.0
<del> pr-url: https://github.com/nodejs/node/commit/186035243fad247e3955f
<add> commit: 186035243fad247e3955f
<add> pr-url: https://github.com/nodejs-private/node-private/pull/143
<ide> description: Max header size in `http_parser` was set to 8KB.
<ide> -->
<ide>
<ide><path>doc/api/http.md
<ide> per connection (in the case of HTTP Keep-Alive connections).
<ide> added: v0.1.94
<ide> changes:
<ide> - version: v10.0.0
<del> pr-url: v10.0.0
<add> pr-url: https://github.com/nodejs/node/pull/19981
<ide> description: Not listening to this event no longer causes the socket
<ide> to be destroyed if a client sends an Upgrade header.
<ide> -->
<ide><path>doc/api/http2.md
<ide> changes:
<ide> - v14.4.0
<ide> - v12.18.0
<ide> - v10.21.0
<add> commit: 3948830ce6408be620b09a70bf66158623022af0
<ide> pr-url: https://github.com/nodejs-private/node-private/pull/204
<ide> description: Added `maxSettings` option with a default of 32.
<ide> - version:
<ide> changes:
<ide> - v14.4.0
<ide> - v12.18.0
<ide> - v10.21.0
<add> commit: 3948830ce6408be620b09a70bf66158623022af0
<ide> pr-url: https://github.com/nodejs-private/node-private/pull/204
<ide> description: Added `maxSettings` option with a default of 32.
<ide> - version:
<ide> changes:
<ide> - v14.4.0
<ide> - v12.18.0
<ide> - v10.21.0
<add> commit: 3948830ce6408be620b09a70bf66158623022af0
<ide> pr-url: https://github.com/nodejs-private/node-private/pull/204
<ide> description: Added `maxSettings` option with a default of 32.
<ide> - version: v13.0.0
<ide><path>doc/api/util.md
<ide> is not supported.
<ide> added: v8.3.0
<ide> changes:
<ide> - version: v11.0.0
<del> pr-url: v11.0.0
<add> pr-url: https://github.com/nodejs/node/pull/22281
<ide> description: The class is now available on the global object.
<ide> -->
<ide>
<ide> mark.
<ide> added: v8.3.0
<ide> changes:
<ide> - version: v11.0.0
<del> pr-url: v11.0.0
<add> pr-url: https://github.com/nodejs/node/pull/22281
<ide> description: The class is now available on the global object.
<ide> -->
<ide> | 6 |
Python | Python | add tests for new feature and fix in random.choice | 7f9d7bcf616371f3318513ca5500f19124f573e1 | <ide><path>numpy/random/tests/test_random.py
<ide> def test_choice_nonuniform_noreplace(self):
<ide> np.random.seed(self.seed)
<ide> actual = np.random.choice(4, 3, replace=False,
<ide> p=[0.1, 0.3, 0.5, 0.1])
<del> desired = np.array([2, 1, 3])
<add> desired = np.array([2, 3, 1])
<ide> np.testing.assert_array_equal(actual, desired)
<ide>
<ide> def test_choice_noninteger(self):
<ide> def test_choice_exceptions(self):
<ide> assert_raises(ValueError, sample, [1,2,3], 2, replace=False,
<ide> p=[1,0,0])
<ide>
<add> def test_choice_return_shape(self):
<add> p = [0.1,0.9]
<add> # Check scalar
<add> assert_(np.isscalar(np.random.choice(2, replace=True)))
<add> assert_(np.isscalar(np.random.choice(2, replace=False)))
<add> assert_(np.isscalar(np.random.choice(2, replace=True, p=p)))
<add> assert_(np.isscalar(np.random.choice(2, replace=False, p=p)))
<add> assert_(np.isscalar(np.random.choice([1,2], replace=True)))
<add>
<add> # Check 0-d array
<add> s = tuple()
<add> assert_(not np.isscalar(np.random.choice(2, s, replace=True)))
<add> assert_(not np.isscalar(np.random.choice(2, s, replace=False)))
<add> assert_(not np.isscalar(np.random.choice(2, s, replace=True, p=p)))
<add> assert_(not np.isscalar(np.random.choice(2, s, replace=False, p=p)))
<add> assert_(not np.isscalar(np.random.choice([1,2], s, replace=True)))
<add>
<add> # Check multi dimensional array
<add> s = (2,3)
<add> p = [0.1, 0.1, 0.1, 0.1, 0.4, 0.2]
<add> assert_(np.random.choice(6, s, replace=True).shape, s)
<add> assert_(np.random.choice(6, s, replace=False).shape, s)
<add> assert_(np.random.choice(6, s, replace=True, p=p).shape, s)
<add> assert_(np.random.choice(6, s, replace=False, p=p).shape, s)
<add> assert_(np.random.choice(np.arange(6), s, replace=True).shape, s)
<add>
<ide> def test_bytes(self):
<ide> np.random.seed(self.seed)
<ide> actual = np.random.bytes(10) | 1 |
Ruby | Ruby | apply suggestions from code review | 3d27894015962a780dc3624ea6284eb303824bef | <ide><path>Library/Homebrew/formula.rb
<ide> def method_added(method)
<ide> # @!attribute [w]
<ide> # The SPDX ID of the open-source license that the formula uses.
<ide> # Shows when running `brew info`.
<add> # Multiple licenses means that the software is licensed under multiple licenses.
<add> # Do not use multiple licenses if e.g. different parts are under different licenses.
<ide> #
<ide> # <pre>license "BSD-2-Clause"</pre>
<del> def license args=nil
<del> if args.blank?
<add> def license(args = nil)
<add> if args.nil?
<ide> return @licenses
<ide> else
<del> @licenses = args.class == String ? [args] : args
<add> @licenses = Array(args)
<ide> puts @licenses
<ide> # license.
<ide> end | 1 |
Javascript | Javascript | remove unused code | 76b718092a0f0aad73300371c2ed43f5388c9dd3 | <ide><path>d3.layout.js
<ide> d3.layout.hierarchy = function() {
<ide> return root;
<ide> };
<ide>
<del> // If the new API is used, enabling inlining.
<del> hierarchy.nodes = function(d) {
<del> d3_layout_hierarchyInline = true;
<del> return (hierarchy.nodes = hierarchy)(d);
<del> };
<del>
<ide> return hierarchy;
<ide> };
<ide>
<ide> function d3_layout_clusterX(children) {
<ide>
<ide> function d3_layout_clusterLeft(node) {
<ide> var children = node.children;
<del> return children && children.length
<del> ? d3_layout_clusterLeft(children[0]) : node;
<add> return children && children.length ? d3_layout_clusterLeft(children[0]) : node;
<ide> }
<ide>
<ide> function d3_layout_clusterRight(node) {
<del> var children = node.children,
<del> n;
<del> return children && (n = children.length)
<del> ? d3_layout_clusterRight(children[n - 1]) : node;
<add> var children = node.children, n;
<add> return children && (n = children.length) ? d3_layout_clusterRight(children[n - 1]) : node;
<ide> }
<ide> // Node-link tree diagram using the Reingold-Tilford "tidy" algorithm
<ide> d3.layout.tree = function() {
<ide><path>d3.layout.min.js
<del>(function(){function bc(a,b){var c=a.x+b[3],d=a.y+b[0],e=a.dx-b[1]-b[3],f=a.dy-b[0]-b[2];e<0&&(c+=e/2,e=0),f<0&&(d+=f/2,f=0);return{x:c,y:d,dx:e,dy:f}}function bb(a){return{x:a.x,y:a.y,dx:a.dx,dy:a.dy}}function ba(a,b,c){return a._tree.ancestor.parent==b.parent?a._tree.ancestor:c}function _(a,b,c){a=a._tree,b=b._tree;var d=c/(b.number-a.number);a.change+=d,b.change-=d,b.shift+=c,b.prelim+=c,b.mod+=c}function $(a){var b=0,c=0,d=a.children,e=d.length,f;while(--e>=0)f=d[e]._tree,f.prelim+=b,f.mod+=b,b+=f.shift+(c+=f.change)}function Z(a,b){function c(a,d){var e=a.children;if(e){var f,g=null,h=-1,i=e.length;while(++h<i)f=e[h],c(f,g),g=f}b(a,d)}c(a,null)}function Y(a,b){return a.depth-b.depth}function X(a,b){return b.x-a.x}function W(a,b){return a.x-b.x}function V(a,b){var c=a.children;if(c){var d,e=c.length,f=-1;while(++f<e)b(d=V(c[f],b),a)>0&&(a=d)}return a}function U(a){return a.children?a.children[a.children.length-1]:a._tree.thread}function T(a){return a.children?a.children[0]:a._tree.thread}function S(a,b){return a.parent==b.parent?1:2}function R(a){var b=a.children,c;return b&&(c=b.length)?R(b[c-1]):a}function Q(a){var b=a.children;return b&&b.length?Q(b[0]):a}function P(a){return a.reduce(function(a,b){return a+b.x},0)/a.length}function O(a){return 1+d3.max(a,function(a){return a.y})}function N(a,b,c){var d=b.r+c.r,e=a.r+c.r,f=b.x-a.x,g=b.y-a.y,h=Math.sqrt(f*f+g*g),i=(e*e+h*h-d*d)/(2*e*h),j=Math.acos(i),k=i*e,l=Math.sin(j)*e;f/=h,g/=h,c.x=a.x+k*f+l*g,c.y=a.y+k*g-l*f}function M(a,b,c,d){var e=a.children;a.x=b+=d*a.x,a.y=c+=d*a.y,a.r*=d;if(e){var f=-1,g=e.length;while(++f<g)M(e[f],b,c,d)}}function L(a){var b=a.children;b?(b.forEach(L),a.r=I(b)):a.r=Math.sqrt(a.value)}function K(a){delete a._pack_next,delete a._pack_prev}function J(a){a._pack_next=a._pack_prev=a}function I(a){function l(a){b=Math.min(a.x-a.r,b),c=Math.max(a.x+a.r,c),d=Math.min(a.y-a.r,d),e=Math.max(a.y+a.r,e)}var 
b=Infinity,c=-Infinity,d=Infinity,e=-Infinity,f=a.length,g,h,i,j,k;a.forEach(J),g=a[0],g.x=-g.r,g.y=0,l(g);if(f>1){h=a[1],h.x=h.r,h.y=0,l(h);if(f>2){i=a[2],N(g,h,i),l(i),F(g,i),g._pack_prev=i,F(i,h),h=g._pack_next;for(var m=3;m<f;m++){N(g,h,i=a[m]);var n=0,o=1,p=1;for(j=h._pack_next;j!==h;j=j._pack_next,o++)if(H(j,i)){n=1;break}if(n==1)for(k=g._pack_prev;k!==j._pack_prev;k=k._pack_prev,p++)if(H(k,i)){p<o&&(n=-1,j=k);break}n==0?(F(g,i),h=i,l(i)):n>0?(G(g,j),h=j,m--):(G(j,h),g=j,m--)}}}var q=(b+c)/2,r=(d+e)/2,s=0;for(var m=0;m<f;m++){var t=a[m];t.x-=q,t.y-=r,s=Math.max(s,t.r+Math.sqrt(t.x*t.x+t.y*t.y))}a.forEach(K);return s}function H(a,b){var c=b.x-a.x,d=b.y-a.y,e=a.r+b.r;return e*e-c*c-d*d>.001}function G(a,b){a._pack_next=b,b._pack_prev=a}function F(a,b){var c=a._pack_next;a._pack_next=b,b._pack_prev=a,b._pack_next=c,c._pack_prev=b}function E(a,b){return a.value-b.value}function C(a){return d3.merge(a.map(function(a){return(a.children||[]).map(function(b){return{source:a,target:b}})}))}function B(a,b){return b.value-a.value}function A(a){return a.value}function z(a){return a.children}function y(a,b){a.sort=d3.rebind(a,b.sort),a.children=d3.rebind(a,b.children),a.links=C,a.value=d3.rebind(a,b.value),a.nodes=function(b){D=!0;return(a.nodes=a)(b)};return a}function x(a){return[d3.min(a),d3.max(a)]}function w(a,b){var c=-1,d=+a[0],e=(a[1]-d)/b,f=[];while(++c<=b)f[c]=e*c+d;return f}function v(a,b){return w(a,Math.ceil(Math.log(b.length)/Math.LN2+1))}function u(a,b){return a+b[1]}function t(a){return a.reduce(u,0)}function s(a){var b=1,c=0,d=a[0][1],e,f=a.length;for(;b<f;++b)(e=a[b][1])>d&&(c=b,d=e);return c}function p(a,b,c){a.y0=b,a.y=c}function o(a){return a.y}function n(a){return a.x}function m(a){return 1}function l(a){return 20}function k(a){var b=0,c=0;a.count=0;if(!a.leaf){var 
d=a.nodes,e=d.length,f=-1,g;while(++f<e){g=d[f];if(g==null)continue;k(g),a.count+=g.count,b+=g.count*g.cx,c+=g.count*g.cy}}a.point&&(a.leaf||(a.point.x+=Math.random()-.5,a.point.y+=Math.random()-.5),a.count++,b+=a.point.x,c+=a.point.y),a.cx=b/a.count,a.cy=c/a.count}function j(){f.px+=d3.event.dx,f.py+=d3.event.dy,e.resume()}function i(){j(),f.fixed&=1,e=f=null}function h(a){a!==f&&(a.fixed&=1)}function g(a){a.fixed|=2}function c(a,c){if(a===c)return a;var d=b(a),e=b(c),f=d.pop(),g=e.pop(),h=null;while(f===g)h=f,f=d.pop(),g=e.pop();return h}function b(a){var b=[],c=a.parent;while(c!=null)b.push(a),a=c,c=c.parent;b.push(a);return b}function a(a){var b=a.source,d=a.target,e=c(b,d),f=[b];while(b!==e)b=b.parent,f.push(b);var g=f.length;while(d!==e)f.splice(g,0,d),d=d.parent;return f}d3.layout={},d3.layout.bundle=function(){return function(b){var c=[],d=-1,e=b.length;while(++d<e)c.push(a(b[d]));return c}},d3.layout.chord=function(){function k(){b.sort(function(a,b){return i(a.target.value,b.target.value)})}function j(){var a={},j=[],l=d3.range(e),m=[],n,o,p,q,r;b=[],c=[],n=0,q=-1;while(++q<e){o=0,r=-1;while(++r<e)o+=d[q][r];j.push(o),m.push(d3.range(e)),n+=o}g&&l.sort(function(a,b){return g(j[a],j[b])}),h&&m.forEach(function(a,b){a.sort(function(a,c){return h(d[b][a],d[b][c])})}),n=(2*Math.PI-f*e)/n,o=0,q=-1;while(++q<e){p=o,r=-1;while(++r<e){var s=l[q],t=m[q][r],u=d[s][t];a[s+"-"+t]={index:s,subindex:t,startAngle:o,endAngle:o+=u*n,value:u}}c.push({index:s,startAngle:p,endAngle:o,value:(o-p)/n}),o+=f}q=-1;while(++q<e){r=q-1;while(++r<e){var v=a[q+"-"+r],w=a[r+"-"+q];(v.value||w.value)&&b.push(v.value<w.value?{source:w,target:v}:{source:v,target:w})}}i&&k()}var a={},b,c,d,e,f=0,g,h,i;a.matrix=function(f){if(!arguments.length)return d;e=(d=f)&&d.length,b=c=null;return a},a.padding=function(d){if(!arguments.length)return f;f=d,b=c=null;return a},a.sortGroups=function(d){if(!arguments.length)return g;g=d,b=c=null;return 
a},a.sortSubgroups=function(c){if(!arguments.length)return h;h=c,b=null;return a},a.sortChords=function(c){if(!arguments.length)return i;i=c,b&&k();return a},a.chords=function(){b||j();return b},a.groups=function(){c||j();return c};return a},d3.layout.force=function(){function B(b){g(f=b),e=a}function A(){var a=v.length,d=w.length,e,f,g,h,i,j,l,m,p;for(f=0;f<d;++f){g=w[f],h=g.source,i=g.target,m=i.x-h.x,p=i.y-h.y;if(j=m*m+p*p)j=n*y[f]*((j=Math.sqrt(j))-x[f])/j,m*=j,p*=j,i.x-=m*(l=h.weight/(i.weight+h.weight)),i.y-=p*l,h.x+=m*(l=1-l),h.y+=p*l}if(l=n*s){m=c[0]/2,p=c[1]/2,f=-1;if(l)while(++f<a)g=v[f],g.x+=(m-g.x)*l,g.y+=(p-g.y)*l}if(l=n*r){k(e=d3.geom.quadtree(v)),f=-1;while(++f<a)(g=v[f]).fixed||e.visit(z(g,l))}f=-1;while(++f<a)g=v[f],g.fixed?(g.x=g.px,g.y=g.py):(g.x-=(g.px-(g.px=g.x))*o,g.y-=(g.py-(g.py=g.y))*o);b.tick.dispatch({type:"tick",alpha:n});return(n*=.99)<.005}function z(a,b){return function(c,d,e,f,g){if(c.point!==a){var h=c.cx-a.x,i=c.cy-a.y,j=1/Math.sqrt(h*h+i*i);if((f-d)*j<t){var k=b*c.count*j*j;a.px-=h*k,a.py-=i*k;return!0}if(c.point&&isFinite(j)){var k=b*j*j;a.px-=h*k,a.py-=i*k}}}}var a={},b=d3.dispatch("tick"),c=[1,1],d,n,o=.9,p=l,q=m,r=-30,s=.1,t=.8,u,v=[],w=[],x,y;a.on=function(c,d){b[c].add(d);return a},a.nodes=function(b){if(!arguments.length)return v;v=b;return a},a.links=function(b){if(!arguments.length)return w;w=b;return a},a.size=function(b){if(!arguments.length)return c;c=b;return a},a.linkDistance=function(b){if(!arguments.length)return p;p=d3.functor(b);return a},a.distance=a.linkDistance,a.linkStrength=function(b){if(!arguments.length)return q;q=d3.functor(b);return a},a.friction=function(b){if(!arguments.length)return o;o=b;return a},a.charge=function(b){if(!arguments.length)return r;r=b;return a},a.gravity=function(b){if(!arguments.length)return s;s=b;return a},a.theta=function(b){if(!arguments.length)return t;t=b;return a},a.start=function(){function l(){if(!i){i=[];for(d=0;d<e;++d)i[d]=[];for(d=0;d<f;++d){var 
a=w[d];i[a.source.index].push(a.target),i[a.target.index].push(a.source)}}return i[b]}function k(a,c){var d=l(b),e=-1,f=d.length,g;while(++e<f)if(!isNaN(g=d[e][a]))return g;return Math.random()*c}var b,d,e=v.length,f=w.length,g=c[0],h=c[1],i,j;for(b=0;b<e;++b)(j=v[b]).index=b,j.weight=0;x=[],y=[];for(b=0;b<f;++b)j=w[b],typeof j.source=="number"&&(j.source=v[j.source]),typeof j.target=="number"&&(j.target=v[j.target]),x[b]=p.call(this,j,b),y[b]=q.call(this,j,b),++j.source.weight,++j.target.weight;for(b=0;b<e;++b)j=v[b],isNaN(j.x)&&(j.x=k("x",g)),isNaN(j.y)&&(j.y=k("y",h)),isNaN(j.px)&&(j.px=j.x),isNaN(j.py)&&(j.py=j.y);return a.resume()},a.resume=function(){n=.1,d3.timer(A);return a},a.stop=function(){n=0;return a},a.drag=function(){d||(d=d3.behavior.drag().on("dragstart",B).on("drag",j).on("dragend",i)),this.on("mouseover.force",g).on("mouseout.force",h).call(d)};return a};var e,f;d3.layout.partition=function(){function e(e,f){var g=a.call(this,e,f);c(g[0],0,b[0],b[1]/d(g[0]));return g}function d(a){var b=a.children,c=0;if(b){var e=-1,f=b.length;while(++e<f)c=Math.max(c,d(b[e]))}return 1+c}function c(a,b,d,e){var f=a.children;a.x=b,a.y=a.depth*e,a.dx=d,a.dy=e;if(f){var g=-1,h=f.length,i,j;d=a.value?d/a.value:0;while(++g<h)c(i=f[g],b,j=i.value*d,e),b+=j}}var a=d3.layout.hierarchy(),b=[1,1];e.size=function(a){if(!arguments.length)return b;b=a;return e};return y(e,a)},d3.layout.pie=function(){function f(f,g){var h=+(typeof c=="function"?c.apply(this,arguments):c),i=(typeof e=="function"?e.apply(this,arguments):e)-c,j=d3.range(f.length);b!=null&&j.sort(function(a,c){return b(f[a],f[c])});var k=f.map(a);i/=k.reduce(function(a,b){return a+b},0);var l=j.map(function(a){return{data:f[a],value:d=k[a],startAngle:h,endAngle:h+=d*i}});return f.map(function(a,b){return l[j[b]]})}var a=Number,b=null,c=0,e=2*Math.PI;f.value=function(b){if(!arguments.length)return a;a=b;return f},f.sort=function(a){if(!arguments.length)return b;b=a;return 
f},f.startAngle=function(a){if(!arguments.length)return c;c=a;return f},f.endAngle=function(a){if(!arguments.length)return e;e=a;return f};return f},d3.layout.stack=function(){function g(h,i){var j=h.map(function(b,c){return a.call(g,b,c)}),k=j.map(function(a,b){return a.map(function(a,b){return[e.call(g,a,b),f.call(g,a,b)]})}),l=b.call(g,k,i);j=d3.permute(j,l),k=d3.permute(k,l);var m=c.call(g,k,i),n=j.length,o=j[0].length,p,q,r;for(q=0;q<o;++q){d.call(g,j[0][q],r=m[q],k[0][q][1]);for(p=1;p<n;++p)d.call(g,j[p][q],r+=k[p-1][q][1],k[p][q][1])}return h}var a=Object,b=q["default"],c=r.zero,d=p,e=n,f=o;g.values=function(b){if(!arguments.length)return a;a=b;return g},g.order=function(a){if(!arguments.length)return b;b=typeof a=="function"?a:q[a];return g},g.offset=function(a){if(!arguments.length)return c;c=typeof a=="function"?a:r[a];return g},g.x=function(a){if(!arguments.length)return e;e=a;return g},g.y=function(a){if(!arguments.length)return f;f=a;return g},g.out=function(a){if(!arguments.length)return d;d=a;return g};return g};var q={"inside-out":function(a){var b=a.length,c,d,e=a.map(s),f=a.map(t),g=d3.range(b).sort(function(a,b){return e[a]-e[b]}),h=0,i=0,j=[],k=[];for(c=0;c<b;++c)d=g[c],h<i?(h+=f[d],j.push(d)):(i+=f[d],k.push(d));return k.reverse().concat(j)},reverse:function(a){return d3.range(a.length).reverse()},"default":function(a){return d3.range(a.length)}},r={silhouette:function(a){var b=a.length,c=a[0].length,d=[],e=0,f,g,h,i=[];for(g=0;g<c;++g){for(f=0,h=0;f<b;f++)h+=a[f][g][1];h>e&&(e=h),d.push(h)}for(g=0;g<c;++g)i[g]=(e-d[g])/2;return i},wiggle:function(a){var b=a.length,c=a[0],d=c.length,e=0,f,g,h,i,j,k,l,m,n,o=[];o[0]=m=n=0;for(g=1;g<d;++g){for(f=0,i=0;f<b;++f)i+=a[f][g][1];for(f=0,j=0,l=c[g][0]-c[g-1][0];f<b;++f){for(h=0,k=(a[f][g][1]-a[f][g-1][1])/(2*l);h<f;++h)k+=(a[h][g][1]-a[h][g-1][1])/l;j+=k*a[f][g][1]}o[g]=m-=i?j/i*l:0,m<n&&(n=m)}for(g=0;g<d;++g)o[g]-=n;return o},expand:function(a){var 
b=a.length,c=a[0].length,d=1/b,e,f,g,h=[];for(f=0;f<c;++f){for(e=0,g=0;e<b;e++)g+=a[e][f][1];if(g)for(e=0;e<b;e++)a[e][f][1]/=g;else for(e=0;e<b;e++)a[e][f][1]=d}for(f=0;f<c;++f)h[f]=0;return h},zero:function(a){var b=-1,c=a[0].length,d=[];while(++b<c)d[b]=0;return d}};d3.layout.histogram=function(){function e(e,f){var g=[],h=e.map(b,this),i=c.call(this,h,f),j=d.call(this,i,h,f),k,f=-1,l=h.length,m=j.length-1,n=a?1:1/l,o;while(++f<m)k=g[f]=[],k.dx=j[f+1]-(k.x=j[f]),k.y=0;f=-1;while(++f<l)o=h[f],o>=i[0]&&o<=i[1]&&(k=g[d3.bisect(j,o,1,m)-1],k.y+=n,k.push(e[f]));return g}var a=!0,b=Number,c=x,d=v;e.value=function(a){if(!arguments.length)return b;b=a;return e},e.range=function(a){if(!arguments.length)return c;c=d3.functor(a);return e},e.bins=function(a){if(!arguments.length)return d;d=typeof a=="number"?function(b){return w(b,a)}:d3.functor(a);return e},e.frequency=function(b){if(!arguments.length)return a;a=!!b;return e};return e},d3.layout.hierarchy=function(){function g(a){var b=[];e(a,0,b);return b}function f(a,b){var d=a.children,e=0;if(d&&(i=d.length)){var h=-1,i,j=b+1;while(++h<i)e+=f(d[h],j)}else c&&(e=+c.call(g,D?a:a.data,b)||0);c&&(a.value=e);return e}function e(f,h,i){var j=b.call(g,f,h),k=D?f:{data:f};k.depth=h,i.push(k);if(j&&(m=j.length)){var l=-1,m,n=k.children=[],o=0,p=h+1;while(++l<m)d=e(j[l],p,i),d.parent=k,n.push(d),o+=d.value;a&&n.sort(a),c&&(k.value=o)}else c&&(k.value=+c.call(g,f,h)||0);return k}var a=B,b=z,c=A;g.sort=function(b){if(!arguments.length)return a;a=b;return g},g.children=function(a){if(!arguments.length)return b;b=a;return g},g.value=function(a){if(!arguments.length)return c;c=a;return g},g.revalue=function(a){f(a,0);return a},g.nodes=function(a){D=!0;return(g.nodes=g)(a)};return g};var D=!1;d3.layout.pack=function(){function c(c,d){var e=a.call(this,c,d),f=e[0];f.x=0,f.y=0,L(f);var g=b[0],h=b[1],i=1/Math.max(2*f.r/g,2*f.r/h);M(f,g/2,h/2,i);return e}var 
a=d3.layout.hierarchy().sort(E),b=[1,1];c.size=function(a){if(!arguments.length)return b;b=a;return c};return y(c,a)},d3.layout.cluster=function(){function d(d,e){var f=a.call(this,d,e),g=f[0],h,i=0,j,k;Z(g,function(a){var c=a.children;c&&c.length?(a.x=P(c),a.y=O(c)):(a.x=h?i+=b(a,h):0,a.y=0,h=a)});var l=Q(g),m=R(g),n=l.x-b(l,m)/2,o=m.x+b(m,l)/2;Z(g,function(a){a.x=(a.x-n)/(o-n)*c[0],a.y=(1-a.y/g.y)*c[1]});return f}var a=d3.layout.hierarchy().sort(null).value(null),b=S,c=[1,1];d.separation=function(a){if(!arguments.length)return b;b=a;return d},d.size=function(a){if(!arguments.length)return c;c=a;return d};return y(d,a)},d3.layout.tree=function(){function d(d,e){function j(a,c,d){if(c){var e=a,f=a,g=c,h=a.parent.children[0],i=e._tree.mod,j=f._tree.mod,k=g._tree.mod,l=h._tree.mod,m;while(g=U(g),e=T(e),g&&e)h=T(h),f=U(f),f._tree.ancestor=a,m=g._tree.prelim+k-e._tree.prelim-i+b(g,e),m>0&&(_(ba(g,a,d),a,m),i+=m,j+=m),k+=g._tree.mod,i+=e._tree.mod,l+=h._tree.mod,j+=f._tree.mod;g&&!U(f)&&(f._tree.thread=g,f._tree.mod+=k-j),e&&!T(h)&&(h._tree.thread=e,h._tree.mod+=i-l,d=a)}return d}function i(a,b){a.x=a._tree.prelim+b;var c=a.children;if(c){var d=-1,e=c.length;b+=a._tree.mod;while(++d<e)i(c[d],b)}}function h(a,c){var d=a.children,e=a._tree;if(d&&(f=d.length)){var f,g=d[0],i,k=g,l,m=-1;while(++m<f)l=d[m],h(l,i),k=j(l,i,k),i=l;$(a);var n=.5*(g._tree.prelim+l._tree.prelim);c?(e.prelim=c._tree.prelim+b(a,c),e.mod=e.prelim-n):e.prelim=n}else c&&(e.prelim=c._tree.prelim+b(a,c))}var f=a.call(this,d,e),g=f[0];Z(g,function(a,b){a._tree={ancestor:a,prelim:0,mod:0,change:0,shift:0,number:b?b._tree.number+1:0}}),h(g),i(g,-g._tree.prelim);var k=V(g,X),l=V(g,W),m=V(g,Y),n=k.x-b(k,l)/2,o=l.x+b(l,k)/2,p=m.depth||1;Z(g,function(a){a.x=(a.x-n)/(o-n)*c[0],a.y=a.depth/p*c[1],delete a._tree});return f}var a=d3.layout.hierarchy().sort(null).value(null),b=S,c=[1,1];d.separation=function(a){if(!arguments.length)return b;b=a;return d},d.size=function(a){if(!arguments.length)return c;c=a;return 
d};return y(d,a)},d3.layout.treemap=function(){function n(b){var d=g||a(b),e=d[0];e.x=0,e.y=0,e.dx=c[0],e.dy=c[1],g&&a.revalue(e),i([e],e.dx*e.dy/e.value),(g?k:j)(e),f&&(g=d);return d}function m(a,c,d,e){var f=-1,g=a.length,h=d.x,i=d.y,j=c?b(a.area/c):0,k;if(c==d.dx){if(e||j>d.dy)j=j?d.dy:0;while(++f<g)k=a[f],k.x=h,k.y=i,k.dy=j,h+=k.dx=j?b(k.area/j):0;k.z=!0,k.dx+=d.x+d.dx-h,d.y+=j,d.dy-=j}else{if(e||j>d.dx)j=j?d.dx:0;while(++f<g)k=a[f],k.x=h,k.y=i,k.dx=j,i+=k.dy=j?b(k.area/j):0;k.z=!1,k.dy+=d.y+d.dy-i,d.x+=j,d.dx-=j}}function l(a,b){var c=a.area,d,e=0,f=Infinity,g=-1,i=a.length;while(++g<i){if(!(d=a[g].area))continue;d<f&&(f=d),d>e&&(e=d)}c*=c,b*=b;return c?Math.max(b*e*h/c,c/(b*f*h)):Infinity}function k(a){if(!!a.children){var b=e(a),c=a.children.slice(),d,f=[];i(c,b.dx*b.dy/a.value),f.area=0;while(d=c.pop())f.push(d),f.area+=d.area,d.z!=null&&(m(f,d.z?b.dx:b.dy,b,!c.length),f.length=f.area=0);a.children.forEach(k)}}function j(a){if(!!a.children){var b=e(a),c=[],d=a.children.slice(),f,g=Infinity,h,k=Math.min(b.dx,b.dy),n;i(d,b.dx*b.dy/a.value),c.area=0;while((n=d.length)>0)c.push(f=d[n-1]),c.area+=f.area,(h=l(c,k))<=g?(d.pop(),g=h):(c.area-=c.pop().area,m(c,k,b,!1),k=Math.min(b.dx,b.dy),c.length=c.area=0,g=Infinity);c.length&&(m(c,k,b,!0),c.length=c.area=0),a.children.forEach(j)}}function i(a,b){var c=-1,d=a.length,e,f;while(++c<d)f=(e=a[c]).value*(b<0?0:b),e.area=isNaN(f)||f<=0?0:f}var a=d3.layout.hierarchy(),b=Math.round,c=[1,1],d=null,e=bb,f=!1,g,h=.5*(1+Math.sqrt(5));n.size=function(a){if(!arguments.length)return c;c=a;return n},n.padding=function(a){function c(b){return bc(b,a)}function b(b){var c=a.call(n,b,b.depth);return c==null?bb(b):bc(b,typeof c=="number"?[c,c,c,c]:c)}if(!arguments.length)return d;var f;e=(d=a)==null?bb:(f=typeof a)==="function"?b:f==="number"?(a=[a,a,a,a],c):c;return n},n.round=function(a){if(!arguments.length)return b!=Number;b=a?Math.round:Number;return n},n.sticky=function(a){if(!arguments.length)return f;f=a,g=null;return 
n},n.ratio=function(a){if(!arguments.length)return h;h=a;return n};return y(n,a)}})()
<ide>\ No newline at end of file
<add>(function(){function bc(a,b){var c=a.x+b[3],d=a.y+b[0],e=a.dx-b[1]-b[3],f=a.dy-b[0]-b[2];e<0&&(c+=e/2,e=0),f<0&&(d+=f/2,f=0);return{x:c,y:d,dx:e,dy:f}}function bb(a){return{x:a.x,y:a.y,dx:a.dx,dy:a.dy}}function ba(a,b,c){return a._tree.ancestor.parent==b.parent?a._tree.ancestor:c}function _(a,b,c){a=a._tree,b=b._tree;var d=c/(b.number-a.number);a.change+=d,b.change-=d,b.shift+=c,b.prelim+=c,b.mod+=c}function $(a){var b=0,c=0,d=a.children,e=d.length,f;while(--e>=0)f=d[e]._tree,f.prelim+=b,f.mod+=b,b+=f.shift+(c+=f.change)}function Z(a,b){function c(a,d){var e=a.children;if(e){var f,g=null,h=-1,i=e.length;while(++h<i)f=e[h],c(f,g),g=f}b(a,d)}c(a,null)}function Y(a,b){return a.depth-b.depth}function X(a,b){return b.x-a.x}function W(a,b){return a.x-b.x}function V(a,b){var c=a.children;if(c){var d,e=c.length,f=-1;while(++f<e)b(d=V(c[f],b),a)>0&&(a=d)}return a}function U(a){return a.children?a.children[a.children.length-1]:a._tree.thread}function T(a){return a.children?a.children[0]:a._tree.thread}function S(a,b){return a.parent==b.parent?1:2}function R(a){var b=a.children,c;return b&&(c=b.length)?R(b[c-1]):a}function Q(a){var b=a.children;return b&&b.length?Q(b[0]):a}function P(a){return a.reduce(function(a,b){return a+b.x},0)/a.length}function O(a){return 1+d3.max(a,function(a){return a.y})}function N(a,b,c){var d=b.r+c.r,e=a.r+c.r,f=b.x-a.x,g=b.y-a.y,h=Math.sqrt(f*f+g*g),i=(e*e+h*h-d*d)/(2*e*h),j=Math.acos(i),k=i*e,l=Math.sin(j)*e;f/=h,g/=h,c.x=a.x+k*f+l*g,c.y=a.y+k*g-l*f}function M(a,b,c,d){var e=a.children;a.x=b+=d*a.x,a.y=c+=d*a.y,a.r*=d;if(e){var f=-1,g=e.length;while(++f<g)M(e[f],b,c,d)}}function L(a){var b=a.children;b?(b.forEach(L),a.r=I(b)):a.r=Math.sqrt(a.value)}function K(a){delete a._pack_next,delete a._pack_prev}function J(a){a._pack_next=a._pack_prev=a}function I(a){function l(a){b=Math.min(a.x-a.r,b),c=Math.max(a.x+a.r,c),d=Math.min(a.y-a.r,d),e=Math.max(a.y+a.r,e)}var 
b=Infinity,c=-Infinity,d=Infinity,e=-Infinity,f=a.length,g,h,i,j,k;a.forEach(J),g=a[0],g.x=-g.r,g.y=0,l(g);if(f>1){h=a[1],h.x=h.r,h.y=0,l(h);if(f>2){i=a[2],N(g,h,i),l(i),F(g,i),g._pack_prev=i,F(i,h),h=g._pack_next;for(var m=3;m<f;m++){N(g,h,i=a[m]);var n=0,o=1,p=1;for(j=h._pack_next;j!==h;j=j._pack_next,o++)if(H(j,i)){n=1;break}if(n==1)for(k=g._pack_prev;k!==j._pack_prev;k=k._pack_prev,p++)if(H(k,i)){p<o&&(n=-1,j=k);break}n==0?(F(g,i),h=i,l(i)):n>0?(G(g,j),h=j,m--):(G(j,h),g=j,m--)}}}var q=(b+c)/2,r=(d+e)/2,s=0;for(var m=0;m<f;m++){var t=a[m];t.x-=q,t.y-=r,s=Math.max(s,t.r+Math.sqrt(t.x*t.x+t.y*t.y))}a.forEach(K);return s}function H(a,b){var c=b.x-a.x,d=b.y-a.y,e=a.r+b.r;return e*e-c*c-d*d>.001}function G(a,b){a._pack_next=b,b._pack_prev=a}function F(a,b){var c=a._pack_next;a._pack_next=b,b._pack_prev=a,b._pack_next=c,c._pack_prev=b}function E(a,b){return a.value-b.value}function C(a){return d3.merge(a.map(function(a){return(a.children||[]).map(function(b){return{source:a,target:b}})}))}function B(a,b){return b.value-a.value}function A(a){return a.value}function z(a){return a.children}function y(a,b){a.sort=d3.rebind(a,b.sort),a.children=d3.rebind(a,b.children),a.links=C,a.value=d3.rebind(a,b.value),a.nodes=function(b){D=!0;return(a.nodes=a)(b)};return a}function x(a){return[d3.min(a),d3.max(a)]}function w(a,b){var c=-1,d=+a[0],e=(a[1]-d)/b,f=[];while(++c<=b)f[c]=e*c+d;return f}function v(a,b){return w(a,Math.ceil(Math.log(b.length)/Math.LN2+1))}function u(a,b){return a+b[1]}function t(a){return a.reduce(u,0)}function s(a){var b=1,c=0,d=a[0][1],e,f=a.length;for(;b<f;++b)(e=a[b][1])>d&&(c=b,d=e);return c}function p(a,b,c){a.y0=b,a.y=c}function o(a){return a.y}function n(a){return a.x}function m(a){return 1}function l(a){return 20}function k(a){var b=0,c=0;a.count=0;if(!a.leaf){var 
d=a.nodes,e=d.length,f=-1,g;while(++f<e){g=d[f];if(g==null)continue;k(g),a.count+=g.count,b+=g.count*g.cx,c+=g.count*g.cy}}a.point&&(a.leaf||(a.point.x+=Math.random()-.5,a.point.y+=Math.random()-.5),a.count++,b+=a.point.x,c+=a.point.y),a.cx=b/a.count,a.cy=c/a.count}function j(){f.px+=d3.event.dx,f.py+=d3.event.dy,e.resume()}function i(){j(),f.fixed&=1,e=f=null}function h(a){a!==f&&(a.fixed&=1)}function g(a){a.fixed|=2}function c(a,c){if(a===c)return a;var d=b(a),e=b(c),f=d.pop(),g=e.pop(),h=null;while(f===g)h=f,f=d.pop(),g=e.pop();return h}function b(a){var b=[],c=a.parent;while(c!=null)b.push(a),a=c,c=c.parent;b.push(a);return b}function a(a){var b=a.source,d=a.target,e=c(b,d),f=[b];while(b!==e)b=b.parent,f.push(b);var g=f.length;while(d!==e)f.splice(g,0,d),d=d.parent;return f}d3.layout={},d3.layout.bundle=function(){return function(b){var c=[],d=-1,e=b.length;while(++d<e)c.push(a(b[d]));return c}},d3.layout.chord=function(){function k(){b.sort(function(a,b){return i(a.target.value,b.target.value)})}function j(){var a={},j=[],l=d3.range(e),m=[],n,o,p,q,r;b=[],c=[],n=0,q=-1;while(++q<e){o=0,r=-1;while(++r<e)o+=d[q][r];j.push(o),m.push(d3.range(e)),n+=o}g&&l.sort(function(a,b){return g(j[a],j[b])}),h&&m.forEach(function(a,b){a.sort(function(a,c){return h(d[b][a],d[b][c])})}),n=(2*Math.PI-f*e)/n,o=0,q=-1;while(++q<e){p=o,r=-1;while(++r<e){var s=l[q],t=m[q][r],u=d[s][t];a[s+"-"+t]={index:s,subindex:t,startAngle:o,endAngle:o+=u*n,value:u}}c.push({index:s,startAngle:p,endAngle:o,value:(o-p)/n}),o+=f}q=-1;while(++q<e){r=q-1;while(++r<e){var v=a[q+"-"+r],w=a[r+"-"+q];(v.value||w.value)&&b.push(v.value<w.value?{source:w,target:v}:{source:v,target:w})}}i&&k()}var a={},b,c,d,e,f=0,g,h,i;a.matrix=function(f){if(!arguments.length)return d;e=(d=f)&&d.length,b=c=null;return a},a.padding=function(d){if(!arguments.length)return f;f=d,b=c=null;return a},a.sortGroups=function(d){if(!arguments.length)return g;g=d,b=c=null;return 
a},a.sortSubgroups=function(c){if(!arguments.length)return h;h=c,b=null;return a},a.sortChords=function(c){if(!arguments.length)return i;i=c,b&&k();return a},a.chords=function(){b||j();return b},a.groups=function(){c||j();return c};return a},d3.layout.force=function(){function B(b){g(f=b),e=a}function A(){var a=v.length,d=w.length,e,f,g,h,i,j,l,m,p;for(f=0;f<d;++f){g=w[f],h=g.source,i=g.target,m=i.x-h.x,p=i.y-h.y;if(j=m*m+p*p)j=n*y[f]*((j=Math.sqrt(j))-x[f])/j,m*=j,p*=j,i.x-=m*(l=h.weight/(i.weight+h.weight)),i.y-=p*l,h.x+=m*(l=1-l),h.y+=p*l}if(l=n*s){m=c[0]/2,p=c[1]/2,f=-1;if(l)while(++f<a)g=v[f],g.x+=(m-g.x)*l,g.y+=(p-g.y)*l}if(l=n*r){k(e=d3.geom.quadtree(v)),f=-1;while(++f<a)(g=v[f]).fixed||e.visit(z(g,l))}f=-1;while(++f<a)g=v[f],g.fixed?(g.x=g.px,g.y=g.py):(g.x-=(g.px-(g.px=g.x))*o,g.y-=(g.py-(g.py=g.y))*o);b.tick.dispatch({type:"tick",alpha:n});return(n*=.99)<.005}function z(a,b){return function(c,d,e,f,g){if(c.point!==a){var h=c.cx-a.x,i=c.cy-a.y,j=1/Math.sqrt(h*h+i*i);if((f-d)*j<t){var k=b*c.count*j*j;a.px-=h*k,a.py-=i*k;return!0}if(c.point&&isFinite(j)){var k=b*j*j;a.px-=h*k,a.py-=i*k}}}}var a={},b=d3.dispatch("tick"),c=[1,1],d,n,o=.9,p=l,q=m,r=-30,s=.1,t=.8,u,v=[],w=[],x,y;a.on=function(c,d){b[c].add(d);return a},a.nodes=function(b){if(!arguments.length)return v;v=b;return a},a.links=function(b){if(!arguments.length)return w;w=b;return a},a.size=function(b){if(!arguments.length)return c;c=b;return a},a.linkDistance=function(b){if(!arguments.length)return p;p=d3.functor(b);return a},a.distance=a.linkDistance,a.linkStrength=function(b){if(!arguments.length)return q;q=d3.functor(b);return a},a.friction=function(b){if(!arguments.length)return o;o=b;return a},a.charge=function(b){if(!arguments.length)return r;r=b;return a},a.gravity=function(b){if(!arguments.length)return s;s=b;return a},a.theta=function(b){if(!arguments.length)return t;t=b;return a},a.start=function(){function l(){if(!i){i=[];for(d=0;d<e;++d)i[d]=[];for(d=0;d<f;++d){var 
a=w[d];i[a.source.index].push(a.target),i[a.target.index].push(a.source)}}return i[b]}function k(a,c){var d=l(b),e=-1,f=d.length,g;while(++e<f)if(!isNaN(g=d[e][a]))return g;return Math.random()*c}var b,d,e=v.length,f=w.length,g=c[0],h=c[1],i,j;for(b=0;b<e;++b)(j=v[b]).index=b,j.weight=0;x=[],y=[];for(b=0;b<f;++b)j=w[b],typeof j.source=="number"&&(j.source=v[j.source]),typeof j.target=="number"&&(j.target=v[j.target]),x[b]=p.call(this,j,b),y[b]=q.call(this,j,b),++j.source.weight,++j.target.weight;for(b=0;b<e;++b)j=v[b],isNaN(j.x)&&(j.x=k("x",g)),isNaN(j.y)&&(j.y=k("y",h)),isNaN(j.px)&&(j.px=j.x),isNaN(j.py)&&(j.py=j.y);return a.resume()},a.resume=function(){n=.1,d3.timer(A);return a},a.stop=function(){n=0;return a},a.drag=function(){d||(d=d3.behavior.drag().on("dragstart",B).on("drag",j).on("dragend",i)),this.on("mouseover.force",g).on("mouseout.force",h).call(d)};return a};var e,f;d3.layout.partition=function(){function e(e,f){var g=a.call(this,e,f);c(g[0],0,b[0],b[1]/d(g[0]));return g}function d(a){var b=a.children,c=0;if(b){var e=-1,f=b.length;while(++e<f)c=Math.max(c,d(b[e]))}return 1+c}function c(a,b,d,e){var f=a.children;a.x=b,a.y=a.depth*e,a.dx=d,a.dy=e;if(f){var g=-1,h=f.length,i,j;d=a.value?d/a.value:0;while(++g<h)c(i=f[g],b,j=i.value*d,e),b+=j}}var a=d3.layout.hierarchy(),b=[1,1];e.size=function(a){if(!arguments.length)return b;b=a;return e};return y(e,a)},d3.layout.pie=function(){function f(f,g){var h=+(typeof c=="function"?c.apply(this,arguments):c),i=(typeof e=="function"?e.apply(this,arguments):e)-c,j=d3.range(f.length);b!=null&&j.sort(function(a,c){return b(f[a],f[c])});var k=f.map(a);i/=k.reduce(function(a,b){return a+b},0);var l=j.map(function(a){return{data:f[a],value:d=k[a],startAngle:h,endAngle:h+=d*i}});return f.map(function(a,b){return l[j[b]]})}var a=Number,b=null,c=0,e=2*Math.PI;f.value=function(b){if(!arguments.length)return a;a=b;return f},f.sort=function(a){if(!arguments.length)return b;b=a;return 
f},f.startAngle=function(a){if(!arguments.length)return c;c=a;return f},f.endAngle=function(a){if(!arguments.length)return e;e=a;return f};return f},d3.layout.stack=function(){function g(h,i){var j=h.map(function(b,c){return a.call(g,b,c)}),k=j.map(function(a,b){return a.map(function(a,b){return[e.call(g,a,b),f.call(g,a,b)]})}),l=b.call(g,k,i);j=d3.permute(j,l),k=d3.permute(k,l);var m=c.call(g,k,i),n=j.length,o=j[0].length,p,q,r;for(q=0;q<o;++q){d.call(g,j[0][q],r=m[q],k[0][q][1]);for(p=1;p<n;++p)d.call(g,j[p][q],r+=k[p-1][q][1],k[p][q][1])}return h}var a=Object,b=q["default"],c=r.zero,d=p,e=n,f=o;g.values=function(b){if(!arguments.length)return a;a=b;return g},g.order=function(a){if(!arguments.length)return b;b=typeof a=="function"?a:q[a];return g},g.offset=function(a){if(!arguments.length)return c;c=typeof a=="function"?a:r[a];return g},g.x=function(a){if(!arguments.length)return e;e=a;return g},g.y=function(a){if(!arguments.length)return f;f=a;return g},g.out=function(a){if(!arguments.length)return d;d=a;return g};return g};var q={"inside-out":function(a){var b=a.length,c,d,e=a.map(s),f=a.map(t),g=d3.range(b).sort(function(a,b){return e[a]-e[b]}),h=0,i=0,j=[],k=[];for(c=0;c<b;++c)d=g[c],h<i?(h+=f[d],j.push(d)):(i+=f[d],k.push(d));return k.reverse().concat(j)},reverse:function(a){return d3.range(a.length).reverse()},"default":function(a){return d3.range(a.length)}},r={silhouette:function(a){var b=a.length,c=a[0].length,d=[],e=0,f,g,h,i=[];for(g=0;g<c;++g){for(f=0,h=0;f<b;f++)h+=a[f][g][1];h>e&&(e=h),d.push(h)}for(g=0;g<c;++g)i[g]=(e-d[g])/2;return i},wiggle:function(a){var b=a.length,c=a[0],d=c.length,e=0,f,g,h,i,j,k,l,m,n,o=[];o[0]=m=n=0;for(g=1;g<d;++g){for(f=0,i=0;f<b;++f)i+=a[f][g][1];for(f=0,j=0,l=c[g][0]-c[g-1][0];f<b;++f){for(h=0,k=(a[f][g][1]-a[f][g-1][1])/(2*l);h<f;++h)k+=(a[h][g][1]-a[h][g-1][1])/l;j+=k*a[f][g][1]}o[g]=m-=i?j/i*l:0,m<n&&(n=m)}for(g=0;g<d;++g)o[g]-=n;return o},expand:function(a){var 
b=a.length,c=a[0].length,d=1/b,e,f,g,h=[];for(f=0;f<c;++f){for(e=0,g=0;e<b;e++)g+=a[e][f][1];if(g)for(e=0;e<b;e++)a[e][f][1]/=g;else for(e=0;e<b;e++)a[e][f][1]=d}for(f=0;f<c;++f)h[f]=0;return h},zero:function(a){var b=-1,c=a[0].length,d=[];while(++b<c)d[b]=0;return d}};d3.layout.histogram=function(){function e(e,f){var g=[],h=e.map(b,this),i=c.call(this,h,f),j=d.call(this,i,h,f),k,f=-1,l=h.length,m=j.length-1,n=a?1:1/l,o;while(++f<m)k=g[f]=[],k.dx=j[f+1]-(k.x=j[f]),k.y=0;f=-1;while(++f<l)o=h[f],o>=i[0]&&o<=i[1]&&(k=g[d3.bisect(j,o,1,m)-1],k.y+=n,k.push(e[f]));return g}var a=!0,b=Number,c=x,d=v;e.value=function(a){if(!arguments.length)return b;b=a;return e},e.range=function(a){if(!arguments.length)return c;c=d3.functor(a);return e},e.bins=function(a){if(!arguments.length)return d;d=typeof a=="number"?function(b){return w(b,a)}:d3.functor(a);return e},e.frequency=function(b){if(!arguments.length)return a;a=!!b;return e};return e},d3.layout.hierarchy=function(){function g(a){var b=[];e(a,0,b);return b}function f(a,b){var d=a.children,e=0;if(d&&(i=d.length)){var h=-1,i,j=b+1;while(++h<i)e+=f(d[h],j)}else c&&(e=+c.call(g,D?a:a.data,b)||0);c&&(a.value=e);return e}function e(f,h,i){var j=b.call(g,f,h),k=D?f:{data:f};k.depth=h,i.push(k);if(j&&(m=j.length)){var l=-1,m,n=k.children=[],o=0,p=h+1;while(++l<m)d=e(j[l],p,i),d.parent=k,n.push(d),o+=d.value;a&&n.sort(a),c&&(k.value=o)}else c&&(k.value=+c.call(g,f,h)||0);return k}var a=B,b=z,c=A;g.sort=function(b){if(!arguments.length)return a;a=b;return g},g.children=function(a){if(!arguments.length)return b;b=a;return g},g.value=function(a){if(!arguments.length)return c;c=a;return g},g.revalue=function(a){f(a,0);return a};return g};var D=!1;d3.layout.pack=function(){function c(c,d){var e=a.call(this,c,d),f=e[0];f.x=0,f.y=0,L(f);var g=b[0],h=b[1],i=1/Math.max(2*f.r/g,2*f.r/h);M(f,g/2,h/2,i);return e}var a=d3.layout.hierarchy().sort(E),b=[1,1];c.size=function(a){if(!arguments.length)return b;b=a;return c};return 
y(c,a)},d3.layout.cluster=function(){function d(d,e){var f=a.call(this,d,e),g=f[0],h,i=0,j,k;Z(g,function(a){var c=a.children;c&&c.length?(a.x=P(c),a.y=O(c)):(a.x=h?i+=b(a,h):0,a.y=0,h=a)});var l=Q(g),m=R(g),n=l.x-b(l,m)/2,o=m.x+b(m,l)/2;Z(g,function(a){a.x=(a.x-n)/(o-n)*c[0],a.y=(1-a.y/g.y)*c[1]});return f}var a=d3.layout.hierarchy().sort(null).value(null),b=S,c=[1,1];d.separation=function(a){if(!arguments.length)return b;b=a;return d},d.size=function(a){if(!arguments.length)return c;c=a;return d};return y(d,a)},d3.layout.tree=function(){function d(d,e){function j(a,c,d){if(c){var e=a,f=a,g=c,h=a.parent.children[0],i=e._tree.mod,j=f._tree.mod,k=g._tree.mod,l=h._tree.mod,m;while(g=U(g),e=T(e),g&&e)h=T(h),f=U(f),f._tree.ancestor=a,m=g._tree.prelim+k-e._tree.prelim-i+b(g,e),m>0&&(_(ba(g,a,d),a,m),i+=m,j+=m),k+=g._tree.mod,i+=e._tree.mod,l+=h._tree.mod,j+=f._tree.mod;g&&!U(f)&&(f._tree.thread=g,f._tree.mod+=k-j),e&&!T(h)&&(h._tree.thread=e,h._tree.mod+=i-l,d=a)}return d}function i(a,b){a.x=a._tree.prelim+b;var c=a.children;if(c){var d=-1,e=c.length;b+=a._tree.mod;while(++d<e)i(c[d],b)}}function h(a,c){var d=a.children,e=a._tree;if(d&&(f=d.length)){var f,g=d[0],i,k=g,l,m=-1;while(++m<f)l=d[m],h(l,i),k=j(l,i,k),i=l;$(a);var n=.5*(g._tree.prelim+l._tree.prelim);c?(e.prelim=c._tree.prelim+b(a,c),e.mod=e.prelim-n):e.prelim=n}else c&&(e.prelim=c._tree.prelim+b(a,c))}var f=a.call(this,d,e),g=f[0];Z(g,function(a,b){a._tree={ancestor:a,prelim:0,mod:0,change:0,shift:0,number:b?b._tree.number+1:0}}),h(g),i(g,-g._tree.prelim);var k=V(g,X),l=V(g,W),m=V(g,Y),n=k.x-b(k,l)/2,o=l.x+b(l,k)/2,p=m.depth||1;Z(g,function(a){a.x=(a.x-n)/(o-n)*c[0],a.y=a.depth/p*c[1],delete a._tree});return f}var a=d3.layout.hierarchy().sort(null).value(null),b=S,c=[1,1];d.separation=function(a){if(!arguments.length)return b;b=a;return d},d.size=function(a){if(!arguments.length)return c;c=a;return d};return y(d,a)},d3.layout.treemap=function(){function n(b){var 
d=g||a(b),e=d[0];e.x=0,e.y=0,e.dx=c[0],e.dy=c[1],g&&a.revalue(e),i([e],e.dx*e.dy/e.value),(g?k:j)(e),f&&(g=d);return d}function m(a,c,d,e){var f=-1,g=a.length,h=d.x,i=d.y,j=c?b(a.area/c):0,k;if(c==d.dx){if(e||j>d.dy)j=j?d.dy:0;while(++f<g)k=a[f],k.x=h,k.y=i,k.dy=j,h+=k.dx=j?b(k.area/j):0;k.z=!0,k.dx+=d.x+d.dx-h,d.y+=j,d.dy-=j}else{if(e||j>d.dx)j=j?d.dx:0;while(++f<g)k=a[f],k.x=h,k.y=i,k.dx=j,i+=k.dy=j?b(k.area/j):0;k.z=!1,k.dy+=d.y+d.dy-i,d.x+=j,d.dx-=j}}function l(a,b){var c=a.area,d,e=0,f=Infinity,g=-1,i=a.length;while(++g<i){if(!(d=a[g].area))continue;d<f&&(f=d),d>e&&(e=d)}c*=c,b*=b;return c?Math.max(b*e*h/c,c/(b*f*h)):Infinity}function k(a){if(!!a.children){var b=e(a),c=a.children.slice(),d,f=[];i(c,b.dx*b.dy/a.value),f.area=0;while(d=c.pop())f.push(d),f.area+=d.area,d.z!=null&&(m(f,d.z?b.dx:b.dy,b,!c.length),f.length=f.area=0);a.children.forEach(k)}}function j(a){if(!!a.children){var b=e(a),c=[],d=a.children.slice(),f,g=Infinity,h,k=Math.min(b.dx,b.dy),n;i(d,b.dx*b.dy/a.value),c.area=0;while((n=d.length)>0)c.push(f=d[n-1]),c.area+=f.area,(h=l(c,k))<=g?(d.pop(),g=h):(c.area-=c.pop().area,m(c,k,b,!1),k=Math.min(b.dx,b.dy),c.length=c.area=0,g=Infinity);c.length&&(m(c,k,b,!0),c.length=c.area=0),a.children.forEach(j)}}function i(a,b){var c=-1,d=a.length,e,f;while(++c<d)f=(e=a[c]).value*(b<0?0:b),e.area=isNaN(f)||f<=0?0:f}var a=d3.layout.hierarchy(),b=Math.round,c=[1,1],d=null,e=bb,f=!1,g,h=.5*(1+Math.sqrt(5));n.size=function(a){if(!arguments.length)return c;c=a;return n},n.padding=function(a){function c(b){return bc(b,a)}function b(b){var c=a.call(n,b,b.depth);return c==null?bb(b):bc(b,typeof c=="number"?[c,c,c,c]:c)}if(!arguments.length)return d;var f;e=(d=a)==null?bb:(f=typeof a)==="function"?b:f==="number"?(a=[a,a,a,a],c):c;return n},n.round=function(a){if(!arguments.length)return b!=Number;b=a?Math.round:Number;return n},n.sticky=function(a){if(!arguments.length)return f;f=a,g=null;return n},n.ratio=function(a){if(!arguments.length)return h;h=a;return n};return 
y(n,a)}})()
<ide>\ No newline at end of file
<ide><path>src/layout/cluster.js
<ide> function d3_layout_clusterX(children) {
<ide>
<ide> function d3_layout_clusterLeft(node) {
<ide> var children = node.children;
<del> return children && children.length
<del> ? d3_layout_clusterLeft(children[0]) : node;
<add> return children && children.length ? d3_layout_clusterLeft(children[0]) : node;
<ide> }
<ide>
<ide> function d3_layout_clusterRight(node) {
<del> var children = node.children,
<del> n;
<del> return children && (n = children.length)
<del> ? d3_layout_clusterRight(children[n - 1]) : node;
<add> var children = node.children, n;
<add> return children && (n = children.length) ? d3_layout_clusterRight(children[n - 1]) : node;
<ide> }
<ide><path>src/layout/hierarchy.js
<ide> d3.layout.hierarchy = function() {
<ide> return root;
<ide> };
<ide>
<del> // If the new API is used, enabling inlining.
<del> hierarchy.nodes = function(d) {
<del> d3_layout_hierarchyInline = true;
<del> return (hierarchy.nodes = hierarchy)(d);
<del> };
<del>
<ide> return hierarchy;
<ide> };
<ide>
<ide><path>test/layout/hierarchy-test.js
<ide> var suite = vows.describe("d3.layout.hierarchy");
<ide>
<ide> suite.addBatch({
<ide> "hierarchy": {
<del> topic: d3.layout.hierarchy,
<add> topic: function() {
<add> return d3.layout.treemap(); // hierarchy is abstract, so test a subclass
<add> },
<ide> "doesn't overwrite the value of a node that has an empty children array": function(hierarchy) {
<del> var nodes = hierarchy.nodes({value: 1, children: []});
<del> assert.deepEqual(nodes, [
<del> {children: [], depth: 0, value: 1}
<del> ]);
<del> hierarchy.revalue(nodes[0]);
<del> assert.deepEqual(nodes, [
<del> {children: [], depth: 0, value: 1}
<del> ]);
<add> var nodes = hierarchy.sticky(true).nodes({value: 1, children: []});
<add> assert.equal(nodes[0].value, 1);
<add> hierarchy.nodes(nodes[0]);
<add> assert.equal(nodes[0].value, 1);
<ide> },
<ide> "a valueless node that has an empty children array gets a value of 0": function(hierarchy) {
<del> var nodes = hierarchy.nodes({children: []});
<del> assert.deepEqual(nodes, [
<del> {children: [], depth: 0, value: 0}
<del> ]);
<del> hierarchy.revalue(nodes[0]);
<del> assert.deepEqual(nodes, [
<del> {children: [], depth: 0, value: 0}
<del> ]);
<add> var nodes = hierarchy.sticky(true).nodes({children: []});
<add> assert.equal(nodes[0].value, 0);
<add> hierarchy.nodes(nodes[0]);
<add> assert.equal(nodes[0].value, 0);
<ide> }
<ide> }
<ide> }); | 5 |
Javascript | Javascript | escape sample code in ng a directive | e34519e93bacfa02e220a663aba671403cdbcada | <ide><path>src/ng/directive/a.js
<ide> *
<ide> * The reasoning for this change is to allow easy creation of action links with `ngClick` directive
<ide> * without changing the location or causing page reloads, e.g.:
<del> * <a href="" ng-click="model.$save()">Save</a>
<add> * `<a href="" ng-click="model.$save()">Save</a>`
<ide> */
<ide> var htmlAnchorDirective = valueFn({
<ide> restrict: 'E', | 1 |
PHP | PHP | apply styleci fixes | 41df68e606e946d096bf851068d2b988a1a18717 | <ide><path>src/Illuminate/Console/Scheduling/CallbackEvent.php
<ide> use LogicException;
<ide> use InvalidArgumentException;
<ide> use Illuminate\Contracts\Container\Container;
<del>use Illuminate\Contracts\Cache\Repository as Cache;
<ide>
<ide> class CallbackEvent extends Event
<ide> {
<ide><path>src/Illuminate/Console/Scheduling/OverlappingStrategy.php
<ide> interface OverlappingStrategy
<ide> {
<ide> /**
<del> * prevents overlapping for the given event
<add> * prevents overlapping for the given event.
<ide> *
<ide> * @param Event $event
<ide> * @return void
<ide> */
<ide> public function prevent(Event $event);
<ide>
<ide> /**
<del> * checks if the given event's command is already running
<add> * checks if the given event's command is already running.
<ide> *
<ide> * @param Event $event
<ide> * @return bool
<ide> */
<ide> public function overlaps(Event $event);
<ide>
<ide> /**
<del> * resets the overlapping strategy for the given event
<add> * resets the overlapping strategy for the given event.
<ide> *
<ide> * @param Event $event
<ide> * @return void
<ide><path>src/Illuminate/Console/Scheduling/Schedule.php
<ide> public function __construct()
<ide> {
<ide> $container = Container::getInstance();
<ide>
<del> if (!$container->bound(OverlappingStrategy::class)) {
<add> if (! $container->bound(OverlappingStrategy::class)) {
<ide> $this->overlappingStrategy = $container->make(CacheOverlappingStrategy::class);
<ide> } else {
<ide> $this->overlappingStrategy = $container->make(OverlappingStrategy::class);
<ide><path>tests/Console/ConsoleEventSchedulerTest.php
<ide>
<ide> namespace Illuminate\Tests\Console;
<ide>
<del>use Illuminate\Container\Container;
<ide> use Mockery as m;
<ide> use PHPUnit\Framework\TestCase;
<ide> use Illuminate\Console\Scheduling\Schedule;
<ide><path>tests/Console/Scheduling/CacheOverlappingStrategyTest.php
<ide>
<ide> namespace Illuminate\Tests\Console\Scheduling;
<ide>
<del>use Illuminate\Console\Scheduling\CacheOverlappingStrategy;
<del>use Illuminate\Console\Scheduling\Event;
<del>use PHPUnit\Framework\TestCase;
<ide> use Mockery as m;
<add>use PHPUnit\Framework\TestCase;
<add>use Illuminate\Console\Scheduling\Event;
<add>use Illuminate\Console\Scheduling\CacheOverlappingStrategy;
<ide>
<ide> class CacheOverlappingStrategyTest extends TestCase
<ide> { | 5 |
Text | Text | specify max size for secret in doc | 5f8fcd9d63c055262da82306273034060a3bb306 | <ide><path>docs/reference/commandline/secret_create.md
<ide> Options:
<ide>
<ide> ## Description
<ide>
<del>Creates a secret using standard input or from a file for the secret content. You must run this
<del>command on a manager node.
<add>Creates a secret using standard input or from a file for the secret content. You must run this command on a manager node.
<add>
<add>For detailed information about using secrets, refer to [manage sensitive data with Docker secrets](https://docs.docker.com/engine/swarm/secrets/).
<ide>
<ide> ## Examples
<ide>
<ide><path>docs/reference/commandline/secret_inspect.md
<ide> the given template will be executed for each result.
<ide> Go's [text/template](http://golang.org/pkg/text/template/) package
<ide> describes all the details of the format.
<ide>
<add>For detailed information about using secrets, refer to [manage sensitive data with Docker secrets](https://docs.docker.com/engine/swarm/secrets/).
<add>
<ide> ## Examples
<ide>
<ide> ### Inspect a secret by name or ID
<ide><path>docs/reference/commandline/secret_ls.md
<ide> Options:
<ide>
<ide> Run this command on a manager node to list the secrets in the swarm.
<ide>
<add>For detailed information about using secrets, refer to [manage sensitive data with Docker secrets](https://docs.docker.com/engine/swarm/secrets/).
<add>
<ide> ## Examples
<ide>
<ide> ```bash
<ide><path>docs/reference/commandline/secret_rm.md
<ide> Options:
<ide> Removes the specified secrets from the swarm. This command has to be run
<ide> targeting a manager node.
<ide>
<add>For detailed information about using secrets, refer to [manage sensitive data with Docker secrets](https://docs.docker.com/engine/swarm/secrets/).
<add>
<ide> ## Examples
<ide>
<ide> This example removes a secret: | 4 |
Python | Python | simplify block implementation | a5c6f0dbfff36956d879010177cfea3eb4f5b04f | <ide><path>numpy/core/shape_base.py
<ide> def stack(arrays, axis=0, out=None):
<ide> return _nx.concatenate(expanded_arrays, axis=axis, out=out)
<ide>
<ide>
<del>class _Recurser(object):
<del> """
<del> Utility class for recursing over nested iterables
<del> """
<del> def __init__(self, recurse_if):
<del> self.recurse_if = recurse_if
<del>
<del> def map_reduce(self, x, f_map=lambda x, **kwargs: x,
<del> f_reduce=lambda x, **kwargs: x,
<del> f_kwargs=lambda **kwargs: kwargs,
<del> **kwargs):
<del> """
<del> Iterate over the nested list, applying:
<del> * ``f_map`` (T -> U) to items
<del> * ``f_reduce`` (Iterable[U] -> U) to mapped items
<del>
<del> For instance, ``map_reduce([[1, 2], 3, 4])`` is::
<del>
<del> f_reduce([
<del> f_reduce([
<del> f_map(1),
<del> f_map(2)
<del> ]),
<del> f_map(3),
<del> f_map(4)
<del> ]])
<del>
<del>
<del> State can be passed down through the calls with `f_kwargs`,
<del> to iterables of mapped items. When kwargs are passed, as in
<del> ``map_reduce([[1, 2], 3, 4], **kw)``, this becomes::
<del>
<del> kw1 = f_kwargs(**kw)
<del> kw2 = f_kwargs(**kw1)
<del> f_reduce([
<del> f_reduce([
<del> f_map(1), **kw2)
<del> f_map(2, **kw2)
<del> ], **kw1),
<del> f_map(3, **kw1),
<del> f_map(4, **kw1)
<del> ]], **kw)
<del> """
<del> def f(x, **kwargs):
<del> if not self.recurse_if(x):
<del> return f_map(x, **kwargs)
<del> else:
<del> next_kwargs = f_kwargs(**kwargs)
<del> return f_reduce((
<del> f(xi, **next_kwargs)
<del> for xi in x
<del> ), **kwargs)
<del> return f(x, **kwargs)
<del>
<del> def walk(self, x, index=()):
<del> """
<del> Iterate over x, yielding (index, value, entering), where
<del>
<del> * ``index``: a tuple of indices up to this point
<del> * ``value``: equal to ``x[index[0]][...][index[-1]]``. On the first iteration, is
<del> ``x`` itself
<del> * ``entering``: bool. The result of ``recurse_if(value)``
<del> """
<del> do_recurse = self.recurse_if(x)
<del> yield index, x, do_recurse
<del>
<del> if not do_recurse:
<del> return
<del> for i, xi in enumerate(x):
<del> # yield from ...
<del> for v in self.walk(xi, index + (i,)):
<del> yield v
<add>def _check_block_depths_match(arrays, index=[]):
<add> def format_index(index):
<add> idx_str = ''.join('[{}]'.format(i) for i in index if i is not None)
<add> return 'arrays' + idx_str
<add> if isinstance(arrays, tuple):
<add> raise TypeError(
<add> '{} is a tuple. '
<add> 'Only lists can be used to arrange blocks, and np.block does '
<add> 'not allow implicit conversion from tuple to ndarray.'.format(
<add> format_index(index)
<add> )
<add> )
<add> elif isinstance(arrays, list) and len(arrays) > 0:
<add> indexes = [_check_block_depths_match(arr, index + [i])
<add> for i, arr in enumerate(arrays)]
<add>
<add> first_index = indexes[0]
<add> for i, index in enumerate(indexes):
<add> if len(index) != len(first_index):
<add> raise ValueError(
<add> "List depths are mismatched. First element was at depth "
<add> "{}, but there is an element at depth {} ({})".format(
<add> len(first_index),
<add> len(index),
<add> format_index(index)
<add> )
<add> )
<add> return first_index
<add> elif isinstance(arrays, list) and len(arrays) == 0:
<add> return index + [None]
<add> else:
<add> # We've 'bottomed out'
<add> return index
<add>
<add>
<add>def _block(arrays, depth=0):
<add> if isinstance(arrays, list):
<add> if len(arrays) == 0:
<add> raise ValueError('Lists cannot be empty')
<add> arrs, list_ndims = zip(*(_block(arr, depth+1) for arr in arrays))
<add> list_ndim = list_ndims[0]
<add> arr_ndim = max(arr.ndim for arr in arrs)
<add> ndim = max(list_ndim, arr_ndim)
<add> arrs = tuple(map(lambda a: _nx.array(a, ndmin=ndim), arrs))
<add> return _nx.concatenate(arrs, axis=depth+ndim-list_ndim), list_ndim
<add> else:
<add> # We've 'bottomed out'
<add> return _nx.array(arrays, ndmin=depth), depth
<ide>
<ide>
<ide> def block(arrays):
<ide> def block(arrays):
<ide>
<ide>
<ide> """
<del> def atleast_nd(x, ndim):
<del> x = asanyarray(x)
<del> diff = max(ndim - x.ndim, 0)
<del> return x[(None,)*diff + (Ellipsis,)]
<del>
<del> def format_index(index):
<del> return 'arrays' + ''.join('[{}]'.format(i) for i in index)
<del>
<del> rec = _Recurser(recurse_if=lambda x: type(x) is list)
<del>
<del> # ensure that the lists are all matched in depth
<del> list_ndim = None
<del> any_empty = False
<del> for index, value, entering in rec.walk(arrays):
<del> if type(value) is tuple:
<del> # not strictly necessary, but saves us from:
<del> # - more than one way to do things - no point treating tuples like
<del> # lists
<del> # - horribly confusing behaviour that results when tuples are
<del> # treated like ndarray
<del> raise TypeError(
<del> '{} is a tuple. '
<del> 'Only lists can be used to arrange blocks, and np.block does '
<del> 'not allow implicit conversion from tuple to ndarray.'.format(
<del> format_index(index)
<del> )
<del> )
<del> if not entering:
<del> curr_depth = len(index)
<del> elif len(value) == 0:
<del> curr_depth = len(index) + 1
<del> any_empty = True
<del> else:
<del> continue
<del>
<del> if list_ndim is not None and list_ndim != curr_depth:
<del> raise ValueError(
<del> "List depths are mismatched. First element was at depth {}, "
<del> "but there is an element at depth {} ({})".format(
<del> list_ndim,
<del> curr_depth,
<del> format_index(index)
<del> )
<del> )
<del> list_ndim = curr_depth
<del>
<del> # do this here so we catch depth mismatches first
<del> if any_empty:
<del> raise ValueError('Lists cannot be empty')
<del>
<del> # convert all the arrays to ndarrays
<del> arrays = rec.map_reduce(arrays,
<del> f_map=asanyarray,
<del> f_reduce=list
<del> )
<del>
<del> # determine the maximum dimension of the elements
<del> elem_ndim = rec.map_reduce(arrays,
<del> f_map=lambda xi: xi.ndim,
<del> f_reduce=max
<del> )
<del> ndim = max(list_ndim, elem_ndim)
<del>
<del> # first axis to concatenate along
<del> first_axis = ndim - list_ndim
<del>
<del> # Make all the elements the same dimension
<del> arrays = rec.map_reduce(arrays,
<del> f_map=lambda xi: atleast_nd(xi, ndim),
<del> f_reduce=list
<del> )
<del>
<del> # concatenate innermost lists on the right, outermost on the left
<del> return rec.map_reduce(arrays,
<del> f_reduce=lambda xs, axis: _nx.concatenate(list(xs), axis=axis),
<del> f_kwargs=lambda axis: dict(axis=axis+1),
<del> axis=first_axis
<del> )
<add> _check_block_depths_match(arrays)
<add> return _block(arrays)[0] | 1 |
PHP | PHP | add method for easier error bag checking | 4287ebc76025cd31e0ba6730481a95aeb471e305 | <ide><path>src/Illuminate/Foundation/Testing/TestResponse.php
<ide> public function assertSessionHasErrors($keys = [], $format = null, $errorBag = '
<ide> return $this;
<ide> }
<ide>
<add> /**
<add> * Assert that the session has the given errors.
<add> *
<add> * @param string $errorBag
<add> * @param string|array $keys
<add> * @param mixed $format
<add> * @return $this
<add> */
<add> public function assertSessionHasErrorsIn($errorBag, $keys = [], $format = null)
<add> {
<add> return $this->assertSessionHasErrors($keys, $format, $errorBag);
<add> }
<add>
<ide> /**
<ide> * Assert that the session does not have a given key.
<ide> * | 1 |
Java | Java | clarify semantics of classpathresource.getpath() | 1052f48eb69c51a63c06864856abd4d9c71d2348 | <ide><path>spring-core/src/main/java/org/springframework/core/io/ClassPathResource.java
<ide> /*
<del> * Copyright 2002-2021 the original author or authors.
<add> * Copyright 2002-2022 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide> *
<ide> * <p>Supports resolution as {@code java.io.File} if the class path
<ide> * resource resides in the file system, but not for resources in a JAR.
<del> * Always supports resolution as URL.
<add> * Always supports resolution as {@code java.net.URL}.
<ide> *
<ide> * @author Juergen Hoeller
<ide> * @author Sam Brannen
<ide> * @since 28.12.2003
<ide> * @see ClassLoader#getResourceAsStream(String)
<add> * @see ClassLoader#getResource(String)
<ide> * @see Class#getResourceAsStream(String)
<add> * @see Class#getResource(String)
<ide> */
<ide> public class ClassPathResource extends AbstractFileResolvingResource {
<ide>
<ide> public class ClassPathResource extends AbstractFileResolvingResource {
<ide>
<ide> /**
<ide> * Create a new {@code ClassPathResource} for {@code ClassLoader} usage.
<del> * A leading slash will be removed, as the ClassLoader resource access
<del> * methods will not accept it.
<del> * <p>The thread context class loader will be used for
<del> * loading the resource.
<add> * <p>A leading slash will be removed, as the {@code ClassLoader} resource
<add> * access methods will not accept it.
<add> * <p>The default class loader will be used for loading the resource.
<ide> * @param path the absolute path within the class path
<del> * @see java.lang.ClassLoader#getResourceAsStream(String)
<del> * @see org.springframework.util.ClassUtils#getDefaultClassLoader()
<add> * @see ClassUtils#getDefaultClassLoader()
<ide> */
<ide> public ClassPathResource(String path) {
<ide> this(path, (ClassLoader) null);
<ide> }
<ide>
<ide> /**
<ide> * Create a new {@code ClassPathResource} for {@code ClassLoader} usage.
<del> * A leading slash will be removed, as the ClassLoader resource access
<del> * methods will not accept it.
<del> * @param path the absolute path within the classpath
<del> * @param classLoader the class loader to load the resource with,
<del> * or {@code null} for the thread context class loader
<del> * @see ClassLoader#getResourceAsStream(String)
<add> * <p>A leading slash will be removed, as the {@code ClassLoader} resource
<add> * access methods will not accept it.
<add> * <p>If the supplied {@code ClassLoader} is {@code null}, the default class
<add> * loader will be used for loading the resource.
<add> * @param path the absolute path within the class path
<add> * @param classLoader the class loader to load the resource with
<add> * @see ClassUtils#getDefaultClassLoader()
<ide> */
<ide> public ClassPathResource(String path, @Nullable ClassLoader classLoader) {
<ide> Assert.notNull(path, "Path must not be null");
<ide> public ClassPathResource(String path, @Nullable ClassLoader classLoader) {
<ide>
<ide> /**
<ide> * Create a new {@code ClassPathResource} for {@code Class} usage.
<del> * The path can be relative to the given class, or absolute within
<del> * the classpath via a leading slash.
<add> * <p>The path can be relative to the given class, or absolute within
<add> * the class path via a leading slash.
<add> * <p>If the supplied {@code Class} is {@code null}, the default class
<add> * loader will be used for loading the resource.
<ide> * @param path relative or absolute path within the class path
<ide> * @param clazz the class to load resources with
<del> * @see java.lang.Class#getResourceAsStream
<add> * @see ClassUtils#getDefaultClassLoader()
<ide> */
<ide> public ClassPathResource(String path, @Nullable Class<?> clazz) {
<ide> Assert.notNull(path, "Path must not be null");
<ide> public ClassPathResource(String path, @Nullable Class<?> clazz) {
<ide>
<ide>
<ide> /**
<del> * Return the path for this resource (as resource path within the class path).
<add> * Return the path for this resource.
<add> * <p>If this {@code ClassPathResource} was created using
<add> * {@link ClassPathResource#ClassPathResource(String) ClassPathResource(String)} or
<add> * {@link ClassPathResource#ClassPathResource(String, ClassLoader)
<add> * ClassPathResource(String, ClassLoader)}, the returned path is a
<add> * {@linkplain StringUtils#cleanPath(String) cleaned} version of the
<add> * <em>absolute path</em> supplied to the constructor.
<add> * <p>If this {@code ClassPathResource} was created using
<add> * {@link ClassPathResource#ClassPathResource(String, Class)}, the
<add> * returned path is a {@linkplain StringUtils#cleanPath(String) cleaned}
<add> * version of the <em>relative path</em> supplied to the constructor.
<ide> */
<ide> public final String getPath() {
<ide> return this.path;
<ide> }
<ide>
<ide> /**
<del> * Return the ClassLoader that this resource will be obtained from.
<add> * Return the {@link ClassLoader} that this resource will be obtained from.
<ide> */
<ide> @Nullable
<ide> public final ClassLoader getClassLoader() {
<ide> public final ClassLoader getClassLoader() {
<ide>
<ide> /**
<ide> * This implementation checks for the resolution of a resource URL.
<del> * @see java.lang.ClassLoader#getResource(String)
<del> * @see java.lang.Class#getResource(String)
<add> * @see ClassLoader#getResource(String)
<add> * @see Class#getResource(String)
<ide> */
<ide> @Override
<ide> public boolean exists() {
<ide> public boolean exists() {
<ide> /**
<ide> * This implementation checks for the resolution of a resource URL upfront,
<ide> * then proceeding with {@link AbstractFileResolvingResource}'s length check.
<del> * @see java.lang.ClassLoader#getResource(String)
<del> * @see java.lang.Class#getResource(String)
<add> * @see ClassLoader#getResource(String)
<add> * @see Class#getResource(String)
<ide> */
<ide> @Override
<ide> public boolean isReadable() {
<ide> else if (this.classLoader != null) {
<ide>
<ide> /**
<ide> * This implementation opens an InputStream for the given class path resource.
<del> * @see java.lang.ClassLoader#getResourceAsStream(String)
<del> * @see java.lang.Class#getResourceAsStream(String)
<add> * @see ClassLoader#getResourceAsStream(String)
<add> * @see Class#getResourceAsStream(String)
<ide> */
<ide> @Override
<ide> public InputStream getInputStream() throws IOException {
<ide> else if (this.classLoader != null) {
<ide> /**
<ide> * This implementation returns a URL for the underlying class path resource,
<ide> * if available.
<del> * @see java.lang.ClassLoader#getResource(String)
<del> * @see java.lang.Class#getResource(String)
<add> * @see ClassLoader#getResource(String)
<add> * @see Class#getResource(String)
<ide> */
<ide> @Override
<ide> public URL getURL() throws IOException {
<ide> public URL getURL() throws IOException {
<ide> }
<ide>
<ide> /**
<del> * This implementation creates a ClassPathResource, applying the given path
<del> * relative to the path of the underlying resource of this descriptor.
<del> * @see org.springframework.util.StringUtils#applyRelativePath(String, String)
<add> * This implementation creates a {@code ClassPathResource}, applying the given
<add> * path relative to the {@link #getPath() path} of the underlying resource of
<add> * this descriptor.
<add> * @see StringUtils#applyRelativePath(String, String)
<ide> */
<ide> @Override
<ide> public Resource createRelative(String relativePath) {
<ide> public Resource createRelative(String relativePath) {
<ide> /**
<ide> * This implementation returns the name of the file that this class path
<ide> * resource refers to.
<del> * @see org.springframework.util.StringUtils#getFilename(String)
<add> * @see StringUtils#getFilename(String)
<ide> */
<ide> @Override
<ide> @Nullable
<ide> public String getFilename() {
<ide> }
<ide>
<ide> /**
<del> * This implementation returns a description that includes the class path location.
<add> * This implementation returns a description that includes the absolute
<add> * class path location.
<ide> */
<ide> @Override
<ide> public String getDescription() { | 1 |
Javascript | Javascript | exclude docs and samples too | 2acf1467fc5bf85c8053820c3b83646946d2255a | <ide><path>script/lib/include-path-in-packaged-app.js
<ide> const EXCLUDE_REGEXPS_SOURCES = [
<ide>
<ide> // Ignore test and example folders
<ide> 'node_modules' + escapeRegExp(path.sep) + '.*' + escapeRegExp(path.sep) + '_*te?sts?_*' + escapeRegExp(path.sep),
<del> 'node_modules' + escapeRegExp(path.sep) + '.*' + escapeRegExp(path.sep) + 'examples?' + escapeRegExp(path.sep)
<add> 'node_modules' + escapeRegExp(path.sep) + '.*' + escapeRegExp(path.sep) + 'docs' + escapeRegExp(path.sep),
<add> 'node_modules' + escapeRegExp(path.sep) + '.*' + escapeRegExp(path.sep) + 'examples?' + escapeRegExp(path.sep),
<add> 'node_modules' + escapeRegExp(path.sep) + '.*' + escapeRegExp(path.sep) + 'samples?' + escapeRegExp(path.sep),
<ide> ]
<ide>
<ide> // Ignore spec directories in all bundled packages | 1 |
Ruby | Ruby | treat combined durations as a single unit | 32f215c3014e580e512db5e8772d0deadf3d6497 | <ide><path>activesupport/lib/active_support/duration.rb
<ide> class Duration
<ide> autoload :ISO8601Serializer, "active_support/duration/iso8601_serializer"
<ide>
<ide> def initialize(value, parts) #:nodoc:
<del> @value, @parts = value, parts
<add> @value, @parts = value, parts.to_h
<add> @parts.default = 0
<ide> end
<ide>
<ide> # Adds another Duration or a Numeric to this Duration. Numeric values
<ide> # are treated as seconds.
<ide> def +(other)
<ide> if Duration === other
<del> Duration.new(value + other.value, @parts + other.parts)
<add> parts = @parts.dup
<add> other.parts.each do |(key, value)|
<add> parts[key] += value
<add> end
<add> Duration.new(value + other.value, parts)
<ide> else
<del> Duration.new(value + other, @parts + [[:seconds, other]])
<add> seconds = @parts[:seconds] + other
<add> Duration.new(value + other, @parts.merge(seconds: seconds))
<ide> end
<ide> end
<ide>
<ide><path>activesupport/test/core_ext/duration_test.rb
<ide> def test_iso8601_parsing_across_autumn_dst_boundary
<ide> end
<ide> end
<ide>
<add> def test_adding_durations_do_not_hold_prior_states
<add> time = Time.parse("Nov 29, 2016")
<add> # If the implementation adds and subtracts 3 months, the
<add> # resulting date would have been in February so the day will
<add> # change to the 29th.
<add> d1 = 3.months - 3.months
<add> d2 = 2.months - 2.months
<add>
<add> assert_equal time + d1, time + d2
<add> end
<add>
<ide> private
<ide> def eastern_time_zone
<ide> if Gem.win_platform? | 2 |
PHP | PHP | add a test demonstrating current behavior | f9de012ca883b2051e4a7bd1013170a9452c46bc | <ide><path>tests/TestCase/ORM/AssociationTest.php
<ide> */
<ide> namespace Cake\Test\TestCase\ORM;
<ide>
<add>use Cake\Core\Plugin;
<ide> use Cake\ORM\Association;
<ide> use Cake\ORM\Table;
<ide> use Cake\ORM\TableRegistry;
<ide> public function testTarget()
<ide> $this->assertSame($other, $this->association->target());
<ide> }
<ide>
<add> /**
<add> * Tests that target() returns the correct Table object for plugins
<add> *
<add> * @return void
<add> */
<add> public function testTargetPlugin()
<add> {
<add> Plugin::load('TestPlugin');
<add>
<add> $config = [
<add> 'className' => 'TestPlugin.Comments',
<add> 'foreignKey' => 'a_key',
<add> 'conditions' => ['field' => 'value'],
<add> 'dependent' => true,
<add> 'sourceTable' => $this->source,
<add> 'joinType' => 'INNER'
<add> ];
<add> $this->association = $this->getMock('\Cake\ORM\Association',
<add> [
<add> '_options', 'attachTo', '_joinCondition', 'cascadeDelete', 'isOwningSide',
<add> 'saveAssociated', 'eagerLoader', 'type'
<add> ],
<add> ['ThisAssociationName', $config]
<add> );
<add>
<add> $table = $this->association->target();
<add> $this->assertInstanceOf('TestPlugin\Model\Table\CommentsTable', $table);
<add>
<add> $this->assertFalse(TableRegistry::exists('TestPlugin.Comments'));
<add> $this->assertFalse(TableRegistry::exists('Comments'));
<add> $this->assertTrue(TableRegistry::exists('ThisAssociationName'));
<add>
<add> $plugin = TableRegistry::get('ThisAssociationName');
<add> $this->assertSame($table, $plugin, 'Should be the same TestPlugin.Comments object');
<add> }
<add>
<ide> /**
<ide> * Tests that source() returns the correct Table object
<ide> * | 1 |
Text | Text | fix the typo url and info | f6bd62a8d735d4748dc4e5e4235e04284a7f5af6 | <ide><path>experimental/vlan-networks.md
<ide> Macvlan Bridge mode has a unique MAC address per container used to track MAC to
<ide>
<ide> - In Macvlan and Ipvlan L2 mode, containers on separate networks cannot reach one another without an external process routing between the two networks/subnets. This also applies to multiple subnets within the same `docker network`. See Ipvlan L3 mode for inter-subnet communications without a router.
<ide>
<del>In the following example, `eth0` on the docker host has an IP on the `192.168.1.0/24` network and a default gateway of `192.168.1.1`. The gateway is an external router with an address of `192.168.1.1`. An IP address is not required on the Docker host interface `eth0` in `bridge` mode, it merely needs to be on the proper upstream network to get forwarded by a network switch or network router.
<add>In the following example, `eth0` on the docker host has an IP on the `172.16.86.0/24` network and a default gateway of `172.16.86.1`. The gateway is an external router with an address of `172.16.86.1`. An IP address is not required on the Docker host interface `eth0` in `bridge` mode, it merely needs to be on the proper upstream network to get forwarded by a network switch or network router.
<ide>
<del>
<add>
<ide>
<ide>
<ide> **Note** For Macvlan bridge mode and Ipvlan L2 mode the subnet values need to match the NIC's interface of the Docker host. For example, Use the same subnet and gateway of the Docker host ethernet interface that is specified by the `-o parent=` option.
<ide> For more on Docker networking commands see [Working with Docker network commands
<ide>
<ide> The ipvlan `L2` mode example is virtually identical to the macvlan `bridge` mode example. The driver is specified with `-d driver_name` option. In this case `-d ipvlan`
<ide>
<del>
<add>
<ide>
<ide> The parent interface in the next example `-o parent=eth0` is configured as followed:
<ide> | 1 |
PHP | PHP | add getpath() to plugins | 4efb602bd2e5876f7d11e2b8c12bd7db81a8e912 | <ide><path>src/Core/PluginApp.php
<ide> */
<ide> namespace Cake\Core;
<ide>
<add>use ReflectionClass;
<add>
<ide> /**
<ide> * Base Plugin Class
<ide> *
<ide> class PluginApp implements PluginInterface
<ide> */
<ide> protected $consoleEnabled = true;
<ide>
<add> /**
<add> * The path to this plugin.
<add> *
<add> * @var string
<add> */
<add> protected $path;
<add>
<ide> /**
<ide> * Constructor
<ide> *
<ide> public function __construct(array $options = [])
<ide> $this->{"{$key}Enabled"} = (bool)$options[$key];
<ide> }
<ide> }
<add> if (isset($options['path'])) {
<add> $this->path = $options['path'];
<add> }
<ide>
<ide> $this->initialize();
<ide> }
<ide> public function getName()
<ide> return implode('/', $parts);
<ide> }
<ide>
<add> /**
<add> * {@inheritDoc}
<add> */
<add> public function getPath()
<add> {
<add> if ($this->path) {
<add> return $this->path;
<add> }
<add> $reflection = new ReflectionClass($this);
<add> $path = dirname($reflection->getFileName());
<add>
<add> // Trim off src
<add> if (substr($path, -3) === 'src') {
<add> $path = substr($path, 0, -3);
<add> }
<add>
<add> return rtrim($path, DIRECTORY_SEPARATOR) . DIRECTORY_SEPARATOR;
<add> }
<add>
<ide> /**
<ide> * {@inheritdoc}
<ide> */
<ide><path>src/Core/PluginInterface.php
<ide> interface PluginInterface
<ide> */
<ide> public function getName();
<ide>
<add> /**
<add> * Get the filesystem path to this plugin
<add> *
<add> * @return void
<add> */
<add> public function getPath();
<add>
<ide> /**
<ide> * Load all the application configuration and bootstrap logic.
<ide> *
<ide><path>tests/TestCase/Core/PluginAppTest.php
<ide> public function testConstructorArguments()
<ide> $this->assertFalse($plugin->isConsoleEnabled());
<ide> $this->assertFalse($plugin->isMiddlewareEnabled());
<ide> }
<add>
<add> public function testGetPathBaseClass()
<add> {
<add> $plugin = new PluginApp();
<add>
<add> $expected = CAKE . 'Core' . DS;
<add> $this->assertSame($expected, $plugin->getPath());
<add> }
<add>
<add> public function testGetPathOptionValue()
<add> {
<add> $plugin = new PluginApp(['path' => '/some/path']);
<add> $this->assertSame('/some/path', $plugin->getPath());
<add> }
<add>
<add> public function testGetPathSubclass()
<add> {
<add> $plugin = new TestPlugin();
<add> $this->assertSame(TEST_APP . 'Plugin/TestPlugin' . DS, $plugin->getPath());
<add> }
<ide> } | 3 |
PHP | PHP | add getbroker() and getguard() | 3f1c412034d6d3ef233afcb514f65c05a4a2e5b6 | <ide><path>src/Illuminate/Foundation/Auth/ResetsPasswords.php
<ide> trait ResetsPasswords
<ide> {
<ide> use RedirectsUsers;
<ide>
<add> /**
<add> * Get the broker to be used during resetting the password
<add> * @return string|null
<add> */
<add> public function getBroker()
<add> {
<add> return property_exists($this, 'broker') ? $this->broker : null;
<add> }
<add>
<add> /**
<add> * Get the guard to be used during authentication.
<add> *
<add> * @return string|null
<add> */
<add> protected function getGuard()
<add> {
<add> return property_exists($this, 'guard') ? $this->guard : null;
<add> }
<add>
<ide> /**
<ide> * Display the form to request a password reset link.
<ide> *
<ide> public function sendResetLinkEmail(Request $request)
<ide> {
<ide> $this->validate($request, ['email' => 'required|email']);
<ide>
<del> $response = Password::sendResetLink($request->only('email'), function (Message $message) {
<add> $response = Password::broker($this->getBroker())->sendResetLink($request->only('email'), function (Message $message) {
<ide> $message->subject($this->getEmailSubject());
<ide> });
<ide>
<ide> public function reset(Request $request)
<ide> 'email', 'password', 'password_confirmation', 'token'
<ide> );
<ide>
<del> $response = Password::reset($credentials, function ($user, $password) {
<add> $response = Password::broker($this->getBroker())->reset($credentials, function ($user, $password) {
<ide> $this->resetPassword($user, $password);
<ide> });
<ide>
<ide> protected function resetPassword($user, $password)
<ide>
<ide> $user->save();
<ide>
<del> Auth::login($user);
<add> Auth::guard($this->getGuard())->login($user);
<ide> }
<ide>
<ide> /** | 1 |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.