column        type            range
content_type  stringclasses   8 values
main_lang     stringclasses   7 values
message       stringlengths   1 to 50
sha           stringlengths   40 to 40
patch         stringlengths   52 to 962k
file_count    int64           1 to 300
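Each row below lists content_type, main_lang, commit message, sha, patch, and file_count in that order. The patch column stores a diff with inline markers: <ide><path> opens a new file section, <ide> prefixes unchanged context lines, <add> prefixes added lines, and <del> prefixes removed lines. What follows is a minimal Python sketch of how such a patch value could be split back into per-file marker lines; the function name and the sample string are illustrative only and not part of the dataset.

import re

def split_patch_by_file(patch: str) -> dict:
    """Best-effort split of a flattened patch string into per-file marker lines.

    Assumes every logical line starts with one of the markers seen in the rows
    below: "<ide><path>" (new file), "<ide>" (context), "<add>" (added),
    "<del>" (removed).
    """
    files = {}
    current_path = None
    for chunk in re.split(r"(?=<(?:ide|add|del)>)", patch):
        if not chunk.strip():
            continue
        if chunk.startswith("<ide><path>"):
            # The path is the first whitespace-delimited token after the marker.
            current_path = chunk[len("<ide><path>"):].strip().split(" ", 1)[0]
            files[current_path] = []
        elif current_path is not None:
            files[current_path].append(chunk.strip())
    return files

# Illustrative usage with a tiny synthetic patch in the same marker format:
sample = "<ide><path>README.md <ide> kept line <del> removed line <add> added line"
print(split_patch_by_file(sample))
# {'README.md': ['<ide> kept line', '<del> removed line', '<add> added line']}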
PHP
PHP
make code dryer
fe545c579e70added74073b7b51a05f8f56d13fa
<ide><path>src/Core/Configure/Engine/IniConfig.php <ide> public function __construct($path = null, $section = null) <ide> */ <ide> public function read($key) <ide> { <del> if (strpos($key, '..') !== false) { <del> throw new Exception('Cannot load configuration files with ../ in them.'); <del> } <del> <del> $file = $this->_getFilePath($key); <del> if (!is_file($file)) { <del> throw new Exception(sprintf('Could not load configuration file: %s', $file)); <del> } <add> $file = $this->_getFilePath($key, true); <ide> <ide> $contents = parse_ini_file($file, true); <ide> if (!empty($this->_section) && isset($contents[$this->_section])) { <ide><path>src/Core/Configure/Engine/PhpConfig.php <ide> public function __construct($path = null) <ide> */ <ide> public function read($key) <ide> { <del> if (strpos($key, '..') !== false) { <del> throw new Exception('Cannot load configuration files with ../ in them.'); <del> } <del> <del> $file = $this->_getFilePath($key); <del> if (!is_file($file)) { <del> throw new Exception(sprintf('Could not load configuration file: %s', $file)); <del> } <add> $file = $this->_getFilePath($key, true); <ide> <ide> include $file; <ide> if (!isset($config)) { <ide><path>src/Core/Configure/FileConfigTrait.php <ide> */ <ide> namespace Cake\Core\Configure; <ide> <add>use Cake\Core\Exception\Exception; <ide> use Cake\Core\Plugin; <ide> <ide> /** <ide> trait FileConfigTrait <ide> * <ide> * @param string $key The identifier to write to. If the key has a . it will be treated <ide> * as a plugin prefix. <del> * @param string $ext File extension. <add> * @param bool $checkExists Whether to check if file exists. Defaults to false. <ide> * @return string Full file path <add> * @throws \Cake\Core\Exception\Exception When files don't exist or when <add> * files contain '..' as this could lead to abusive reads. <ide> */ <del> protected function _getFilePath($key) <add> protected function _getFilePath($key, $checkExists = false) <ide> { <add> if (strpos($key, '..') !== false) { <add> throw new Exception('Cannot load/dump configuration files with ../ in them.'); <add> } <add> <ide> list($plugin, $key) = pluginSplit($key); <ide> <ide> if ($plugin) { <ide> protected function _getFilePath($key) <ide> $file = $this->_path . $key; <ide> } <ide> <del> return $file . $this->_extension; <add> $file .= $this->_extension; <add> <add> if ($checkExists && !is_file($file)) { <add> throw new Exception(sprintf('Could not load configuration file: %s', $file)); <add> } <add> <add> return $file; <ide> } <ide> }
3
Text
Text
update readme to clarify guide status
927010fa10c815132f1da6b38d046fcf9b9751f2
<ide><path>README.md <ide> Our community also has: <ide> - A [forum](https://www.freecodecamp.org/forum) where you can usually get programming help or project feedback within hours. <ide> - A [YouTube channel](https://youtube.com/freecodecamp) with free courses on Python, SQL, Android, and a wide variety of other technologies. <ide> - A [podcast](https://podcast.freecodecamp.org/) with technology insights and inspiring stories from developers. <del>- A comprehensive [guide to thousands of programming topics](https://guide.freecodecamp.org/) <ide> - A [Developer News](https://www.freecodecamp.org/news) publication, a free, open source, no-ads place to cross-post your blog articles. <ide> <ide> > ### [Join our community here](https://www.freecodecamp.org/signin).
1
Python
Python
fix four bugs in stackdrivertaskhandler
833e3383230e1f6f73f8022ddf439d3d531eff01
<ide><path>airflow/providers/google/cloud/log/stackdriver_task_handler.py <ide> def __init__( <ide> self.resource: Resource = resource <ide> self.labels: Optional[Dict[str, str]] = labels <ide> self.task_instance_labels: Optional[Dict[str, str]] = {} <add> self.task_instance_hostname = 'default-hostname' <ide> <ide> @cached_property <ide> def _client(self) -> gcp_logging.Client: <ide> def set_context(self, task_instance: TaskInstance) -> None: <ide> :type task_instance: :class:`airflow.models.TaskInstance` <ide> """ <ide> self.task_instance_labels = self._task_instance_to_labels(task_instance) <add> self.task_instance_hostname = task_instance.hostname <ide> <ide> def read( <ide> self, task_instance: TaskInstance, try_number: Optional[int] = None, metadata: Optional[Dict] = None <del> ) -> Tuple[List[str], List[Dict]]: <add> ) -> Tuple[List[Tuple[Tuple[str, str]]], List[Dict[str, str]]]: <ide> """ <ide> Read logs of given task instance from Stackdriver logging. <ide> <ide> def read( <ide> :type try_number: Optional[int] <ide> :param metadata: log metadata. It is used for steaming log reading and auto-tailing. <ide> :type metadata: Dict <del> :return: a tuple of list of logs and list of metadata <del> :rtype: Tuple[List[str], List[Dict]] <add> :return: a tuple of ( <add> list of (one element tuple with two element tuple - hostname and logs) <add> and list of metadata) <add> :rtype: Tuple[List[Tuple[Tuple[str, str]]], List[Dict[str, str]]] <ide> """ <ide> if try_number is not None and try_number < 1: <del> logs = [f"Error fetching the logs. Try number {try_number} is invalid."] <del> return logs, [{"end_of_log": "true"}] <add> logs = f"Error fetching the logs. Try number {try_number} is invalid." <add> return [((self.task_instance_hostname, logs),)], [{"end_of_log": "true"}] <ide> <ide> if not metadata: <ide> metadata = {} <ide> def read( <ide> if next_page_token: <ide> new_metadata['next_page_token'] = next_page_token <ide> <del> return [messages], [new_metadata] <add> return [((self.task_instance_hostname, messages),)], [new_metadata] <ide> <ide> def _prepare_log_filter(self, ti_labels: Dict[str, str]) -> str: <ide> """ <ide> def _read_logs( <ide> log_filter=log_filter, page_token=next_page_token <ide> ) <ide> messages.append(new_messages) <add> if not messages: <add> break <ide> <ide> end_of_log = True <ide> next_page_token = None <ide> def _read_single_logs_page(self, log_filter: str, page_token: Optional[str] = No <ide> :return: Downloaded logs and next page token <ide> :rtype: Tuple[str, str] <ide> """ <del> entries = self._client.list_entries(filter_=log_filter, page_token=page_token) <add> entries = self._client.list_entries( <add> filter_=log_filter, page_token=page_token, order_by='timestamp asc', page_size=1000 <add> ) <ide> page = next(entries.pages) <ide> next_page_token = entries.next_page_token <ide> messages = [] <ide> def get_external_log_url(self, task_instance: TaskInstance, try_number: int) -> <ide> <ide> url = f"{self.LOG_VIEWER_BASE_URL}?{urlencode(url_query_string)}" <ide> return url <add> <add> def close(self) -> None: <add> self._transport.flush() <ide><path>airflow/utils/log/log_reader.py <ide> # under the License. 
<ide> <ide> import logging <del>from typing import Any, Dict, Iterator, List, Optional, Tuple <add>from typing import Dict, Iterator, List, Optional, Tuple <ide> <ide> from cached_property import cached_property <ide> <ide> class TaskLogReader: <ide> <ide> def read_log_chunks( <ide> self, ti: TaskInstance, try_number: Optional[int], metadata <del> ) -> Tuple[List[str], Dict[str, Any]]: <add> ) -> Tuple[List[Tuple[Tuple[str, str]]], Dict[str, str]]: <ide> """ <ide> Reads chunks of Task Instance logs. <ide> <ide> def read_log_chunks( <ide> :type try_number: Optional[int] <ide> :param metadata: A dictionary containing information about how to read the task log <ide> :type metadata: dict <del> :rtype: Tuple[List[str], Dict[str, Any]] <add> :rtype: Tuple[List[Tuple[Tuple[str, str]]], Dict[str, str]] <ide> <ide> The following is an example of how to use this method to read log: <ide> <ide><path>tests/cli/commands/test_info_command.py <ide> import contextlib <ide> import importlib <ide> import io <add>import logging <ide> import os <ide> import unittest <ide> from unittest import mock <ide> def test_should_read_logging_configuration(self): <ide> assert "stackdriver" in text <ide> <ide> def tearDown(self) -> None: <add> for handler_ref in logging._handlerList[:]: <add> logging._removeHandlerRef(handler_ref) <ide> importlib.reload(airflow_local_settings) <ide> configure_logging() <ide> <ide><path>tests/providers/google/cloud/log/test_stackdriver_task_handler.py <ide> def _create_list_response(messages, token): <ide> return mock.MagicMock(pages=(n for n in [page]), next_page_token=token) <ide> <ide> <add>def _remove_stackdriver_handlers(): <add> for handler_ref in reversed(logging._handlerList[:]): <add> handler = handler_ref() <add> if not isinstance(handler, StackdriverTaskHandler): <add> continue <add> logging._removeHandlerRef(handler_ref) <add> del handler <add> <add> <ide> class TestStackdriverLoggingHandlerStandalone(unittest.TestCase): <ide> @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.get_credentials_and_project_id') <ide> @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.gcp_logging.Client') <ide> def test_should_pass_message_to_client(self, mock_client, mock_get_creds_and_project_id): <add> self.addCleanup(_remove_stackdriver_handlers) <add> <ide> mock_get_creds_and_project_id.return_value = ('creds', 'project_id') <ide> <ide> transport_type = mock.MagicMock() <ide> def setUp(self) -> None: <ide> self.ti.try_number = 1 <ide> self.ti.state = State.RUNNING <ide> self.addCleanup(self.dag.clear) <add> self.addCleanup(_remove_stackdriver_handlers) <ide> <ide> @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.get_credentials_and_project_id') <ide> @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.gcp_logging.Client') <ide> def test_should_read_logs_for_all_try(self, mock_client, mock_get_creds_and_proj <ide> <ide> logs, metadata = self.stackdriver_task_handler.read(self.ti) <ide> mock_client.return_value.list_entries.assert_called_once_with( <del> filter_='resource.type="global"\n' <del> 'logName="projects/asf-project/logs/airflow"\n' <del> 'labels.task_id="task_for_testing_file_log_handler"\n' <del> 'labels.dag_id="dag_for_testing_file_task_handler"\n' <del> 'labels.execution_date="2016-01-01T00:00:00+00:00"', <add> filter_=( <add> 'resource.type="global"\n' <add> 'logName="projects/asf-project/logs/airflow"\n' <add> 'labels.task_id="task_for_testing_file_log_handler"\n' <add> 
'labels.dag_id="dag_for_testing_file_task_handler"\n' <add> 'labels.execution_date="2016-01-01T00:00:00+00:00"' <add> ), <add> order_by='timestamp asc', <add> page_size=1000, <ide> page_token=None, <ide> ) <del> assert ['MSG1\nMSG2'] == logs <add> assert [(('default-hostname', 'MSG1\nMSG2'),)] == logs <ide> assert [{'end_of_log': True}] == metadata <ide> <ide> @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.get_credentials_and_project_id') <ide> def test_should_read_logs_for_task_with_quote(self, mock_client, mock_get_creds_ <ide> self.ti.task_id = "K\"OT" <ide> logs, metadata = self.stackdriver_task_handler.read(self.ti) <ide> mock_client.return_value.list_entries.assert_called_once_with( <del> filter_='resource.type="global"\n' <del> 'logName="projects/asf-project/logs/airflow"\n' <del> 'labels.task_id="K\\"OT"\n' <del> 'labels.dag_id="dag_for_testing_file_task_handler"\n' <del> 'labels.execution_date="2016-01-01T00:00:00+00:00"', <add> filter_=( <add> 'resource.type="global"\n' <add> 'logName="projects/asf-project/logs/airflow"\n' <add> 'labels.task_id="K\\"OT"\n' <add> 'labels.dag_id="dag_for_testing_file_task_handler"\n' <add> 'labels.execution_date="2016-01-01T00:00:00+00:00"' <add> ), <add> order_by='timestamp asc', <add> page_size=1000, <ide> page_token=None, <ide> ) <del> assert ['MSG1\nMSG2'] == logs <add> assert [(('default-hostname', 'MSG1\nMSG2'),)] == logs <ide> assert [{'end_of_log': True}] == metadata <ide> <ide> @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.get_credentials_and_project_id') <ide> def test_should_read_logs_for_single_try(self, mock_client, mock_get_creds_and_p <ide> <ide> logs, metadata = self.stackdriver_task_handler.read(self.ti, 3) <ide> mock_client.return_value.list_entries.assert_called_once_with( <del> filter_='resource.type="global"\n' <del> 'logName="projects/asf-project/logs/airflow"\n' <del> 'labels.task_id="task_for_testing_file_log_handler"\n' <del> 'labels.dag_id="dag_for_testing_file_task_handler"\n' <del> 'labels.execution_date="2016-01-01T00:00:00+00:00"\n' <del> 'labels.try_number="3"', <add> filter_=( <add> 'resource.type="global"\n' <add> 'logName="projects/asf-project/logs/airflow"\n' <add> 'labels.task_id="task_for_testing_file_log_handler"\n' <add> 'labels.dag_id="dag_for_testing_file_task_handler"\n' <add> 'labels.execution_date="2016-01-01T00:00:00+00:00"\n' <add> 'labels.try_number="3"' <add> ), <add> order_by='timestamp asc', <add> page_size=1000, <ide> page_token=None, <ide> ) <del> assert ['MSG1\nMSG2'] == logs <add> assert [(('default-hostname', 'MSG1\nMSG2'),)] == logs <ide> assert [{'end_of_log': True}] == metadata <ide> <ide> @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.get_credentials_and_project_id') <ide> def test_should_read_logs_with_pagination(self, mock_client, mock_get_creds_and_ <ide> ] <ide> mock_get_creds_and_project_id.return_value = ('creds', 'project_id') <ide> logs, metadata1 = self.stackdriver_task_handler.read(self.ti, 3) <del> mock_client.return_value.list_entries.assert_called_once_with(filter_=mock.ANY, page_token=None) <del> assert ['MSG1\nMSG2'] == logs <add> mock_client.return_value.list_entries.assert_called_once_with( <add> filter_=mock.ANY, order_by='timestamp asc', page_size=1000, page_token=None <add> ) <add> assert [(('default-hostname', 'MSG1\nMSG2'),)] == logs <ide> assert [{'end_of_log': False, 'next_page_token': 'TOKEN1'}] == metadata1 <ide> <ide> mock_client.return_value.list_entries.return_value.next_page_token = 
None <ide> logs, metadata2 = self.stackdriver_task_handler.read(self.ti, 3, metadata1[0]) <del> mock_client.return_value.list_entries.assert_called_with(filter_=mock.ANY, page_token="TOKEN1") <del> assert ['MSG3\nMSG4'] == logs <add> mock_client.return_value.list_entries.assert_called_with( <add> filter_=mock.ANY, order_by='timestamp asc', page_size=1000, page_token="TOKEN1" <add> ) <add> assert [(('default-hostname', 'MSG3\nMSG4'),)] == logs <ide> assert [{'end_of_log': True}] == metadata2 <ide> <ide> @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.get_credentials_and_project_id') <ide> def test_should_read_logs_with_download(self, mock_client, mock_get_creds_and_pr <ide> <ide> logs, metadata1 = self.stackdriver_task_handler.read(self.ti, 3, {'download_logs': True}) <ide> <del> assert ['MSG1\nMSG2\nMSG3\nMSG4'] == logs <add> assert [(('default-hostname', 'MSG1\nMSG2\nMSG3\nMSG4'),)] == logs <ide> assert [{'end_of_log': True}] == metadata1 <ide> <ide> @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.get_credentials_and_project_id') <ide> def test_should_read_logs_with_custom_resources(self, mock_client, mock_get_cred <ide> <ide> logs, metadata = self.stackdriver_task_handler.read(self.ti) <ide> mock_client.return_value.list_entries.assert_called_once_with( <del> filter_='resource.type="cloud_composer_environment"\n' <del> 'logName="projects/asf-project/logs/airflow"\n' <del> 'resource.labels."environment.name"="test-instancce"\n' <del> 'resource.labels.location="europpe-west-3"\n' <del> 'resource.labels.project_id="asf-project"\n' <del> 'labels.task_id="task_for_testing_file_log_handler"\n' <del> 'labels.dag_id="dag_for_testing_file_task_handler"\n' <del> 'labels.execution_date="2016-01-01T00:00:00+00:00"', <add> filter_=( <add> 'resource.type="cloud_composer_environment"\n' <add> 'logName="projects/asf-project/logs/airflow"\n' <add> 'resource.labels."environment.name"="test-instancce"\n' <add> 'resource.labels.location="europpe-west-3"\n' <add> 'resource.labels.project_id="asf-project"\n' <add> 'labels.task_id="task_for_testing_file_log_handler"\n' <add> 'labels.dag_id="dag_for_testing_file_task_handler"\n' <add> 'labels.execution_date="2016-01-01T00:00:00+00:00"' <add> ), <add> order_by='timestamp asc', <add> page_size=1000, <ide> page_token=None, <ide> ) <del> assert ['TEXT\nTEXT'] == logs <add> assert [(('default-hostname', 'TEXT\nTEXT'),)] == logs <ide> assert [{'end_of_log': True}] == metadata <ide> <ide> @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.get_credentials_and_project_id') <ide><path>tests/providers/google/cloud/log/test_stackdriver_task_handler_system.py <ide> def test_should_support_key_auth(self, session): <ide> assert 0 == subprocess.Popen(["airflow", "scheduler", "--num-runs", "1"]).wait() <ide> ti = session.query(TaskInstance).filter(TaskInstance.task_id == "create_entry_group").first() <ide> <del> self.assert_remote_logs("INFO - Task exited with return code 0", ti) <add> self.assert_remote_logs("terminated with exit code 0", ti) <ide> <ide> @provide_session <ide> def test_should_support_adc(self, session): <ide> def test_should_support_adc(self, session): <ide> assert 0 == subprocess.Popen(["airflow", "scheduler", "--num-runs", "1"]).wait() <ide> ti = session.query(TaskInstance).filter(TaskInstance.task_id == "create_entry_group").first() <ide> <del> self.assert_remote_logs("INFO - Task exited with return code 0", ti) <add> self.assert_remote_logs("terminated with exit code 0", ti) <ide> 
<ide> def assert_remote_logs(self, expected_message, ti): <ide> with provide_gcp_context(GCP_STACKDRIVER), conf_vars( <ide> def assert_remote_logs(self, expected_message, ti): <ide> <ide> task_log_reader = TaskLogReader() <ide> logs = "\n".join(task_log_reader.read_log_stream(ti, try_number=None, metadata={})) <add> # Preview content <add> print("=" * 80) <add> print(logs) <add> print("=" * 80) <ide> assert expected_message in logs
5
Ruby
Ruby
revise some doc changes
1fc6036b4b2fe314e30513925fd6b42a2b634b4b
<ide><path>actionpack/lib/action_view/helpers/form_tag_helper.rb <ide> def radio_button_tag(name, value, checked = false, options = {}) <ide> # submit_tag "Save edits", :disabled => true <ide> # # => <input disabled="disabled" name="commit" type="submit" value="Save edits" /> <ide> # <del> # <ide> # submit_tag "Complete sale", :disable_with => "Please wait..." <del> # # => <input name="commit" data-disable-with="Please wait..." <del> # # type="submit" value="Complete sale" /> <add> # # => <input name="commit" data-disable-with="Please wait..." type="submit" value="Complete sale" /> <ide> # <ide> # submit_tag nil, :class => "form_submit" <ide> # # => <input class="form_submit" name="commit" type="submit" /> <ide> # <ide> # submit_tag "Edit", :disable_with => "Editing...", :class => "edit_button" <del> # # => <input class="edit_button" data-disable_with="Editing..." <del> # # name="commit" type="submit" value="Edit" /> <add> # # => <input class="edit_button" data-disable_with="Editing..." name="commit" type="submit" value="Edit" /> <ide> # <ide> # submit_tag "Save", :confirm => "Are you sure?" <del> # # => <input name='commit' type='submit' value='Save' <del> # # data-confirm="Are you sure?" /> <add> # # => <input name='commit' type='submit' value='Save' data-confirm="Are you sure?" /> <ide> # <ide> def submit_tag(value = "Save changes", options = {}) <ide> options = options.stringify_keys <ide> def submit_tag(value = "Save changes", options = {}) <ide> # # </button> <ide> # <ide> # button_tag "Checkout", :disable_with => "Please wait..." <del> # # => <button data-disable-with="Please wait..." name="button" <del> # # type="submit">Checkout</button> <add> # # => <button data-disable-with="Please wait..." name="button" type="submit">Checkout</button> <ide> # <ide> def button_tag(content_or_options = nil, options = nil, &block) <ide> options = content_or_options if block_given? && content_or_options.is_a?(Hash)
1
Go
Go
break big lock into some tiny locks
1326f0cba5f933674e23769de1385d3b0841e758
<ide><path>container/monitor.go <ide> type containerMonitor struct { <ide> // StartMonitor initializes a containerMonitor for this container with the provided supervisor and restart policy <ide> // and starts the container's process. <ide> func (container *Container) StartMonitor(s supervisor, policy container.RestartPolicy) error { <add> container.Lock() <ide> container.monitor = &containerMonitor{ <ide> supervisor: s, <ide> container: container, <ide> func (container *Container) StartMonitor(s supervisor, policy container.RestartP <ide> stopChan: make(chan struct{}), <ide> startSignal: make(chan struct{}), <ide> } <add> container.Unlock() <ide> <ide> return container.monitor.wait() <ide> } <ide> func (m *containerMonitor) start() error { <ide> } <ide> m.Close() <ide> }() <add> <add> m.container.Lock() <ide> // reset stopped flag <ide> if m.container.HasBeenManuallyStopped { <ide> m.container.HasBeenManuallyStopped = false <ide> func (m *containerMonitor) start() error { <ide> if err := m.supervisor.StartLogging(m.container); err != nil { <ide> m.resetContainer(false) <ide> <add> m.container.Unlock() <ide> return err <ide> } <ide> <ide> pipes := execdriver.NewPipes(m.container.Stdin(), m.container.Stdout(), m.container.Stderr(), m.container.Config.OpenStdin) <add> m.container.Unlock() <ide> <ide> m.logEvent("start") <ide> <ide> m.lastStartTime = time.Now() <ide> <add> // don't lock Run because m.callback has own lock <ide> if exitStatus, err = m.supervisor.Run(m.container, pipes, m.callback); err != nil { <add> m.container.Lock() <ide> // if we receive an internal error from the initial start of a container then lets <ide> // return it instead of entering the restart loop <ide> // set to 127 for container cmd not found/does not exist) <ide> func (m *containerMonitor) start() error { <ide> if m.container.RestartCount == 0 { <ide> m.container.ExitCode = 127 <ide> m.resetContainer(false) <add> m.container.Unlock() <ide> return derr.ErrorCodeCmdNotFound <ide> } <ide> } <ide> func (m *containerMonitor) start() error { <ide> if m.container.RestartCount == 0 { <ide> m.container.ExitCode = 126 <ide> m.resetContainer(false) <add> m.container.Unlock() <ide> return derr.ErrorCodeCmdCouldNotBeInvoked <ide> } <ide> } <ide> func (m *containerMonitor) start() error { <ide> m.container.ExitCode = -1 <ide> m.resetContainer(false) <ide> <add> m.container.Unlock() <ide> return derr.ErrorCodeCantStart.WithArgs(m.container.ID, utils.GetErrorMessage(err)) <ide> } <ide> <add> m.container.Unlock() <ide> logrus.Errorf("Error running container: %s", err) <del> } <add> } // end if <ide> <ide> // here container.Lock is already lost <ide> afterRun = true <ide> func (m *containerMonitor) start() error { <ide> if m.shouldStop { <ide> return err <ide> } <add> m.container.Lock() <ide> continue <ide> } <ide> <ide> m.logEvent("die") <ide> m.resetContainer(true) <ide> return err <del> } <add> } // end for <ide> } <ide> <ide> // resetMonitor resets the stateful fields on the containerMonitor based on the <ide> func (m *containerMonitor) callback(processConfig *execdriver.ProcessConfig, pid <ide> } <ide> } <ide> <del> m.container.SetRunning(pid) <add> m.container.SetRunningLocking(pid) <ide> <ide> // signal that the process has started <ide> // close channel only if not closed <ide><path>container/state.go <ide> func (s *State) getExitCode() int { <ide> return res <ide> } <ide> <add>// SetRunningLocking locks container and sets it to "running" <add>func (s *State) SetRunningLocking(pid int) { <add> s.Lock() <add> 
s.SetRunning(pid) <add> s.Unlock() <add>} <add> <ide> // SetRunning sets the state of the container to "running". <ide> func (s *State) SetRunning(pid int) { <ide> s.Error = "" <ide> func (s *State) SetRunning(pid int) { <ide> s.waitChan = make(chan struct{}) <ide> } <ide> <del>// SetStoppedLocking locks the container state is sets it to "stopped". <add>// SetStoppedLocking locks the container state and sets it to "stopped". <ide> func (s *State) SetStoppedLocking(exitStatus *execdriver.ExitStatus) { <ide> s.Lock() <ide> s.SetStopped(exitStatus) <ide><path>daemon/start.go <ide> func (daemon *Daemon) containerStart(container *container.Container) (err error) <ide> mounts = append(mounts, container.TmpfsMounts()...) <ide> <ide> container.Command.Mounts = mounts <add> container.Unlock() <add> <add> // don't lock waitForStart because it has potential risk of blocking <add> // which will lead to dead lock, forever. <ide> if err := daemon.waitForStart(container); err != nil { <add> container.Lock() <ide> return err <ide> } <add> container.Lock() <ide> container.HasBeenStartedBefore = true <ide> return nil <ide> }
3
Go
Go
fix inspect object by invalid reference
3cd39aaeab37102e4b12decc0c36042e477e2fa6
<ide><path>integration-cli/docker_cli_inspect_test.go <ide> func (s *DockerSuite) TestInspectUnknownObject(c *check.C) { <ide> c.Assert(out, checker.Contains, "Error: No such object: foobar") <ide> c.Assert(err.Error(), checker.Contains, "Error: No such object: foobar") <ide> } <add> <add>func (s *DockerSuite) TestInpectInvalidReference(c *check.C) { <add> // This test should work on both Windows and Linux <add> out, _, err := dockerCmdWithError("inspect", "FooBar") <add> c.Assert(err, checker.NotNil) <add> c.Assert(out, checker.Contains, "Error: No such object: FooBar") <add> c.Assert(err.Error(), checker.Contains, "Error: No such object: FooBar") <add>} <ide><path>plugin/store.go <ide> func (ps *Store) resolvePluginID(idOrName string) (string, error) { <ide> <ide> ref, err := reference.ParseNamed(idOrName) <ide> if err != nil { <del> return "", errors.Wrapf(err, "failed to parse %v", idOrName) <add> return "", errors.WithStack(ErrNotFound(idOrName)) <ide> } <ide> if _, ok := ref.(reference.Canonical); ok { <ide> logrus.Warnf("canonical references cannot be resolved: %v", ref.String())
2
Text
Text
remove rebase docs
15f40138593c9add014cfabd4c0cb27a896cad6c
<ide><path>docs/topics/release-notes.md <ide> You can determine your currently installed version using `pip freeze`: <ide> <ide> **Date**: 4th April 2013 <ide> <del>* DecimalField support. <ide> * OAuth2 authentication no longer requires unneccessary URL parameters in addition to the token. <ide> * URL hyperlinking in browseable API now handles more cases correctly. <ide> * Long HTTP headers in browsable API are broken in multiple lines when possible.
1
Text
Text
add wiki link to readme.md
0e325d9e344d71136c469ab3404deb79cd5d3291
<ide><path>README.md <ide> performing complex surgeries on DAGs a snap. The rich user interface <ide> makes it easy to visualize pipelines running in production, <ide> monitor progress, and troubleshoot issues when needed. <ide> <add>For more information and documentation, please visit the [Airflow Wiki](https://github.com/airbnb/airflow/wiki). <add> <ide> ## Beyond the Horizon <ide> <ide> Airflow **is not** a data streaming solution. Tasks do not move data from
1
Text
Text
add teams for platform-specific issues
ab9b2fdbfb542e37eb988cb11677e878ae6ef0ea
<ide><path>doc/onboarding-extras.md <ide> | upgrading c-ares | @jbergstroem | <ide> | upgrading http-parser | @jbergstroem, @nodejs/http | <ide> | upgrading libuv | @saghul | <add>| platform specific | @nodejs/platform-{aix,arm,freebsd,macos,ppc,smartos,s390,windows} | <ide> <ide> <ide> When things need extra attention, are controversial, or `semver-major`: @nodejs/ctc
1
Text
Text
update ecosystem page
db677195b08c55aac0adbd8d09fed98ca2ac710e
<ide><path>docs/introduction/Ecosystem.md <ide> Reset the redux state on certain actions <ide> **[ForbesLindesay/redux-optimist](https://github.com/ForbesLindesay/redux-optimist)** <br /> <ide> A reducer enhancer to enable type-agnostic optimistic updates <ide> <del>## Actions <del> <del>**[reduxactions/redux-actions](https://github.com/reduxactions/redux-actions)** <br /> <del>Flux Standard Action utilities for Redux <del> <del>```js <del>const increment = createAction('INCREMENT') <del>const reducer = handleActions({ [increment]: (state, action) => state + 1 }, 0) <del>const store = createStore(reducer) <del>store.dispatch(increment()) <del>``` <del> <del>**[BerkeleyTrue/redux-create-types](https://github.com/BerkeleyTrue/redux-create-types)** <br /> <del>Creates standard and async action types based on namespaces <del> <del>```js <del>export const types = createTypes( <del> ['openModal', createAsyncTypes('fetch')], <del> 'app' <del>) <del>// { openModal : "app.openModal", fetch : { start : "app.fetch.start", complete: 'app.fetch.complete' } } <del>``` <del> <del>**[maxhallinan/kreighter](https://github.com/maxhallinan/kreighter)** <br /> <del>Generates action creators based on types and expected fields <del> <del>```js <del>const formatTitle = (id, title) => ({ <del> id, <del> title: toTitleCase(title) <del>}) <del>const updateBazTitle = fromType('UPDATE_BAZ_TITLE', formatTitle) <del>updateBazTitle(1, 'foo bar baz') <del>// -> { type: 'UPDATE_BAZ_TITLE', id: 1, title: 'Foo Bar Baz', } <del>``` <del> <ide> ## Utilities <ide> <ide> **[reduxjs/reselect](https://github.com/reduxjs/reselect)** <br /> <ide> Store enhancer that can debounce subscription notifications <ide> <ide> ```js <ide> const debounceNotify = _.debounce(notify => notify()) <del>const store = createStore( <del> reducer, <del> initialState, <del> batchedSubscribe(debounceNotify) <del>) <add>const store = configureStore({ reducer, enhancers: [ batchedSubscribe(debounceNotify) ] }) <ide> ``` <ide> <ide> **[manaflair/redux-batch](https://github.com/manaflair/redux-batch)** <br /> <ide> Store enhancer that allows dispatching arrays of actions <ide> <ide> ```js <del>const store = createStore(reducer, reduxBatch) <add>const store = configureStore({ reducer, enhancers: [ reduxBatch ] }) <ide> store.dispatch([{ type: 'INCREMENT' }, { type: 'INCREMENT' }]) <ide> ``` <ide> <ide> **[laysent/redux-batch-actions-enhancer](https://github.com/laysent/redux-batch-actions-enhancer)** <br /> <ide> Store enhancer that accepts batched actions <ide> <ide> ```js <del>const store = createStore(reducer, initialState, batch().enhancer) <add>const store = configureStore({ reducer, enhancers: [ batch().enhancer ] }) <ide> store.dispatch(createAction({ type: 'INCREMENT' }, { type: 'INCREMENT' })) <ide> ``` <ide> <ide> **[tshelburne/redux-batched-actions](https://github.com/tshelburne/redux-batched-actions)** <br /> <ide> Higher-order reducer that handles batched actions <ide> <ide> ```js <del>const store = createStore(enableBatching(reducer), initialState) <add>const store = configureStore({ reducer: enableBatching(rootReducer) }) <ide> store.dispatch(batchActions([{ type: 'INCREMENT' }, { type: 'INCREMENT' }])) <ide> ``` <ide> <ide> store.dispatch(batchActions([{ type: 'INCREMENT' }, { type: 'INCREMENT' }])) <ide> Persist and rehydrate a Redux store, with many extensible options <ide> <ide> ```js <del>const store = createStore(reducer, autoRehydrate()) <del>persistStore(store) <add>const persistConfig = { key: 'root', version: 1, storage } <add>const 
persistedReducer = persistReducer(persistConfig, rootReducer) <add>export const store = configureStore({ <add> reducer: persistedReducer, <add> middleware: (getDefaultMiddleware) => <add> getDefaultMiddleware({ <add> serializableCheck: { <add> ignoredActions: [FLUSH, REHYDRATE, PAUSE, PERSIST, PURGE, REGISTER], <add> }, <add> }), <add>}) <add>export const persistor = persistStore(store) <ide> ``` <ide> <ide> **[react-stack/redux-storage](https://github.com/react-stack/redux-storage)** <br /> <ide> Persistence layer for Redux with flexible backends <ide> const reducer = storage.reducer(combineReducers(reducers)) <ide> const engine = createEngineLocalStorage('my-save-key') <ide> const storageMiddleware = storage.createMiddleware(engine) <del>const store = createStore(reducer, applyMiddleware(storageMiddleware)) <add>const store = configureStore({ <add> reducer, <add> middleware: getDefaultMiddleware => getDefaultMiddleware.concat(storageMiddleware) <add>}) <ide> ``` <ide> <ide> **[redux-offline/redux-offline](https://github.com/redux-offline/redux-offline)** <br /> <ide> Persistent store for Offline-First apps, with support for optimistic UIs <ide> <ide> ```js <del>const store = createStore(reducer, offline(offlineConfig)) <add>const store = configureStore({ reducer, enhancer: [ offline(offlineConfig) ] }) <ide> store.dispatch({ <ide> type: 'FOLLOW_USER_REQUEST', <ide> meta: { offline: { effect: {}, commit: {}, rollback: {} } } <ide> function addTodosIfAllowed(todoText) { <ide> } <ide> ``` <ide> <add>**[listenerMiddleware (Redux Toolkit)](https://redux-toolkit.js.org/api/createListenerMiddleware)** <br /> <add>listenerMiddleware is intended to be a lightweight alternative to more widely used Redux async middleware like sagas and observables. While similar to thunks in level of complexity and concept, it can be used to replicate some common saga usage patterns. <add> <add>```js <add>listenerMiddleware.startListening({ <add> matcher: isAnyOf(action1, action2, action3), <add> effect: (action, listenerApi) => { <add> const user = selectUserDetails(listenerApi.getState()) <add> <add> const { specialData } = action.meta <add> <add> analyticsApi.trackUsage(action.type, user, specialData) <add> }, <add>}) <add>``` <add> <ide> **[redux-saga/redux-saga](https://github.com/redux-saga/redux-saga)** <br /> <ide> Handle async logic using synchronous-looking generator functions. Sagas return descriptions of effects, which are executed by the saga middleware, and act like "background threads" for JS applications. <ide> <ide> const fetchUsers = () => ({ <ide> An opinionated connector between socket.io and redux. <ide> <ide> ```js <del>const store = createStore(reducer, applyMiddleware(socketIoMiddleware)) <add>const store = configureStore({ <add> reducer, <add> middleware: getDefaultMiddleware => getDefaultMiddleware.concat(socketIoMiddleware) <add>}) <ide> store.dispatch({ type: 'server/hello', data: 'Hello!' }) <ide> ``` <ide>
1
Text
Text
remove lineheight property from legendtitlt
988464323f2bc3d60e608676eefef3526e61a58c
<ide><path>docs/docs/configuration/legend.md <ide> The legend label configuration is nested below the legend configuration using th <ide> | ---- | ---- | ------- | ----------- <ide> | `boxWidth` | `number` | `40` | Width of coloured box. <ide> | `boxHeight` | `number` | fontSize | Height of the coloured box. <del>| `font` | `Font` | `defaults.font` | See [Fonts](fonts.md) <add>| `font` | `Font` | `defaults.font` | See [Fonts](../general/fonts.md) <ide> | `padding` | `number` | `10` | Padding between rows of colored boxes. <ide> | `generateLabels` | `function` | | Generates legend items for each thing in the legend. Default implementation returns the text + styling for the color box. See [Legend Item](#legend-item-interface) for details. <ide> | `filter` | `function` | `null` | Filters legend items out of the legend. Receives 2 parameters, a [Legend Item](#legend-item-interface) and the chart data. <ide> The legend title configuration is nested below the legend configuration using th <ide> | Name | Type | Default | Description <ide> | ---- | ---- | ------- | ----------- <ide> | `display` | `boolean` | `false` | Is the legend title displayed. <del>| `font` | `Font` | `defaults.font` | See [Fonts](fonts.md) <del>| `lineHeight` | `number` | | Line height of the text. If unset, is computed from the font size. <add>| `font` | `Font` | `defaults.font` | See [Fonts](../general/fonts.md) <ide> | `padding` | <code>number&#124;object</code> | `0` | Padding around the title. If specified as a number, it applies evenly to all sides. <ide> | `text` | `string` | | The string title. <ide>
1
Mixed
Javascript
improve assert.fail() api
758b8b6e5d1aa171827759e51847bcc4f2eea7a3
<ide><path>doc/api/assert.md <ide> If the values are not equal, an `AssertionError` is thrown with a `message` <ide> property set equal to the value of the `message` parameter. If the `message` <ide> parameter is undefined, a default error message is assigned. <ide> <add>## assert.fail(message) <ide> ## assert.fail(actual, expected, message, operator) <ide> <!-- YAML <ide> added: v0.1.21 <ide> --> <ide> * `actual` {any} <ide> * `expected` {any} <ide> * `message` {any} <del>* `operator` {string} <add>* `operator` {string} (default: '!=') <ide> <ide> Throws an `AssertionError`. If `message` is falsy, the error message is set as <ide> the values of `actual` and `expected` separated by the provided `operator`. <ide> assert.fail(1, 2, undefined, '>'); <ide> <ide> assert.fail(1, 2, 'whoops', '>'); <ide> // AssertionError: whoops <add> <add>assert.fail('boom'); <add>// AssertionError: boom <add> <add>assert.fail('a', 'b'); <add>// AssertionError: 'a' != 'b' <ide> ``` <ide> <ide> ## assert.ifError(value) <ide><path>lib/assert.js <ide> function getMessage(self) { <ide> // display purposes. <ide> <ide> function fail(actual, expected, message, operator, stackStartFunction) { <add> if (arguments.length === 1) <add> message = actual; <add> if (arguments.length === 2) <add> operator = '!='; <ide> throw new assert.AssertionError({ <ide> message: message, <ide> actual: actual, <ide><path>test/parallel/test-assert-fail.js <add>'use strict'; <add>require('../common'); <add>const assert = require('assert'); <add> <add>// no args <add>assert.throws( <add> () => { assert.fail(); }, <add> /^AssertionError: undefined undefined undefined$/ <add>); <add> <add>// one arg = message <add>assert.throws( <add> () => { assert.fail('custom message'); }, <add> /^AssertionError: custom message$/ <add>); <add> <add>// two args only, operator defaults to '!=' <add>assert.throws( <add> () => { assert.fail('first', 'second'); }, <add> /^AssertionError: 'first' != 'second'$/ <add>); <add> <add>// three args <add>assert.throws( <add> () => { assert.fail('ignored', 'ignored', 'another custom message'); }, <add> /^AssertionError: another custom message$/ <add>); <add> <add>// no third arg (but a fourth arg) <add>assert.throws( <add> () => { assert.fail('first', 'second', undefined, 'operator'); }, <add> /^AssertionError: 'first' operator 'second'$/ <add>);
3
Text
Text
add a ; to the end of the example code
994ee89a86b2050bec056ddd7e0d3cb9f6bb457e
<ide><path>curriculum/challenges/english/02-javascript-algorithms-and-data-structures/es6/use-destructuring-assignment-to-assign-variables-from-objects.english.md <ide> Consider the following ES5 code: <ide> Here's the same assignment statement with ES6 destructuring syntax: <ide> <blockquote>const { x, y, z } = voxel; // x = 3.6, y = 7.4, z = 6.54</blockquote> <ide> If instead you want to store the values of <code>voxel.x</code> into <code>a</code>, <code>voxel.y</code> into <code>b</code>, and <code>voxel.z</code> into <code>c</code>, you have that freedom as well. <del><blockquote>const { x : a, y : b, z : c } = voxel // a = 3.6, b = 7.4, c = 6.54</blockquote> <add><blockquote>const { x : a, y : b, z : c } = voxel; // a = 3.6, b = 7.4, c = 6.54</blockquote> <ide> You may read it as "get the field <code>x</code> and copy the value into <code>a</code>," and so on. <ide> </section> <ide>
1
Java
Java
fix issue with incorrect class import
cf147a82ef0c5592041ddfdde3d1340fd47e89aa
<ide><path>spring-webmvc/src/main/java/org/springframework/web/servlet/mvc/method/annotation/ExceptionHandlerExceptionResolver.java <ide> <ide> import java.lang.reflect.Method; <ide> import java.util.ArrayList; <add>import java.util.Collections; <ide> import java.util.HashMap; <ide> import java.util.LinkedHashMap; <ide> import java.util.List; <ide> import org.springframework.web.servlet.View; <ide> import org.springframework.web.servlet.handler.AbstractHandlerMethodExceptionResolver; <ide> <del>import edu.emory.mathcs.backport.java.util.Collections; <del> <ide> /** <ide> * An {@link AbstractHandlerMethodExceptionResolver} that resolves exceptions <ide> * through {@code @ExceptionHandler} methods.
1
Text
Text
link v3 and v4 ticksizeinner & ticksizeouter
d2278c73e514b475e308be937e18c31429780184
<ide><path>CHANGES.md <ide> As before, you can customize the axis appearance either by applying stylesheets <ide> <ide> There’s now an [*axis*.tickArguments](https://github.com/d3/d3-axis#axis_tickArguments) method, as an alternative to [*axis*.ticks](https://github.com/d3/d3-axis#axis_ticks) that also allows the axis tick arguments to be inspected. The [*axis*.tickSize](https://github.com/d3/d3-axis#axis_tickSize) method has been changed to only allow a single argument when setting the tick size; use [*axis*.tickSizeInner](https://github.com/d3/d3-axis#axis_tickSizeInner) or [*axis*.tickSizeOuter](https://github.com/d3/d3-axis#axis_tickSizeOuter) to set the inner and outer tick size separately. <ide> <add>* *axis*.innerTickSize ↦ [*axis*.tickSizeInner](https://github.com/d3/d3-axis#axis_tickSizeInner) <add>* *axis*.outerTickSize ↦ [*axis*.tickSizeOuter](https://github.com/d3/d3-axis#axis_tickSizeOuter) <add> <ide> ## [Brushes (d3-brush)](https://github.com/d3/d3-brush/blob/master/README.md) <ide> <ide> Replacing d3.svg.brush, there are now three classes of brush for brushing along the *x*-dimension, the *y*-dimension, or both: [d3.brushX](https://github.com/d3/d3-brush#brushX), [d3.brushY](https://github.com/d3/d3-brush#brushY), [d3.brush](https://github.com/d3/d3-brush#brush). Brushes are no longer dependent on [scales](#scales-d3-scale); instead, each brush defines a selection in screen coordinates. This selection can be [inverted](https://github.com/d3/d3-scale#continuous_invert) if you want to compute the corresponding data domain. And rather than rely on the scales’ ranges to determine the brushable area, there is now a [*brush*.extent](https://github.com/d3/d3-brush#brush_extent) method for setting it. If you do not set the brush extent, it defaults to the full extent of the owner SVG element. The *brush*.clamp method has also been eliminated; brushing is always restricted to the brushable area defined by the brush extent.
1
Python
Python
fix mixed precision parameter
80af2a7b97f30ef8d82f88f050404fa698d6171d
<ide><path>official/vision/image_classification/resnet_imagenet_main.py <ide> from official.vision.image_classification import imagenet_preprocessing <ide> from official.vision.image_classification import resnet_model <ide> from official.vision.image_classification import trivial_model <del> <add>import pdb <ide> <ide> LR_SCHEDULE = [ # (multiplier, epoch to start) tuples <ide> (1.0, 5), (0.1, 30), (0.01, 60), (0.001, 80) <ide> def run(flags_obj): <ide> <ide> dtype = flags_core.get_tf_dtype(flags_obj) <ide> if dtype == 'float16': <del> # Mixed precision training via graph rewrite should not be used in conjunction <del> # with tf.keras.mixed_precision <del> if flags_obj["fp16_implementation"] != "graph_rewrite": <del> policy = tf.keras.mixed_precision.experimental.Policy('infer_float32_vars') <del> tf.keras.mixed_precision.experimental.set_policy(policy) <add> policy = tf.keras.mixed_precision.experimental.Policy('infer_float32_vars') <add> tf.keras.mixed_precision.experimental.set_policy(policy) <ide> <ide> data_format = flags_obj.data_format <ide> if data_format is None: <ide> def run(flags_obj): <ide> if dtype == 'float16': <ide> # TODO(reedwm): Remove manually wrapping optimizer once mixed precision <ide> # can be enabled with a single line of code. <del> if flags_dict["fp16_implementation"] == "graph_rewrite": <del> optimizer = tf.compat.v1.train.experimental.enable_mixed_precision_graph_rewrite(optimizer) <del> else: <del> optimizer = tf.keras.mixed_precision.experimental.LossScaleOptimizer( <del> optimizer, loss_scale=flags_core.get_loss_scale(flags_obj, <del> default_for_fp16=128)) <del> <add> optimizer = tf.keras.mixed_precision.experimental.LossScaleOptimizer( <add> optimizer, loss_scale=flags_core.get_loss_scale(flags_obj, <add> default_for_fp16=128)) <add> pdb.set_trace() <add> if flags_obj.fp16_implementation == "graph_rewrite": <add> # Note: when flags_obj["fp16_implementation"] == "graph_rewrite", <add> # dtype as determined by flags_core.get_tf_dtype(flags_obj) would be 'float32' <add> # <add> optimizer = tf.train.experimental.enable_mixed_precision_graph_rewrite(optimizer) <add> <ide> if flags_obj.use_trivial_model: <ide> model = trivial_model.trivial_model( <ide> imagenet_preprocessing.NUM_CLASSES, dtype)
1
Javascript
Javascript
fix default encoding of lazytransform
443691a5aef8a908bc4c6f63b1710eb988235674
<ide><path>lib/internal/streams/lazy_transform.js <ide> <ide> const stream = require('stream'); <ide> const util = require('util'); <add>const crypto = require('crypto'); <ide> <ide> module.exports = LazyTransform; <ide> <ide> util.inherits(LazyTransform, stream.Transform); <ide> get: function() { <ide> stream.Transform.call(this, this._options); <ide> this._writableState.decodeStrings = false; <del> this._writableState.defaultEncoding = 'latin1'; <add> <add> if (!this._options || !this._options.defaultEncoding) { <add> this._writableState.defaultEncoding = crypto.DEFAULT_ENCODING; <add> } <add> <ide> return this[prop]; <ide> }, <ide> set: function(val) { <ide><path>test/parallel/test-crypto.js <ide> console.log(crypto.randomBytes(16)); <ide> assert.throws(function() { <ide> tls.createSecureContext({ crl: 'not a CRL' }); <ide> }, /^Error: Failed to parse CRL$/); <add> <add>/** <add> * Check if the stream function uses utf8 as a default encoding. <add> **/ <add> <add>function testEncoding(options, assertionHash) { <add> const hash = crypto.createHash('sha256', options); <add> let hashValue = ''; <add> <add> hash.on('data', (data) => { <add> hashValue += data.toString('hex'); <add> }); <add> <add> hash.on('end', common.mustCall(() => { <add> assert.strictEqual(hashValue, assertionHash); <add> })); <add> <add> hash.write('öäü'); <add> hash.end(); <add>} <add> <add>// Hash of "öäü" in utf8 format <add>const assertionHashUtf8 = <add> '4f53d15bee524f082380e6d7247cc541e7cb0d10c64efdcc935ceeb1e7ea345c'; <add> <add>// Hash of "öäü" in latin1 format <add>const assertionHashLatin1 = <add> 'cd37bccd5786e2e76d9b18c871e919e6eb11cc12d868f5ae41c40ccff8e44830'; <add> <add>testEncoding(undefined, assertionHashUtf8); <add>testEncoding({}, assertionHashUtf8); <add> <add>testEncoding({ <add> defaultEncoding: 'utf8' <add>}, assertionHashUtf8); <add> <add>testEncoding({ <add> defaultEncoding: 'latin1' <add>}, assertionHashLatin1);
2
Python
Python
do the safety check before inserting eol
30579e0f53264f3accf92697f1d243848f45cc88
<ide><path>official/nlp/transformer/utils/tokenizer.py <ide> def encode(self, raw_string, add_eos=False): <ide> for token in tokens: <ide> ret.extend(self._token_to_subtoken_ids(token)) <ide> if add_eos: <add> assert EOS in self.subtoken_list, \ <add> "Can't append 'EOS' because it is not in list of known subtokens." <ide> ret.append(EOS_ID) <ide> return ret <ide>
1
Text
Text
add tniessen to collaborators
9b730620a741644d2df7b8c64559a6d2eea7c283
<ide><path>README.md <ide> more information about the governance of the Node.js project, see <ide> **Thorsten Lorenz** &lt;[email protected]&gt; <ide> * [TimothyGu](https://github.com/TimothyGu) - <ide> **Timothy Gu** &lt;[email protected]&gt; (he/him) <add>* [tniessen](https://github.com/tniessen) - <add>**Tobias Nießen** &lt;[email protected]&gt; <ide> * [tunniclm](https://github.com/tunniclm) - <ide> **Mike Tunnicliffe** &lt;[email protected]&gt; <ide> * [vkurchatkin](https://github.com/vkurchatkin) -
1
Javascript
Javascript
add a deprecatedcallback helper
8eddead8687f0acc68fb2e857a00260912600f06
<ide><path>Libraries/Utilities/deprecatedCallback.js <add>/** <add> * Copyright (c) 2015-present, Facebook, Inc. <add> * All rights reserved. <add> * <add> * This source code is licensed under the BSD-style license found in the <add> * LICENSE file in the root directory of this source tree. An additional grant <add> * of patent rights can be found in the PATENTS file in the same directory. <add> * <add> * Helper for deprecated callback pattern <add> * <add> * @providesModule deprecatedCallback <add> * @flow <add> */ <add> <add>'use strict'; <add> <add>module.exports = function(promise: Promise, callbacks: Array<Function>, type: string, warning: string): Promise { <add> if (callbacks.length === 0) { <add> return promise; <add> } <add> <add> let success, error; <add> <add> console.warn(warning); <add> <add> switch (type) { <add> case 'success-first': // handles func(success, error), func(success) <add> [ success, error ] = callbacks; <add> return promise.then( <add> res => success(res), <add> err => error && error(err) <add> ); <add> case 'error-first': // handles func(error, success) <add> [ error, success ] = callbacks; <add> return promise.then( <add> res => success(res), <add> err => error(err) <add> ); <add> case 'node': // handles func(callback) <add> const [ callback ] = callbacks; <add> return promise.then( <add> res => callback(null, res), <add> err => callback(err) <add> ); <add> default: <add> throw new Error(`Type of callbacks not specified. Must be one of 'success-first', 'error-first', or 'node'`); <add> } <add>};
1
Python
Python
add python 3.6 to package support
4ec92c2bb68c29b04fa0da9195a4b3ed5fac8324
<ide><path>setup.py <ide> def run(self): <ide> 'Programming Language :: Python :: 3.3', <ide> 'Programming Language :: Python :: 3.4', <ide> 'Programming Language :: Python :: 3.5', <add> 'Programming Language :: Python :: 3.6', <ide> 'Programming Language :: Python :: Implementation :: CPython', <ide> 'Programming Language :: Python :: Implementation :: PyPy'] <ide> )
1
Go
Go
define downloadoption type
047e032461f9ef6226143b4768331b7f893d2572
<ide><path>distribution/xfer/download.go <ide> func (ldm *LayerDownloadManager) SetConcurrency(concurrency int) { <ide> } <ide> <ide> // NewLayerDownloadManager returns a new LayerDownloadManager. <del>func NewLayerDownloadManager(layerStore layer.Store, concurrencyLimit int, options ...func(*LayerDownloadManager)) *LayerDownloadManager { <add>func NewLayerDownloadManager(layerStore layer.Store, concurrencyLimit int, options ...DownloadOption) *LayerDownloadManager { <ide> manager := LayerDownloadManager{ <ide> layerStore: layerStore, <ide> tm: newTransferManager(concurrencyLimit), <ide> func NewLayerDownloadManager(layerStore layer.Store, concurrencyLimit int, optio <ide> return &manager <ide> } <ide> <add>// DownloadOption set options for the LayerDownloadManager. <add>type DownloadOption func(*LayerDownloadManager) <add> <ide> // WithMaxDownloadAttempts configures the maximum number of download <ide> // attempts for a download manager. <del>func WithMaxDownloadAttempts(max int) func(*LayerDownloadManager) { <add>func WithMaxDownloadAttempts(max int) DownloadOption { <ide> return func(dlm *LayerDownloadManager) { <ide> dlm.maxDownloadAttempts = max <ide> }
1
Python
Python
fix norm tests for single prec
1cf5461fb0867179846c5f89bc5e91aba57fb242
<ide><path>numpy/linalg/tests/test_linalg.py <ide> def do(self, a): <ide> <ide> class _TestNorm(TestCase): <ide> dt = None <add> dec = None <ide> def test_empty(self): <ide> assert_equal(norm([]), 0.0) <ide> assert_equal(norm(array([], dtype=self.dt)), 0.0) <ide> def test_vector(self): <ide> a = [1.0,2.0,3.0,4.0] <ide> b = [-1.0,-2.0,-3.0,-4.0] <ide> c = [-1.0, 2.0,-3.0, 4.0] <del> for v in (a,array(a, dtype=self.dt),b,array(b, dtype=self.dt),c,array(c, <del> dtype=self.dt)): <del> assert_almost_equal(norm(v), 30**0.5) <del> assert_almost_equal(norm(v,inf), 4.0) <del> assert_almost_equal(norm(v,-inf), 1.0) <del> assert_almost_equal(norm(v,1), 10.0) <del> assert_almost_equal(norm(v,-1), 12.0/25) <del> assert_almost_equal(norm(v,2), 30**0.5) <del> assert_almost_equal(norm(v,-2), (205./144)**-0.5) <del> <del> @dec.knownfailureif(True, "#786: FIXME") <add> <add> def _test(v): <add> np.testing.assert_almost_equal(norm(v), 30**0.5, decimal=self.dec) <add> np.testing.assert_almost_equal(norm(v,inf), 4.0, decimal=self.dec) <add> np.testing.assert_almost_equal(norm(v,-inf), 1.0, decimal=self.dec) <add> np.testing.assert_almost_equal(norm(v,1), 10.0, decimal=self.dec) <add> np.testing.assert_almost_equal(norm(v,-1), 12.0/25, <add> decimal=self.dec) <add> np.testing.assert_almost_equal(norm(v,2), 30**0.5, <add> decimal=self.dec) <add> np.testing.assert_almost_equal(norm(v,-2), ((205./144)**-0.5), <add> decimal=self.dec) <add> <add> for v in (a, b, c,): <add> _test(v) <add> <add> for v in (array(a, dtype=self.dt), array(b, dtype=self.dt), <add> array(c, dtype=self.dt)): <add> _test(v) <add> <add> @np.testing.dec.knownfailureif(True, "#786: FIXME") <ide> def test_vector_badarg(self): <ide> """Regression for #786: Froebenius norm for vectors raises <ide> TypeError.""" <ide> def test_matrix(self): <ide> <ide> class TestNormDouble(_TestNorm): <ide> dt = np.double <add> dec= 12 <add> <add>class TestNormSingle(_TestNorm): <add> dt = np.float32 <add> dec = 6 <ide> <ide> if __name__ == "__main__": <ide> run_module_suite()
1
Javascript
Javascript
add regex check in test-buffer-bad-overload
bae695f4f32cde9f21bc9c78c20ff253b2881b7a
<ide><path>test/parallel/test-buffer-bad-overload.js <ide> assert.doesNotThrow(function() { <ide> <ide> assert.throws(function() { <ide> Buffer.from(10, 'hex'); <del>}); <add>}, /^TypeError: "value" argument must not be a number$/); <ide> <ide> assert.doesNotThrow(function() { <ide> Buffer.from('deadbeaf', 'hex');
1
Text
Text
remove trailing *
6e4b37fffdfc324141aae9f86268db4e8ed7aeb0
<ide><path>docs/sources/introduction/understanding-docker.md <ide> Docker's portability and lightweight nature also make dynamically managing <ide> workloads easy. You can use Docker to quickly scale up or tear down applications <ide> and services. Docker's speed means that scaling can be near real time. <ide> <del>*Achieving higher density and running more workloads** <add>*Achieving higher density and running more workloads* <ide> <ide> Docker is lightweight and fast. It provides a viable, cost-effective alternative <ide> to hypervisor-based virtual machines. This is especially useful in high density
1
Python
Python
enhance magic methods on xcomarg for ux
1956f38276b982f7f0f72944d4fb8615ee55459f
<ide><path>airflow/models/xcom_arg.py <ide> def __init__(self, operator: "Operator", key: str = XCOM_RETURN_KEY): <ide> def __eq__(self, other): <ide> return self.operator == other.operator and self.key == other.key <ide> <del> def __getitem__(self, item): <add> def __getitem__(self, item: str) -> "XComArg": <ide> """Implements xcomresult['some_result_key']""" <add> if not isinstance(item, str): <add> raise ValueError(f"XComArg only supports str lookup, received {type(item).__name__}") <ide> return XComArg(operator=self.operator, key=item) <ide> <add> def __iter__(self): <add> """Override iterable protocol to raise error explicitly. <add> <add> The default ``__iter__`` implementation in Python calls ``__getitem__`` <add> with 0, 1, 2, etc. until it hits an ``IndexError``. This does not work <add> well with our custom ``__getitem__`` implementation, and results in poor <add> DAG-writing experience since a misplaced ``*`` expansion would create an <add> infinite loop consuming the entire DAG parser. <add> <add> This override catches the error eagerly, so an incorrectly implemented <add> DAG fails fast and avoids wasting resources on nonsensical iterating. <add> """ <add> raise TypeError(f"{self.__class__.__name__!r} object is not iterable") <add> <ide> def __str__(self): <ide> """ <ide> Backward compatibility for old-style jinja used in Airflow Operators <ide><path>tests/models/test_xcom_arg.py <ide> def test_xcom_arg_property_of_base_operator(self, dag_maker): <ide> <ide> assert op_a.output == XComArg(op_a) <ide> <add> def test_xcom_key_getitem_not_str(self, dag_maker): <add> python_op = build_python_op(dag_maker) <add> actual = XComArg(python_op) <add> with pytest.raises(ValueError) as ctx: <add> actual[1] <add> assert str(ctx.value) == "XComArg only supports str lookup, received int" <add> <ide> def test_xcom_key_getitem(self, dag_maker): <ide> python_op = build_python_op(dag_maker) <ide> actual = XComArg(python_op, key="another_key") <ide> assert actual.key == "another_key" <ide> actual_new_key = actual["another_key_2"] <ide> assert actual_new_key.key == "another_key_2" <ide> <add> def test_xcom_not_iterable(self, dag_maker): <add> python_op = build_python_op(dag_maker) <add> actual = XComArg(python_op) <add> with pytest.raises(TypeError) as ctx: <add> list(actual) <add> assert str(ctx.value) == "'XComArg' object is not iterable" <add> <ide> <ide> @pytest.mark.system("core") <ide> class TestXComArgRuntime:
2
PHP
PHP
add type to userfactory for generic parent class
7bff7c6c61aa5865966cdd6cea93409771f97413
<ide><path>database/factories/UserFactory.php <ide> <ide> namespace Database\Factories; <ide> <add>use App\Models\User; <ide> use Illuminate\Database\Eloquent\Factories\Factory; <ide> use Illuminate\Support\Str; <ide> <add>/** <add> * @implements Factory<User> <add> */ <ide> class UserFactory extends Factory <ide> { <ide> /**
1
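For comparison, the same "factory parameterised by its model" idea can be written with Python's typing generics; the classes below are hypothetical illustrations, not Laravel code.

```python
from typing import Generic, TypeVar

ModelT = TypeVar("ModelT")


class Factory(Generic[ModelT]):
    """Generic base: subclasses declare which model they build."""

    def make(self) -> ModelT:
        raise NotImplementedError


class User:
    pass


class UserFactory(Factory[User]):
    def make(self) -> User:
        return User()   # type checkers now know make() yields a User
```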
Python
Python
check textcat values for validity
75bb7ad541a94c74127b57ffd6d674841767478c
<ide><path>spacy/errors.py <ide> class Errors(metaclass=ErrorsWithCodes): <ide> "during training, make sure to include it in 'annotating components'") <ide> <ide> # New errors added in v3.x <add> E851 = ("The 'textcat' component labels should only have values of 0 or 1, " <add> "but found value of '{val}'.") <ide> E852 = ("The tar file pulled from the remote attempted an unsafe path " <ide> "traversal.") <ide> E853 = ("Unsupported component factory name '{name}'. The character '.' is " <ide><path>spacy/pipeline/textcat.py <ide> def rehearse( <ide> bp_scores(gradient) <ide> if sgd is not None: <ide> self.finish_update(sgd) <del> losses[self.name] += (gradient**2).sum() <add> losses[self.name] += (gradient ** 2).sum() <ide> return losses <ide> <ide> def _examples_to_truth( <ide> def get_loss(self, examples: Iterable[Example], scores) -> Tuple[float, float]: <ide> not_missing = self.model.ops.asarray(not_missing) # type: ignore <ide> d_scores = scores - truths <ide> d_scores *= not_missing <del> mean_square_error = (d_scores**2).mean() <add> mean_square_error = (d_scores ** 2).mean() <ide> return float(mean_square_error), d_scores <ide> <ide> def add_label(self, label: str) -> int: <ide> def initialize( <ide> def _validate_categories(self, examples: Iterable[Example]): <ide> """Check whether the provided examples all have single-label cats annotations.""" <ide> for ex in examples: <del> if list(ex.reference.cats.values()).count(1.0) > 1: <add> vals = list(ex.reference.cats.values()) <add> if vals.count(1.0) > 1: <ide> raise ValueError(Errors.E895.format(value=ex.reference.cats)) <add> for val in vals: <add> if not (val == 1.0 or val == 0.0): <add> raise ValueError(Errors.E851.format(val=val)) <ide><path>spacy/pipeline/textcat_multilabel.py <ide> def initialize( # type: ignore[override] <ide> for label in labels: <ide> self.add_label(label) <ide> subbatch = list(islice(get_examples(), 10)) <add> self._validate_categories(subbatch) <add> <ide> doc_sample = [eg.reference for eg in subbatch] <ide> label_sample, _ = self._examples_to_truth(subbatch) <ide> self._require_labels() <ide> def initialize( # type: ignore[override] <ide> def _validate_categories(self, examples: Iterable[Example]): <ide> """This component allows any type of single- or multi-label annotations. <ide> This method overwrites the more strict one from 'textcat'.""" <del> pass <add> # check that annotation values are valid <add> for ex in examples: <add> for val in ex.reference.cats.values(): <add> if not (val == 1.0 or val == 0.0): <add> raise ValueError(Errors.E851.format(val=val)) <ide><path>spacy/tests/pipeline/test_textcat.py <ide> def test_label_types(name): <ide> nlp.initialize() <ide> <ide> <add>@pytest.mark.parametrize( <add> "name,get_examples", <add> [ <add> ("textcat", make_get_examples_single_label), <add> ("textcat_multilabel", make_get_examples_multi_label), <add> ], <add>) <add>def test_invalid_label_value(name, get_examples): <add> nlp = Language() <add> textcat = nlp.add_pipe(name) <add> example_getter = get_examples(nlp) <add> <add> def invalid_examples(): <add> # make one example with an invalid score <add> examples = example_getter() <add> ref = examples[0].reference <add> key = list(ref.cats.keys())[0] <add> ref.cats[key] = 2.0 <add> return examples <add> <add> with pytest.raises(ValueError): <add> nlp.initialize(get_examples=invalid_examples) <add> <add> <ide> @pytest.mark.parametrize("name", ["textcat", "textcat_multilabel"]) <ide> def test_no_label(name): <ide> nlp = Language()
4
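The rule the patch enforces can be restated as a small standalone check: every category score must be exactly 0.0 or 1.0, and a single-label example may carry at most one positive score. A plain-Python sketch (no spaCy imports; the function name is made up):

```python
def validate_cats(cats: dict) -> None:
    vals = list(cats.values())
    if vals.count(1.0) > 1:
        raise ValueError(f"expected at most one positive label, got {cats}")
    for val in vals:
        if val not in (0.0, 1.0):
            raise ValueError(f"label values must be 0 or 1, found {val!r}")


validate_cats({"POS": 1.0, "NEG": 0.0})       # passes
try:
    validate_cats({"POS": 2.0, "NEG": 0.0})   # rejected, mirrors the new E851 error
except ValueError as exc:
    print(exc)
```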
Ruby
Ruby
increase needs_network timeout and retry
844fa3bbd5c1114a2edc1d28eb3e54e263b2b18b
<ide><path>Library/Homebrew/test/spec_helper.rb <ide> config.default_retry_count = 2 <ide> <ide> config.around(:each, :needs_network) do |example| <del> example.run_with_retry retry: 3, retry_wait: 3 <add> example.metadata[:timeout] ||= 120 <add> example.run_with_retry retry: 5, retry_wait: 5 <ide> end <ide> end <ide>
1
Text
Text
add powershell oneliner to get windows version
5fa7c2ab940789065383fb9aa879a9f12506eb67
<ide><path>.github/ISSUE_TEMPLATE/1-bug-report.md <ide> repo. https://github.com/nodejs/help <ide> Please fill in as much of the template below as you're able. <ide> <ide> Version: output of `node -v` <del>Platform: output of `uname -a` (UNIX), or version and 32 or 64-bit (Windows) <add>Platform: output of `uname -a` (UNIX), or output of `"$([Environment]::OSVersion | ForEach-Object VersionString) $(if ([Environment]::Is64BitOperatingSystem) { "x64" } else { "x86" })"` in PowerShell console (Windows) <ide> Subsystem: if known, please specify affected core module name <ide> --> <ide>
1
Text
Text
fix createclass reference
8333b891e4fb8cc5f125b756372738100debc893
<ide><path>docs/docs/reference-react.md <ide> React components let you split the UI into independent, reusable pieces, and thi <ide> <ide> If you don't use ES6 classes, you may use this helper instead. <ide> <del> - [`createClass()`](#react.createclass) <add> - [`createClass()`](#createclass) <ide> <ide> ### Creating React Elements <ide>
1
Python
Python
allow "falsey" default arguments in cli parser
37473d2dad584e2156c2a9804f4f3539d23d0a9a
<ide><path>airflow/cli/cli_parser.py <ide> def error(self, message): <ide> self.exit(2, f'\n{self.prog} command error: {message}, see help above.\n') <ide> <ide> <add># Used in Arg to enable `None' as a distinct value from "not passed" <add>_UNSET = object() <add> <add> <ide> class Arg: <ide> """Class to keep information about command line argument""" <del> # pylint: disable=redefined-builtin <del> def __init__(self, flags=None, help=None, action=None, default=None, nargs=None, <del> type=None, choices=None, required=None, metavar=None): <add> # pylint: disable=redefined-builtin,unused-argument <add> def __init__(self, flags=_UNSET, help=_UNSET, action=_UNSET, default=_UNSET, nargs=_UNSET, type=_UNSET, <add> choices=_UNSET, required=_UNSET, metavar=_UNSET): <ide> self.flags = flags <del> self.help = help <del> self.action = action <del> self.default = default <del> self.nargs = nargs <del> self.type = type <del> self.choices = choices <del> self.required = required <del> self.metavar = metavar <del> # pylint: enable=redefined-builtin <add> self.kwargs = {} <add> for k, v in locals().items(): <add> if v is _UNSET: <add> continue <add> if k in ("self", "flags"): <add> continue <add> <add> self.kwargs[k] = v <add> # pylint: enable=redefined-builtin,unused-argument <add> <add> def add_to_parser(self, parser: argparse.ArgumentParser): <add> """Add this argument to an ArgumentParser""" <add> parser.add_argument(*self.flags, **self.kwargs) <ide> <ide> <ide> # Shared <ide> def __init__(self, flags=None, help=None, action=None, default=None, nargs=None, <ide> ("-t", "--migration-wait-timeout"), <ide> help="timeout to wait for db to migrate ", <ide> type=int, <del> default="0", <add> default=0, <ide> ) <ide> <ide> # webserver <ide> def _add_command( <ide> <ide> def _add_action_command(sub: ActionCommand, sub_proc: argparse.ArgumentParser) -> None: <ide> for arg in _sort_args(sub.args): <del> kwargs = { <del> k: v for k, v in vars(arg).items() if k != 'flags' and v <del> } <del> sub_proc.add_argument(*arg.flags, **kwargs) <add> arg.add_to_parser(sub_proc) <ide> sub_proc.set_defaults(func=sub.func) <ide> <ide> <ide><path>tests/cli/test_cli_parser.py <ide> # specific language governing permissions and limitations <ide> # under the License. <ide> <add>import argparse <ide> import re <ide> from collections import Counter <ide> from unittest import TestCase <ide> def test_subcommand_arg_flag_conflict(self): <ide> self.assertEqual([], conflict_short_option, <ide> f"Command group {group} function {com.name} have conflict " <ide> f"short option flags {conflict_short_option}") <add> <add> def test_falsy_default_value(self): <add> arg = cli_parser.Arg(("--test",), default=0, type=int) <add> parser = argparse.ArgumentParser() <add> arg.add_to_parser(parser) <add> <add> args = parser.parse_args(['--test', '10']) <add> self.assertEqual(args.test, 10) <add> <add> args = parser.parse_args([]) <add> self.assertEqual(args.test, 0)
2
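The fix rests on the sentinel-object pattern: a private `object()` marks "argument not passed", so explicitly passed falsy values (0, '', False, None) still reach argparse. A condensed, hypothetical version of the idea, not the full Airflow class:

```python
import argparse

_UNSET = object()   # unique marker: distinguishes "not passed" from any real value


class Arg:
    def __init__(self, flags, help=_UNSET, default=_UNSET, type=_UNSET):
        self.flags = flags
        candidates = {"help": help, "default": default, "type": type}
        self.kwargs = {k: v for k, v in candidates.items() if v is not _UNSET}

    def add_to_parser(self, parser: argparse.ArgumentParser) -> None:
        parser.add_argument(*self.flags, **self.kwargs)


parser = argparse.ArgumentParser()
Arg(("--timeout",), default=0, type=int).add_to_parser(parser)
print(parser.parse_args([]).timeout)                   # 0 -- the falsy default survives
print(parser.parse_args(["--timeout", "10"]).timeout)  # 10
```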
Javascript
Javascript
remove redundant assignment of isdestroyed
42fcb478c6fb0e42678836df2eee2896eaebd382
<ide><path>packages/container/lib/main.js <ide> define("container", <ide> @method destroy <ide> */ <ide> destroy: function() { <del> this.isDestroyed = true; <ide> <ide> for (var i=0, l=this.children.length; i<l; i++) { <ide> this.children[i].destroy();
1
PHP
PHP
fix error where checkboxes could not be unchecked
6911d4e565d4419c11f1baba198ff7ce6646ced2
<ide><path>src/View/Widget/Checkbox.php <ide> public function render(array $data, ContextInterface $context) { <ide> 'name' => '', <ide> 'value' => 1, <ide> 'val' => null, <del> 'checked' => false, <ide> 'disabled' => false, <ide> ]; <ide> if ($this->_isChecked($data)) { <ide> public function render(array $data, ContextInterface $context) { <ide> * @return bool <ide> */ <ide> protected function _isChecked($data) { <del> if (!empty($data['checked'])) { <del> return true; <add> if (array_key_exists('checked', $data)) { <add> return (bool)$data['checked']; <ide> } <ide> if ((string)$data['val'] === (string)$data['value']) { <ide> return true; <ide><path>tests/TestCase/View/Widget/CheckboxTest.php <ide> public function testRenderCheckedValue($checked) { <ide> $this->assertTags($result, $expected); <ide> } <ide> <add>/** <add> * Data provider for checkbox values <add> * <add> * @return array <add> */ <add> public static function uncheckedProvider() { <add> return [ <add> [''], <add> ['0'], <add> [0], <add> [false], <add> [null], <add> ]; <add> } <add> <add>/** <add> * Test rendering unchecked checkboxes <add> * <add> * @dataProvider uncheckedProvider <add> * @return void <add> */ <add> public function testRenderUnCheckedValue($checked) { <add> $checkbox = new Checkbox($this->templates); <add> $data = [ <add> 'name' => 'Comment[spam]', <add> 'value' => 1, <add> 'val' => 1, <add> 'checked' => $checked, <add> ]; <add> $result = $checkbox->render($data, $this->context); <add> $expected = [ <add> 'input' => [ <add> 'type' => 'checkbox', <add> 'name' => 'Comment[spam]', <add> 'value' => 1, <add> ] <add> ]; <add> $this->assertTags($result, $expected); <add> } <add> <ide> }
2
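The underlying bug is the usual "falsy vs. missing" confusion: `!empty()` treats an explicit 0/false the same as an absent key. The same distinction in Python terms (hypothetical data, not CakePHP code):

```python
data = {"checked": 0}   # the caller explicitly asked for an unchecked box

# Buggy shape: truthiness testing drops the explicit falsy answer.
if data.get("checked"):
    checked_buggy = True
else:
    checked_buggy = None          # "absent" and "explicitly 0" collapse together

# Fixed shape: test for presence first, then honour whatever was passed.
if "checked" in data:
    checked_fixed = bool(data["checked"])   # 0 -> False, 1 -> True
else:
    checked_fixed = None                    # key truly absent; decide another way

print(checked_buggy, checked_fixed)   # None False
```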
Ruby
Ruby
add original_fullpath to env
482ec2ac3f518c91544a3b3e92765eef41b91419
<ide><path>railties/lib/rails/application.rb <ide> def helpers_paths #:nodoc: <ide> config.helpers_paths <ide> end <ide> <add> def call(env) <add> env["ORIGINAL_FULLPATH"] = build_original_fullpath(env) <add> super(env) <add> end <add> <ide> protected <ide> <ide> alias :build_middleware_stack :app <ide> def initialize_console #:nodoc: <ide> require "rails/console/app" <ide> require "rails/console/helpers" <ide> end <add> <add> def build_original_fullpath(env) <add> path_info = env["PATH_INFO"] <add> query_string = env["QUERY_STRING"] <add> script_name = env["SCRIPT_NAME"] <add> <add> if query_string.present? <add> "#{script_name}#{path_info}?#{query_string}" <add> else <add> "#{script_name}#{path_info}" <add> end <add> end <ide> end <ide> end <ide><path>railties/test/application/build_original_fullpath_test.rb <add>require "abstract_unit" <add> <add>module ApplicationTests <add> class BuildOriginalPathTest < Test::Unit::TestCase <add> def test_include_original_PATH_info_in_ORIGINAL_FULLPATH <add> env = { 'PATH_INFO' => '/foo/' } <add> assert_equal "/foo/", Rails.application.send(:build_original_fullpath, env) <add> end <add> <add> def test_include_SCRIPT_NAME <add> env = { <add> 'SCRIPT_NAME' => '/foo', <add> 'PATH_INFO' => '/bar' <add> } <add> <add> assert_equal "/foo/bar", Rails.application.send(:build_original_fullpath, env) <add> end <add> <add> def test_include_QUERY_STRING <add> env = { <add> 'PATH_INFO' => '/foo', <add> 'QUERY_STRING' => 'bar', <add> } <add> assert_equal "/foo?bar", Rails.application.send(:build_original_fullpath, env) <add> end <add> end <add>end <ide><path>railties/test/application/middleware_test.rb <ide> require 'isolation/abstract_unit' <ide> require 'stringio' <add>require 'rack/test' <ide> <ide> module ApplicationTests <ide> class MiddlewareTest < Test::Unit::TestCase <ide> def app <ide> add_to_config "config.force_ssl = true" <ide> add_to_config "config.ssl_options = { :host => 'example.com' }" <ide> boot! <del> <add> <ide> assert_equal AppTemplate::Application.middleware.first.args, [{:host => 'example.com'}] <ide> end <ide> <ide> def index <ide> assert_equal nil, last_response.headers["Etag"] <ide> end <ide> <add> test "ORIGINAL_FULLPATH is passed to env" do <add> boot! <add> env = ::Rack::MockRequest.env_for("/foo/?something") <add> Rails.application.call(env) <add> <add> assert_equal "/foo/?something", env["ORIGINAL_FULLPATH"] <add> end <add> <ide> private <ide> <ide> def boot!
3
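The helper's contract is small enough to restate as a WSGI-flavoured Python function, which reproduces the three cases covered by the new tests (hypothetical code, not part of Rails):

```python
def build_original_fullpath(environ: dict) -> str:
    script_name = environ.get("SCRIPT_NAME", "")
    path_info = environ.get("PATH_INFO", "")
    query_string = environ.get("QUERY_STRING", "")
    if query_string:
        return f"{script_name}{path_info}?{query_string}"
    return f"{script_name}{path_info}"


print(build_original_fullpath({"PATH_INFO": "/foo/"}))                          # /foo/
print(build_original_fullpath({"SCRIPT_NAME": "/foo", "PATH_INFO": "/bar"}))    # /foo/bar
print(build_original_fullpath({"PATH_INFO": "/foo", "QUERY_STRING": "bar"}))    # /foo?bar
```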
Javascript
Javascript
fix arguments order of comparsion functions
d6cc6ab0376d5dd6a644313154ad84d94be58b2d
<ide><path>test/parallel/test-http2-origin.js <ide> const ca = readKey('fake-startcom-root-cert.pem', 'binary'); <ide> client.on('origin', mustCall((origins) => { <ide> const check = checks.shift(); <ide> originSet.push(...check); <del> deepStrictEqual(originSet, client.originSet); <add> deepStrictEqual(client.originSet, originSet); <ide> deepStrictEqual(origins, check); <ide> countdown.dec(); <ide> }, 2)); <ide> const ca = readKey('fake-startcom-root-cert.pem', 'binary'); <ide> <ide> client.on('origin', mustCall((origins) => { <ide> originSet.push(...check); <del> deepStrictEqual(originSet, client.originSet); <add> deepStrictEqual(client.originSet, originSet); <ide> deepStrictEqual(origins, check); <ide> client.close(); <ide> server.close(); <ide> const ca = readKey('fake-startcom-root-cert.pem', 'binary'); <ide> const client = connect(origin, { ca }); <ide> <ide> client.on('origin', mustCall((origins) => { <del> deepStrictEqual([origin, 'https://foo.org'], client.originSet); <add> deepStrictEqual(client.originSet, [origin, 'https://foo.org']); <ide> const req = client.request({ ':authority': 'foo.org' }); <ide> req.on('response', mustCall((headers) => { <del> strictEqual(421, headers[':status']); <del> deepStrictEqual([origin], client.originSet); <add> strictEqual(headers[':status'], 421); <add> deepStrictEqual(client.originSet, [origin]); <ide> })); <ide> req.resume(); <ide> req.on('close', mustCall(() => {
1
Text
Text
encourage use of ruby 1.9 syntax [ci skip]
65be1a0e7a93b2f4e308739d55516be0c58543df
<ide><path>guides/source/contributing_to_ruby_on_rails.md <ide> TIP: Changes that are cosmetic in nature and do not add anything substantial to <ide> <ide> Rails follows a simple set of coding style conventions. <ide> <add>* Use Ruby >= 1.9 syntax. Prefer `{ a: :b }` over `{ :a => :b }`. <ide> * Two spaces, no tabs (for indentation). <ide> * No trailing whitespace. Blank lines should not have any spaces. <ide> * Indent after private/protected.
1
Ruby
Ruby
remove unused require
d7d2f2bb15646d5ed80e6aa225dfc0426b6a88ef
<ide><path>activesupport/lib/active_support/core_ext/date/conversions.rb <ide> require 'date' <ide> require 'active_support/inflector/methods' <ide> require 'active_support/core_ext/date/zones' <del>require 'active_support/core_ext/module/remove_method' <ide> <ide> class Date <ide> DATE_FORMATS = {
1
Python
Python
set version to v2.2.0.dev12
d05eb56ce20195d841fbe817ab6aaf350bc5935a
<ide><path>spacy/about.py <ide> # fmt: off <ide> __title__ = "spacy" <del>__version__ = "2.2.0.dev11" <add>__version__ = "2.2.0.dev12" <ide> __summary__ = "Industrial-strength Natural Language Processing (NLP) in Python" <ide> __uri__ = "https://spacy.io" <ide> __author__ = "Explosion"
1
Text
Text
fix typo in changelog
9f9e3c0653cdeef597e0f7bc2f5db9d85f8f2ffc
<ide><path>CHANGELOG.md <ide> release. <ide> <tr> <ide> <td valign="top"> <ide> <b><a href="doc/changelogs/CHANGELOG_V8.md#8.1.2">8.1.2</a></b><br/> <del><a href="doc/changelogs/CHANGELOG_V8.md#8.1.1">8.1.1</a>><br/> <add><a href="doc/changelogs/CHANGELOG_V8.md#8.1.1">8.1.1</a><br/> <ide> <a href="doc/changelogs/CHANGELOG_V8.md#8.1.0">8.1.0</a><br/> <ide> <a href="doc/changelogs/CHANGELOG_V8.md#8.0.0">8.0.0</a><br/> <ide> </td>
1
PHP
PHP
copy the connection option onto a property
2b0aadad7ef7c09080ec27ef39a1535ff4ba29f2
<ide><path>src/Console/Command/Task/BakeTask.php <ide> class BakeTask extends Shell { <ide> */ <ide> public $connection = null; <ide> <del>/** <del> * Flag for interactive mode <del> * <del> * @var boolean <del> */ <del> public $interactive = false; <del> <ide> /** <ide> * Disable caching and enable debug for baking. <ide> * This forces the most current database schema to be used. <ide> public function execute() { <ide> if (isset($this->params['plugin'])) { <ide> $this->plugin = $this->params['plugin']; <ide> } <add> if (isset($this->params['connection'])) { <add> $this->connection = $this->params['connection']; <add> } <ide> } <ide> <ide> }
1
Text
Text
complete the document content
1ebb68053e17f9b88f1265390acdce20214f2afc
<ide><path>docs/reference/commandline/plugin_create.md <ide> $ ls -ls /home/pluginDir <ide> $ docker plugin create plugin /home/pluginDir <ide> plugin <ide> <del>NAME TAG DESCRIPTION ENABLED <del>plugin latest A sample plugin for Docker true <add>$ docker plugin ls <add>ID NAME TAG DESCRIPTION ENABLED <add>672d8144ec02 plugin latest A sample plugin for Docker false <ide> ``` <ide> <ide> The plugin can subsequently be enabled for local use or pushed to the public registry.
1
Mixed
Javascript
remove unnecessary await with return instances
929f343bb3c2f7847711fed7ddb79ec5cf97f9b7
<ide><path>doc/api/util.md <ide> For example: <ide> const util = require('util'); <ide> <ide> async function fn() { <del> return await Promise.resolve('hello world'); <add> return 'hello world'; <ide> } <ide> const callbackFunction = util.callbackify(fn); <ide> <ide><path>test/common/inspector-helper.js <ide> class NodeInstance { <ide> console.log('[test]', 'Connecting to a child Node process'); <ide> const response = await this.httpGet(null, '/json/list'); <ide> const url = response[0]['webSocketDebuggerUrl']; <del> return await this.wsHandshake(url); <add> return this.wsHandshake(url); <ide> } <ide> <ide> expectShutdown() { <ide><path>test/parallel/test-util-callbackify.js <ide> const values = [ <ide> for (const value of values) { <ide> // Test and `async function` <ide> async function asyncFn() { <del> return await Promise.resolve(value); <add> return value; <ide> } <ide> <ide> const cbAsyncFn = callbackify(asyncFn); <ide> const values = [ <ide> for (const value of values) { <ide> // Test an `async function` <ide> async function asyncFn() { <del> return await Promise.reject(value); <add> return Promise.reject(value); <ide> } <ide> <ide> const cbAsyncFn = callbackify(asyncFn); <ide> const values = [ <ide> for (const value of values) { <ide> async function asyncFn(arg) { <ide> assert.strictEqual(arg, value); <del> return await Promise.resolve(arg); <add> return arg; <ide> } <ide> <ide> const cbAsyncFn = callbackify(asyncFn); <ide> const values = [ <ide> const iAmThat = { <ide> async fn(arg) { <ide> assert.strictEqual(this, iAmThat); <del> return await Promise.resolve(arg); <add> return arg; <ide> }, <ide> }; <ide> iAmThat.cbFn = callbackify(iAmThat.fn); <ide> const values = [ <ide> <ide> { <ide> async function asyncFn() { <del> return await Promise.resolve(42); <add> return 42; <ide> } <ide> <ide> const cb = callbackify(asyncFn);
3
Ruby
Ruby
use latches rather than mucking with the scheduler
e5f0c0c548fd68a35902d4106130f099427aa98b
<ide><path>activerecord/test/cases/base_test.rb <ide> require "cases/helper" <add>require 'active_support/concurrency/latch' <ide> require 'models/post' <ide> require 'models/author' <ide> require 'models/topic' <ide> def test_default_values_are_deeply_dupped <ide> orig_handler = klass.connection_handler <ide> new_handler = ActiveRecord::ConnectionAdapters::ConnectionHandler.new <ide> after_handler = nil <del> is_set = false <add> latch1 = ActiveSupport::Concurrency::Latch.new <add> latch2 = ActiveSupport::Concurrency::Latch.new <ide> <ide> t = Thread.new do <ide> klass.connection_handler = new_handler <del> is_set = true <del> Thread.stop <add> latch1.release <add> latch2.await <ide> after_handler = klass.connection_handler <ide> end <ide> <del> while(!is_set) <del> Thread.pass <del> end <add> latch1.await <ide> <ide> klass.connection_handler = orig_handler <del> t.wakeup <add> latch2.release <ide> t.join <ide> <ide> assert_equal after_handler, new_handler
1
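The pattern being adopted here, parking a thread on a latch instead of polling a flag with `Thread.pass` and `Thread.stop`, maps directly onto `threading.Event` in Python. A small illustrative sketch (not Rails code):

```python
import threading

ready = threading.Event()    # plays the role of latch1
release = threading.Event()  # plays the role of latch2
log = []


def worker():
    log.append("worker: set up")
    ready.set()        # announce readiness; no busy-wait loop on the other side
    release.wait()     # block here until the main thread releases the latch
    log.append("worker: finished")


t = threading.Thread(target=worker)
t.start()

ready.wait()           # replaces `while not is_set: Thread.pass`
log.append("main: saw worker ready")
release.set()          # replaces Thread#wakeup
t.join()
print(log)  # ['worker: set up', 'main: saw worker ready', 'worker: finished']
```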
Text
Text
fix typo in code example of 'path' module
14b0b4463e9dd2ba81e7c67b1c4a66f429ed7f0a
<ide><path>doc/api/path.md <ide> On POSIX: <ide> <ide> ```js <ide> path.basename('C:\\temp\\myfile.html'); <del>// Returns: 'C:\temp\myfile.html' <add>// Returns: 'C:\\temp\\myfile.html' <ide> ``` <ide> <ide> On Windows:
1
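Python's standard library makes the same platform split easy to see: the POSIX flavour treats a backslash as an ordinary character, so the whole Windows-style string comes back, while the Windows flavour recognises it as a separator.

```python
import ntpath
import posixpath

windows_style = "C:\\temp\\myfile.html"
print(posixpath.basename(windows_style))  # C:\temp\myfile.html
print(ntpath.basename(windows_style))     # myfile.html
```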
PHP
PHP
fix cs errors
7ba2e7d3cea2e4979d182f736a0d6acce5523e2b
<ide><path>src/I18n/DateFormatTrait.php <ide> public function __debugInfo(): array <ide> return [ <ide> 'time' => $this->format('Y-m-d\TH:i:s.uP'), <ide> 'timezone' => $this->getTimezone()->getName(), <del> 'fixedNowTime' => static::hasTestNow() ? static::getTestNow()->format('Y-m-d\TH:i:s.uP') : false <add> 'fixedNowTime' => static::hasTestNow() ? static::getTestNow()->format('Y-m-d\TH:i:s.uP') : false, <ide> ]; <ide> } <ide> } <ide><path>tests/TestCase/Database/Schema/MysqlSchemaTest.php <ide> public static function convertColumnProvider() <ide> ], <ide> [ <ide> 'VARCHAR(255)', <del> ['type' => 'string', 'length' => 255, 'collate' => 'utf8_general_ci'] <add> ['type' => 'string', 'length' => 255, 'collate' => 'utf8_general_ci'], <ide> ], <ide> [ <ide> 'CHAR(25)', <del> ['type' => 'string', 'length' => 25, 'fixed' => true, 'collate' => 'utf8_general_ci'] <add> ['type' => 'string', 'length' => 25, 'fixed' => true, 'collate' => 'utf8_general_ci'], <ide> ], <ide> [ <ide> 'CHAR(36)', <ide> public static function convertColumnProvider() <ide> ], <ide> [ <ide> 'TEXT', <del> ['type' => 'text', 'length' => null, 'collate' => 'utf8_general_ci'] <add> ['type' => 'text', 'length' => null, 'collate' => 'utf8_general_ci'], <ide> ], <ide> [ <ide> 'TINYTEXT', <del> ['type' => 'text', 'length' => TableSchema::LENGTH_TINY, 'collate' => 'utf8_general_ci'] <add> ['type' => 'text', 'length' => TableSchema::LENGTH_TINY, 'collate' => 'utf8_general_ci'], <ide> ], <ide> [ <ide> 'MEDIUMTEXT', <del> ['type' => 'text', 'length' => TableSchema::LENGTH_MEDIUM, 'collate' => 'utf8_general_ci'] <add> ['type' => 'text', 'length' => TableSchema::LENGTH_MEDIUM, 'collate' => 'utf8_general_ci'], <ide> ], <ide> [ <ide> 'LONGTEXT', <del> ['type' => 'text', 'length' => TableSchema::LENGTH_LONG, 'collate' => 'utf8_general_ci'] <add> ['type' => 'text', 'length' => TableSchema::LENGTH_LONG, 'collate' => 'utf8_general_ci'], <ide> ], <ide> [ <ide> 'TINYBLOB', <ide><path>tests/TestCase/Database/Schema/PostgresSchemaTest.php <ide> public static function convertColumnProvider() <ide> // String <ide> [ <ide> ['type' => 'VARCHAR'], <del> ['type' => 'string', 'length' => null, 'collate' => 'ja_JP.utf8'] <add> ['type' => 'string', 'length' => null, 'collate' => 'ja_JP.utf8'], <ide> ], <ide> [ <ide> ['type' => 'VARCHAR(10)'], <del> ['type' => 'string', 'length' => 10, 'collate' => 'ja_JP.utf8'] <add> ['type' => 'string', 'length' => 10, 'collate' => 'ja_JP.utf8'], <ide> ], <ide> [ <ide> ['type' => 'CHARACTER VARYING'], <del> ['type' => 'string', 'length' => null, 'collate' => 'ja_JP.utf8'] <add> ['type' => 'string', 'length' => null, 'collate' => 'ja_JP.utf8'], <ide> ], <ide> [ <ide> ['type' => 'CHARACTER VARYING(10)'], <del> ['type' => 'string', 'length' => 10, 'collate' => 'ja_JP.utf8'] <add> ['type' => 'string', 'length' => 10, 'collate' => 'ja_JP.utf8'], <ide> ], <ide> [ <ide> ['type' => 'CHARACTER VARYING(255)', 'default' => 'NULL::character varying'], <del> ['type' => 'string', 'length' => 255, 'default' => null, 'collate' => 'ja_JP.utf8'] <add> ['type' => 'string', 'length' => 255, 'default' => null, 'collate' => 'ja_JP.utf8'], <ide> ], <ide> [ <ide> ['type' => 'CHAR(10)'], <del> ['type' => 'string', 'fixed' => true, 'length' => 10, 'collate' => 'ja_JP.utf8'] <add> ['type' => 'string', 'fixed' => true, 'length' => 10, 'collate' => 'ja_JP.utf8'], <ide> ], <ide> [ <ide> ['type' => 'CHAR(36)'], <del> ['type' => 'string', 'fixed' => true, 'length' => 36, 'collate' => 'ja_JP.utf8'] <add> ['type' => 'string', 'fixed' => true, 'length' => 
36, 'collate' => 'ja_JP.utf8'], <ide> ], <ide> [ <ide> ['type' => 'CHARACTER(10)'], <del> ['type' => 'string', 'fixed' => true, 'length' => 10, 'collate' => 'ja_JP.utf8'] <add> ['type' => 'string', 'fixed' => true, 'length' => 10, 'collate' => 'ja_JP.utf8'], <ide> ], <ide> [ <ide> ['type' => 'MONEY'], <ide> public static function convertColumnProvider() <ide> // Text <ide> [ <ide> ['type' => 'TEXT'], <del> ['type' => 'text', 'length' => null, 'collate' => 'ja_JP.utf8'] <add> ['type' => 'text', 'length' => null, 'collate' => 'ja_JP.utf8'], <ide> ], <ide> // Blob <ide> [ <ide><path>tests/TestCase/Database/Schema/SqlserverSchemaTest.php <ide> public static function convertColumnProvider() <ide> null, <ide> null, <ide> null, <del> ['type' => 'string', 'length' => 255, 'collate' => 'Japanese_Unicode_CI_AI'] <add> ['type' => 'string', 'length' => 255, 'collate' => 'Japanese_Unicode_CI_AI'], <ide> ], <ide> [ <ide> 'VARCHAR', <ide> 10, <ide> null, <ide> null, <del> ['type' => 'string', 'length' => 10, 'collate' => 'Japanese_Unicode_CI_AI'] <add> ['type' => 'string', 'length' => 10, 'collate' => 'Japanese_Unicode_CI_AI'], <ide> ], <ide> [ <ide> 'NVARCHAR', <ide> 50, <ide> null, <ide> null, <ide> // Sqlserver returns double lengths for unicode columns <del> ['type' => 'string', 'length' => 25, 'collate' => 'Japanese_Unicode_CI_AI'] <add> ['type' => 'string', 'length' => 25, 'collate' => 'Japanese_Unicode_CI_AI'], <ide> ], <ide> [ <ide> 'CHAR', <ide> 10, <ide> null, <ide> null, <del> ['type' => 'string', 'fixed' => true, 'length' => 10, 'collate' => 'Japanese_Unicode_CI_AI'] <add> ['type' => 'string', 'fixed' => true, 'length' => 10, 'collate' => 'Japanese_Unicode_CI_AI'], <ide> ], <ide> [ <ide> 'NCHAR', <ide> 10, <ide> null, <ide> null, <ide> // SQLServer returns double length for unicode columns. <del> ['type' => 'string', 'fixed' => true, 'length' => 5, 'collate' => 'Japanese_Unicode_CI_AI'] <add> ['type' => 'string', 'fixed' => true, 'length' => 5, 'collate' => 'Japanese_Unicode_CI_AI'], <ide> ], <ide> [ <ide> 'UNIQUEIDENTIFIER', <ide> public static function convertColumnProvider() <ide> null, <ide> null, <ide> null, <del> ['type' => 'text', 'length' => null, 'collate' => 'Japanese_Unicode_CI_AI'] <add> ['type' => 'text', 'length' => null, 'collate' => 'Japanese_Unicode_CI_AI'], <ide> ], <ide> [ <ide> 'REAL', <ide> public static function convertColumnProvider() <ide> -1, <ide> null, <ide> null, <del> ['type' => 'text', 'length' => null, 'collate' => 'Japanese_Unicode_CI_AI'] <add> ['type' => 'text', 'length' => null, 'collate' => 'Japanese_Unicode_CI_AI'], <ide> ], <ide> [ <ide> 'IMAGE', <ide> public static function convertColumnProvider() <ide> -1, <ide> null, <ide> null, <del> ['type' => 'binary', 'length' => TableSchema::LENGTH_LONG] <add> ['type' => 'binary', 'length' => TableSchema::LENGTH_LONG], <ide> ], <ide> ]; <ide> } <ide><path>tests/TestCase/I18n/TimeTest.php <ide> public function testDebugInfo($class) <ide> $expected = [ <ide> 'time' => '2014-04-20T10:10:10.000000+00:00', <ide> 'timezone' => 'UTC', <del> 'fixedNowTime' => $class::getTestNow()->format('Y-m-d\TH:i:s.uP') <add> 'fixedNowTime' => $class::getTestNow()->format('Y-m-d\TH:i:s.uP'), <ide> ]; <ide> $this->assertEquals($expected, $time->__debugInfo()); <ide> }
5
Javascript
Javascript
delete an unused argument
2e91d8d4839f589159d438402cb164d3409ca919
<ide><path>tools/jslint.js <ide> if (cluster.isMaster) { <ide> <ide> if (showProgress) { <ide> // Start the progress display update timer when the first worker is ready <del> cluster.once('online', function(worker) { <add> cluster.once('online', function() { <ide> startTime = process.hrtime(); <ide> setInterval(printProgress, 1000).unref(); <ide> printProgress();
1
Javascript
Javascript
remove modifcation to common.port
bddfe63049c4c0664ef6c7c4458bd373f1e02e4a
<ide><path>test/fixtures/clustered-server/app.js <ide> if (cluster.isMaster) { <ide> } <ide> } else { <ide> var server = http.createServer(handleRequest); <del> server.listen(common.PORT+1000); <add> server.listen(common.PORT); <ide> } <ide><path>test/parallel/test-debug-port-cluster.js <ide> const common = require('../common'); <ide> const assert = require('assert'); <ide> const spawn = require('child_process').spawn; <ide> <del>const PORT_MIN = common.PORT; <add>const PORT_MIN = common.PORT + 1; // The fixture uses common.PORT. <ide> const PORT_MAX = PORT_MIN + 2; <ide> <ide> const args = [ <ide><path>test/parallel/test-debug-signal-cluster.js <ide> var common = require('../common'); <ide> var assert = require('assert'); <ide> var spawn = require('child_process').spawn; <ide> <del>var port = common.PORT + 42; <add>var port = common.PORT + 1; // The fixture uses common.PORT. <ide> var args = ['--debug-port=' + port, <ide> common.fixturesDir + '/clustered-server/app.js']; <ide> var options = { stdio: ['inherit', 'inherit', 'pipe', 'ipc'] };
3
PHP
PHP
get debugger tests passing with minor updates
65123b3aa097ff863cd8873ca690e200cb67e051
<ide><path>src/Error/Debugger.php <ide> use Cake\Error\DumpNode\PropertyNode; <ide> use Cake\Error\DumpNode\ReferenceNode; <ide> use Cake\Error\DumpNode\ScalarNode; <add>use Cake\Error\DumpNode\SpecialNode; <ide> use Cake\Log\Log; <ide> use Cake\Utility\Hash; <ide> use Cake\Utility\Security; <ide> protected static function _highlight(string $str): string <ide> public static function exportVar($var, int $maxDepth = 3): string <ide> { <ide> $context = new DumpContext($maxDepth); <del> $node = static::_export($var, $context); <add> $node = static::export($var, $context); <ide> <ide> $formatter = new TextFormatter(); <ide> return $formatter->dump($node); <ide> public static function exportVar($var, int $maxDepth = 3): string <ide> * @param \Cake\Error\DumpContext $context Dump context <ide> * @return \Cake\Error\DumpNode\NodeInterface The dumped variable. <ide> */ <del> protected static function _export($var, DumpContext $context): NodeInterface <add> protected static function export($var, DumpContext $context): NodeInterface <ide> { <ide> switch (static::getType($var)) { <ide> case 'boolean': <ide> protected static function _export($var, DumpContext $context): NodeInterface <ide> case 'string': <ide> return new ScalarNode('string', $var); <ide> case 'array': <del> return static::_array($var, $context->withAddedDepthAndIndent()); <add> return static::exportArray($var, $context->withAddedDepth()); <ide> case 'resource': <ide> return new ScalarNode('resource', gettype($var)); <ide> case 'null': <ide> return new ScalarNode('null', null); <ide> case 'unknown': <del> return new ScalarNode('unknown', null); <add> return new SpecialNode('(unknown)'); <ide> default: <del> return static::_object($var, $context->withAddedDepthAndIndent()); <add> return static::exportObject($var, $context->withAddedDepth()); <ide> } <ide> } <ide> <ide> protected static function _export($var, DumpContext $context): NodeInterface <ide> * @param \Cake\Error\DumpContext $context The current dump context. <ide> * @return \Cake\Error\DumpNode\ArrayNode Exported array. <ide> */ <del> protected static function _array(array $var, DumpContext $context): ArrayNode <add> protected static function exportArray(array $var, DumpContext $context): ArrayNode <ide> { <ide> $items = []; <ide> <ide> $remaining = $context->remainingDepth(); <ide> if ($remaining >= 0) { <ide> $outputMask = (array)static::outputMask(); <ide> foreach ($var as $key => $val) { <del> // Sniff for globals as !== explodes in < 5.4 <del> if ($key === 'GLOBALS' && is_array($val) && isset($val['GLOBALS'])) { <del> $node = new ScalarNode('string', '[recursion]'); <del> } elseif (array_key_exists($key, $outputMask)) { <add> if (array_key_exists($key, $outputMask)) { <ide> $node = new ScalarNode('string', $outputMask[$key]); <ide> } elseif ($val !== $var) { <del> $node = static::_export($val, $context); <add> // Dump all the items without increasing depth. <add> $node = static::export($val, $context); <add> } else { <add> // Likely recursion, so we increase depth. 
<add> $node = static::export($val, $context->withAddedDepth()); <ide> } <del> $items[] = new ItemNode($key, $node); <add> $items[] = new ItemNode(static::export($key, $context), $node); <ide> } <ide> } else { <del> $items[] = new ItemNode('', new ScalarNode('string', '[maximum depth reached]')); <add> $items[] = new ItemNode( <add> new ScalarNode('string', ''), <add> new SpecialNode('[maximum depth reached]') <add> ); <ide> } <ide> <ide> return new ArrayNode($items); <ide> protected static function _array(array $var, DumpContext $context): ArrayNode <ide> * <ide> * @param object $var Object to convert. <ide> * @param \Cake\Error\DumpContext $context The dump context. <del> * @return ClassNode|ReferenceNode <add> * @return \Cake\Error\DumpNode\NodeInterface <ide> * @see \Cake\Error\Debugger::exportVar() <ide> */ <del> protected static function _object(object $var, DumpContext $context): NodeInterface <add> protected static function exportObject(object $var, DumpContext $context): NodeInterface <ide> { <ide> $isRef = $context->hasReference($var); <ide> $refNum = $context->getReferenceId($var); <ide> protected static function _object(object $var, DumpContext $context): NodeInterf <ide> $node = new ClassNode($className, $refNum); <ide> <ide> $remaining = $context->remainingDepth(); <del> if ($remaining > 0 && $isRef === false) { <add> if ($remaining > 0) { <ide> if (method_exists($var, '__debugInfo')) { <ide> try { <ide> foreach ($var->__debugInfo() as $key => $val) { <del> $node->addProperty(new PropertyNode($key, null, static::_export($val, $context))); <add> $node->addProperty(new PropertyNode("'{$key}'", null, static::export($val, $context))); <ide> } <ide> } catch (Exception $e) { <ide> $message = $e->getMessage(); <ide> <del> return new ScalarNode('string', "(unable to export object: $message)"); <add> return new SpecialNode("(unable to export object: $message)"); <ide> } <ide> } <ide> <ide> protected static function _object(object $var, DumpContext $context): NodeInterf <ide> $value = $outputMask[$key]; <ide> } <ide> $node->addProperty( <del> new PropertyNode($key, 'public', static::_export($value, $context->withAddedDepth())) <add> new PropertyNode($key, 'public', static::export($value, $context->withAddedDepth())) <ide> ); <ide> } <ide> <ide> protected static function _object(object $var, DumpContext $context): NodeInterf <ide> $value = $reflectionProperty->getValue($var); <ide> <ide> $node->addProperty( <del> new PropertyNode($key, $visibility, static::_export($value, $context->withAddedDepth())) <add> new PropertyNode( <add> $reflectionProperty->getName(), <add> $visibility, <add> static::export($value, $context->withAddedDepth()) <add> ) <ide> ); <ide> } <ide> } <ide><path>src/Error/DumpContext.php <ide> /** <ide> * Context tracking for Debugger::exportVar() <ide> * <del> * This class is used by Debugger to track element depth, <del> * and indentation. In the longer term indentation should be extracted <del> * into a formatter (cli/html). <add> * This class is used by Debugger to track element depth, and <add> * prevent cyclic references from being traversed multiple times. 
<ide> * <ide> * @internal <ide> */ <ide> class DumpContext <ide> */ <ide> private $depth = 0; <ide> <del> /** <del> * @var int <del> */ <del> private $indent = 0; <del> <ide> /** <ide> * @var \SplObjectStorage <ide> */ <ide> public function withAddedDepth() <ide> return $new; <ide> } <ide> <del> /** <del> * Return a clone with increased depth and indent <del> * <del> * @return static <del> */ <del> public function withAddedDepthAndIndent() <del> { <del> $new = clone $this; <del> $new->depth += 1; <del> $new->indent += 1; <del> <del> return $new; <del> } <del> <del> /** <del> * Get the current indent level. <del> * <del> * @return int <del> */ <del> public function getIndent(): int <del> { <del> return $this->indent; <del> } <del> <ide> /** <ide> * Get the remaining depth levels <ide> * <ide><path>src/Error/DumpFormatter/TextFormatter.php <ide> use Cake\Error\DumpNode\NodeInterface; <ide> use Cake\Error\DumpNode\ReferenceNode; <ide> use Cake\Error\DumpNode\ScalarNode; <add>use Cake\Error\DumpNode\SpecialNode; <ide> use RuntimeException; <ide> <ide> class TextFormatter <ide> public function dump(NodeInterface $node): string <ide> return $this->_export($node, $indent); <ide> } <ide> <del> protected function _export($var, int $indent): string <add> protected function _export(NodeInterface $var, int $indent): string <ide> { <ide> if ($var instanceof ScalarNode) { <ide> switch ($var->getType()) { <ide> protected function _export($var, int $indent): string <ide> if ($var instanceof ClassNode || $var instanceof ReferenceNode) { <ide> return $this->_object($var, $indent + 1); <ide> } <add> if ($var instanceof SpecialNode) { <add> return (string)$var->getValue(); <add> } <ide> throw new RuntimeException('Unknown node received ' . get_class($var)); <ide> } <ide> <ide> protected function _export($var, int $indent): string <ide> protected function _array(ArrayNode $var, int $indent): string <ide> { <ide> $out = '['; <del> $break = $end = ''; <del> if (!empty($var)) { <del> $break = "\n" . str_repeat("\t", $indent); <del> $end = "\n" . str_repeat("\t", $indent - 1); <del> } <add> $break = "\n" . str_repeat(" ", $indent); <add> $end = "\n" . str_repeat(" ", $indent - 1); <ide> $vars = []; <ide> <ide> foreach ($var->getChildren() as $item) { <ide> $val = $item->getValue(); <del> // Sniff for globals as !== explodes in < 5.4 <del> if ($item->getKey() === 'GLOBALS' && is_array($val) && isset($val['GLOBALS'])) { <del> $val = '[recursion]'; <del> } <del> $vars[] = $break . $item->getKey() . ' => ' . $this->_export($val, $indent); <add> $vars[] = $break . $this->_export($item->getKey(), $indent) . ' => ' . $this->_export($val, $indent); <add> } <add> if (count($vars)) { <add> return $out . implode(',', $vars) . $end . ']'; <ide> } <ide> <del> return $out . implode(',', $vars) . $end . ']'; <add> return $out . ']'; <ide> } <ide> <ide> /** <ide> protected function _object($var, int $indent): string <ide> } <ide> <ide> /* @var \Cake\Error\DumpNode\ClassNode $var */ <del> $out .= "object({$var->getClass()}) id:{$var->getId()} {"; <del> $break = "\n" . str_repeat("\t", $indent); <del> $end = "\n" . str_repeat("\t", $indent - 1); <add> $out .= "object({$var->getValue()}) id:{$var->getId()} {"; <add> $break = "\n" . str_repeat(" ", $indent); <add> $end = "\n" . str_repeat(" ", $indent - 1) . 
'}'; <ide> <ide> foreach ($var->getChildren() as $property) { <ide> $visibility = $property->getVisibility(); <ide> $name = $property->getName(); <del> if ($visibility) { <add> if ($visibility && $visibility !== 'public') { <ide> $props[] = "[{$visibility}] {$name} => " . $this->_export($property->getValue(), $indent); <ide> } else { <ide> $props[] = "{$name} => " . $this->_export($property->getValue(), $indent); <ide> } <ide> } <add> if (count($props)) { <add> return $out . $break . implode($break, $props) . $end; <add> } <ide> <del> return $out . $break . implode($break, $props) . $end; <add> return $out . '}'; <ide> } <ide> } <ide><path>src/Error/DumpNode/ArrayNode.php <ide> <ide> class ArrayNode implements NodeInterface <ide> { <add> /** <add> * @var \Cake\Error\DumpNode\ItemNode[] <add> */ <ide> private $items; <ide> <ide> public function __construct(array $items = []) <ide> public function __construct(array $items = []) <ide> } <ide> } <ide> <del> public function add(ItemNode $node) <add> public function add(ItemNode $node): void <ide> { <ide> $this->items[] = $node; <del> <del> return $this; <ide> } <ide> <del> public function getValue() <add> public function getValue(): array <ide> { <ide> return $this->items; <ide> } <ide><path>src/Error/DumpNode/ClassNode.php <ide> <ide> class ClassNode implements NodeInterface <ide> { <add> /** <add> * @var string <add> */ <ide> private $class; <add> <add> /** <add> * @var int <add> */ <ide> private $id; <add> <add> /** <add> * @var \Cake\Error\DumpNode\PropertyNode[] <add> */ <ide> private $properties = []; <ide> <ide> public function __construct(string $class, int $id) <ide> public function __construct(string $class, int $id) <ide> public function addProperty(PropertyNode $node) <ide> { <ide> $this->properties[] = $node; <del> <del> return $this; <ide> } <ide> <del> public function getClass(): string <add> public function getValue(): string <ide> { <ide> return $this->class; <ide> } <ide><path>src/Error/DumpNode/ItemNode.php <ide> class ItemNode implements NodeInterface <ide> private $key; <ide> private $value; <ide> <del> public function __construct($key, NodeInterface $value) <add> public function __construct(NodeInterface $key, NodeInterface $value) <ide> { <ide> $this->key = $key; <ide> $this->value = $value; <ide><path>src/Error/DumpNode/NodeInterface.php <ide> interface NodeInterface <ide> { <ide> public function getChildren(): array; <add> <add> public function getValue(); <ide> } <ide><path>src/Error/DumpNode/PropertyNode.php <ide> public function __construct(string $name, ?string $visibility, NodeInterface $va <ide> $this->value = $value; <ide> } <ide> <del> public function getValue() <add> public function getValue(): NodeInterface <ide> { <ide> return $this->value; <ide> } <ide><path>src/Error/DumpNode/ScalarNode.php <ide> <ide> class ScalarNode implements NodeInterface <ide> { <add> /** <add> * @var string <add> */ <ide> private $type; <add> <add> /** <add> * @var string|int|float|bool|null <add> */ <ide> private $value; <ide> <ide> public function __construct(string $type, $value) <ide><path>src/Error/DumpNode/SpecialNode.php <add><?php <add>declare(strict_types=1); <add> <add>namespace Cake\Error\DumpNode; <add> <add>class SpecialNode implements NodeInterface <add>{ <add> /** <add> * @var string <add> */ <add> private $value; <add> <add> public function __construct(string $value) <add> { <add> $this->value = $value; <add> } <add> <add> public function getValue(): string <add> { <add> return $this->value; <add> } <add> <add> public 
function getChildren(): array <add> { <add> return []; <add> } <add>} <ide><path>tests/TestCase/Error/DebuggerTest.php <ide> public function testExportVar() <ide> $result = Debugger::exportVar($View); <ide> $expected = <<<TEXT <ide> object(Cake\View\View) id:0 { <del> Html => object(Cake\View\Helper\HtmlHelper) id:1 {} <del> Form => object(Cake\View\Helper\FormHelper) id:2 {} <del> int => (int) 2 <del> float => (float) 1.333 <del> string => ' ' <del> [protected] _helpers => object(Cake\View\HelperRegistry) id:3 {} <del> [protected] Blocks => object(Cake\View\ViewBlock) id:4 {} <del> [protected] plugin => null <del> [protected] name => '' <del> [protected] helpers => [ <del> (int) 0 => 'Html', <del> (int) 1 => 'Form' <del> ] <del> [protected] templatePath => null <del> [protected] template => null <del> [protected] layout => 'default' <del> [protected] layoutPath => '' <del> [protected] autoLayout => true <del> [protected] viewVars => [] <del> [protected] _ext => '.php' <del> [protected] subDir => '' <del> [protected] theme => null <del> [protected] request => object(Cake\Http\ServerRequest) id:5 {} <del> [protected] response => object(Cake\Http\Response) id:6 {} <del> [protected] elementCache => 'default' <del> [protected] _passedVars => [ <del> (int) 0 => 'viewVars', <del> (int) 1 => 'autoLayout', <del> (int) 2 => 'helpers', <del> (int) 3 => 'template', <del> (int) 4 => 'layout', <del> (int) 5 => 'name', <del> (int) 6 => 'theme', <del> (int) 7 => 'layoutPath', <del> (int) 8 => 'templatePath', <del> (int) 9 => 'plugin' <del> ] <del> [protected] _defaultConfig => [] <del> [protected] _paths => [] <del> [protected] _pathsForPlugin => [] <del> [protected] _parents => [] <del> [protected] _current => null <del> [protected] _currentType => '' <del> [protected] _stack => [] <del> [protected] _viewBlockClass => 'Cake\View\ViewBlock' <del> [protected] _eventManager => object(Cake\Event\EventManager) id:7 {} <del> [protected] _eventClass => 'Cake\Event\Event' <del> [protected] _config => [] <del> [protected] _configInitialized => true <add> Html => object(Cake\View\Helper\HtmlHelper) id:1 {} <add> Form => object(Cake\View\Helper\FormHelper) id:2 {} <add> int => (int) 2 <add> float => (float) 1.333 <add> string => ' ' <add> [protected] _helpers => object(Cake\View\HelperRegistry) id:3 {} <add> [protected] Blocks => object(Cake\View\ViewBlock) id:4 {} <add> [protected] plugin => null <add> [protected] name => '' <add> [protected] helpers => [ <add> (int) 0 => 'Html', <add> (int) 1 => 'Form' <add> ] <add> [protected] templatePath => null <add> [protected] template => null <add> [protected] layout => 'default' <add> [protected] layoutPath => '' <add> [protected] autoLayout => true <add> [protected] viewVars => [] <add> [protected] _ext => '.php' <add> [protected] subDir => '' <add> [protected] theme => null <add> [protected] request => object(Cake\Http\ServerRequest) id:5 {} <add> [protected] response => object(Cake\Http\Response) id:6 {} <add> [protected] elementCache => 'default' <add> [protected] _passedVars => [ <add> (int) 0 => 'viewVars', <add> (int) 1 => 'autoLayout', <add> (int) 2 => 'helpers', <add> (int) 3 => 'template', <add> (int) 4 => 'layout', <add> (int) 5 => 'name', <add> (int) 6 => 'theme', <add> (int) 7 => 'layoutPath', <add> (int) 8 => 'templatePath', <add> (int) 9 => 'plugin' <add> ] <add> [protected] _defaultConfig => [] <add> [protected] _paths => [] <add> [protected] _pathsForPlugin => [] <add> [protected] _parents => [] <add> [protected] _current => null <add> [protected] 
_currentType => '' <add> [protected] _stack => [] <add> [protected] _viewBlockClass => 'Cake\View\ViewBlock' <add> [protected] _eventManager => object(Cake\Event\EventManager) id:7 {} <add> [protected] _eventClass => 'Cake\Event\Event' <add> [protected] _config => [] <add> [protected] _configInitialized => true <ide> } <ide> TEXT; <ide> $this->assertTextEquals($expected, $result); <ide> public function testExportVar() <ide> $result = Debugger::exportVar($data); <ide> $expected = <<<TEXT <ide> [ <del> (int) 1 => 'Index one', <del> (int) 5 => 'Index five' <add> (int) 1 => 'Index one', <add> (int) 5 => 'Index five' <ide> ] <ide> TEXT; <ide> $this->assertTextEquals($expected, $result); <ide> public function testExportVar() <ide> $result = Debugger::exportVar($data, 1); <ide> $expected = <<<TEXT <ide> [ <del> 'key' => [ <del> [maximum depth reached] <del> ] <add> 'key' => [ <add> '' => [maximum depth reached] <add> ] <ide> ] <ide> TEXT; <ide> $this->assertTextEquals($expected, $result); <ide> public function testExportVar() <ide> $file = fopen('php://output', 'w'); <ide> fclose($file); <ide> $result = Debugger::exportVar($file); <del> $this->assertTextEquals('unknown', $result); <add> $this->assertTextEquals('(unknown)', $result); <ide> } <ide> <ide> /** <ide> public function testExportVarZero() <ide> $result = Debugger::exportVar($data); <ide> $expected = <<<TEXT <ide> [ <del> 'nothing' => '', <del> 'null' => null, <del> 'false' => false, <del> 'szero' => '0', <del> 'zero' => (int) 0 <add> 'nothing' => '', <add> 'null' => null, <add> 'false' => false, <add> 'szero' => '0', <add> 'zero' => (int) 0 <ide> ] <ide> TEXT; <ide> $this->assertTextEquals($expected, $result); <ide> public function testExportVarCyclicRef() <ide> $result = Debugger::exportVar($parent, 6); <ide> $expected = <<<TEXT <ide> object(stdClass) id:0 { <del> name => 'cake' <del> child => object(stdClass) id:1 { <del> name => 'php' <del> child => object(stdClass) id:0 {} <del> } <add> name => 'cake' <add> child => object(stdClass) id:1 { <add> name => 'php' <add> child => object(stdClass) id:0 {} <add> } <ide> } <ide> TEXT; <ide> $this->assertTextEquals($expected, $result); <ide> public function testDump() <ide> $close = "\n\n"; <ide> $expected = <<<TEXT <ide> {$open}[ <del> 'People' => [ <del> (int) 0 => [ <del> 'name' => 'joeseph', <del> 'coat' => 'technicolor', <del> 'hair_color' => 'brown' <del> ], <del> (int) 1 => [ <del> 'name' => 'Shaft', <del> 'coat' => 'black', <del> 'hair' => 'black' <del> ] <del> ] <add> 'People' => [ <add> (int) 0 => [ <add> 'name' => 'joeseph', <add> 'coat' => 'technicolor', <add> 'hair_color' => 'brown' <add> ], <add> (int) 1 => [ <add> 'name' => 'Shaft', <add> 'coat' => 'black', <add> 'hair' => 'black' <add> ] <add> ] <ide> ]{$close} <ide> TEXT; <ide> $this->assertTextEquals($expected, $result); <ide> public function testDump() <ide> <ide> $expected = <<<TEXT <ide> {$open}[ <del> 'People' => [ <del> [maximum depth reached] <del> ] <add> 'People' => [ <add> '' => [maximum depth reached] <add> ] <ide> ]{$close} <ide> TEXT; <ide> $this->assertTextEquals($expected, $result); <ide> public function testGetInstance() <ide> } <ide> <ide> /** <del> * Test that exportVar() doesn't loop through recursive structures. <add> * Test that exportVar() will stop traversing recursive arrays like GLOBALS. 
<ide> * <ide> * @return void <ide> */ <ide> public function testExportVarRecursion() <ide> { <ide> $output = Debugger::exportVar($GLOBALS); <del> $this->assertStringContainsString("'GLOBALS' => [recursion]", $output); <add> $this->assertRegExp("/'GLOBALS' => \[\s+'' \=\> \[maximum depth reached\]/", $output); <ide> } <ide> <ide> /** <ide> public function testDebugInfo() <ide> $result = Debugger::exportVar($object, 2); <ide> $expected = <<<eos <ide> object(TestApp\Error\Thing\DebuggableThing) id:0 { <del> <del> 'foo' => 'bar', <del> 'inner' => object(TestApp\Error\Thing\DebuggableThing) id:1 {} <del> <add> 'foo' => 'bar' <add> 'inner' => object(TestApp\Error\Thing\DebuggableThing) id:1 {} <ide> } <ide> eos; <ide> $this->assertEquals($expected, $result); <ide> public function testMaskArray() <ide> { <ide> Debugger::setOutputMask(['password' => '[**********]']); <ide> $result = Debugger::exportVar(['password' => 'pass1234']); <del> $expected = "['password'=>[**********]]"; <add> $expected = "['password'=>'[**********]']"; <ide> $this->assertEquals($expected, preg_replace('/\s+/', '', $result)); <ide> } <ide> <ide> public function testMaskObject() <ide> Debugger::setOutputMask(['password' => '[**********]']); <ide> $object = new SecurityThing(); <ide> $result = Debugger::exportVar($object); <del> $expected = 'object(TestApp\\Error\\Thing\\SecurityThing)id:0{password=>[**********]}'; <add> $expected = "object(TestApp\\Error\\Thing\\SecurityThing)id:0{password=>'[**********]'}"; <ide> $this->assertEquals($expected, preg_replace('/\s+/', '', $result)); <ide> } <ide>
11
Text
Text
fix typo in model name
a4faeceaedb30f08fd75fc1cb67709dc8076fbd4
<ide><path>README.md <ide> To download and use any of the pretrained models on your given task, you just ne <ide> >>> from transformers import AutoTokenizer, AutoModel <ide> <ide> >>> tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased") <del>>>> model = AutoModel.from_pretrained("bert_base_uncased") <add>>>> model = AutoModel.from_pretrained("bert-base-uncased") <ide> <ide> >>> inputs = tokenizer("Hello world!", return_tensors="pt") <ide> >>> outputs = model(**inputs) <ide> or for TensorFlow: <ide> >>> from transformers import AutoTokenizer, TFAutoModel <ide> <ide> >>> tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased") <del>>>> model = TFAutoModel.from_pretrained("bert_base_uncased") <add>>>> model = TFAutoModel.from_pretrained("bert-base-uncased") <ide> <ide> >>> inputs = tokenizer("Hello world!", return_tensors="tf") <ide> >>> outputs = model(**inputs)
1
PHP
PHP
start tests for csrfcomponent
d2bd36b9f50ca15f8d57dc63501d1e6c820d91ad
<ide><path>Cake/Test/TestCase/Controller/Component/CsrfComponentTest.php <add><?php <add>/** <add> * CakePHP(tm) : Rapid Development Framework (http://cakephp.org) <add> * Copyright (c) Cake Software Foundation, Inc. (http://cakefoundation.org) <add> * <add> * Licensed under The MIT License <add> * For full copyright and license information, please see the LICENSE.txt <add> * Redistributions of files must retain the above copyright notice. <add> * <add> * @copyright Copyright (c) Cake Software Foundation, Inc. (http://cakefoundation.org) <add> * @link http://cakephp.org CakePHP(tm) Project <add> * @since CakePHP(tm) v3.0.0 <add> * @license http://www.opensource.org/licenses/mit-license.php MIT License <add> */ <add>namespace Cake\Test\TestCase\Controller\Component; <add> <add>use Cake\Controller\Component\CsrfComponent; <add>use Cake\Controller\ComponentRegistry; <add>use Cake\Event\Event; <add>use Cake\Network\Request; <add>use Cake\Network\Response; <add>use Cake\TestSuite\TestCase; <add> <add>/** <add> * CsrfComponent test. <add> */ <add>class CsrfComponentTest extends TestCase { <add> <add>/** <add> * setup <add> * <add> * @return void <add> */ <add> public function setUp() { <add> parent::setUp(); <add> <add> $controller = $this->getMock('Cake\Controller\Controller'); <add> $this->registry = new ComponentRegistry($controller); <add> $this->component = new CsrfComponent($this->registry); <add> } <add> <add>/** <add> * teardown <add> * <add> * @return void <add> */ <add> public function tearDown() { <add> parent::tearDown(); <add> unset($this->component); <add> } <add> <add>/** <add> * Test setting the cookie value <add> * <add> * @return void <add> */ <add> public function testSettingCookie() { <add> $_SERVER['REQUEST_METHOD'] = 'GET'; <add> <add> $controller = $this->getMock('Cake\Controller\Controller'); <add> $controller->request = new Request(['base' => '/dir']); <add> $controller->response = new Response(); <add> <add> $event = new Event('Controller.startup', $controller); <add> $this->component->startUp($event); <add> <add> $cookie = $controller->response->cookie('csrfToken'); <add> $this->assertNotEmpty($cookie, 'Should set a token.'); <add> $this->assertRegExp('/^[a-f0-9]+$/', $cookie['value'], 'Should look like a hash.'); <add> $this->assertEquals(0, $cookie['expiry'], 'session duration.'); <add> } <add> <add>}
1
Javascript
Javascript
fix build failures
638bb78ecbe3ae9379f362ef30a5f36fd56a556f
<ide><path>spec/text-editor-element-spec.js <ide> describe('TextEditorElement', () => { <ide> ) <ide> <ide> describe('::getDefaultCharacterWidth', () => { <del> it('returns null before the element is attached', () => { <add> it('returns 0 before the element is attached', () => { <ide> const element = buildTextEditorElement({attach: false}) <del> expect(element.getDefaultCharacterWidth()).toBeNull() <add> expect(element.getDefaultCharacterWidth()).toBe(0) <ide> }) <ide> <ide> it('returns the width of a character in the root scope', () => { <ide><path>src/text-editor-element.js <ide> class TextEditorElement extends HTMLElement { <ide> const end = this.pixelPositionForScreenPosition(range.end) <ide> const lineHeight = this.getComponent().getLineHeight() <ide> <del> console.log(start, end); <del> <ide> return { <ide> top: start.top, <ide> left: start.left,
2
Javascript
Javascript
require wintersmith when needed
cffe0fd15a446150ccddcd7241a8665663e0a019
<ide><path>make.js <ide> require('./external/shelljs/make'); <ide> var builder = require('./external/builder/builder.js'); <ide> var crlfchecker = require('./external/crlfchecker/crlfchecker.js'); <del>var wintersmith = require('wintersmith'); <ide> var path = require('path'); <ide> var fs = require('fs'); <ide> <ide> target.web = function() { <ide> cp('-R', 'test/features', GH_PAGES_DIR); <ide> cp('-R', B2G_BUILD_DIR, GH_PAGES_DIR + EXTENSION_SRC_DIR + 'b2g/'); <ide> <add> var wintersmith = require('wintersmith'); <ide> var env = wintersmith('docs/config.json'); <ide> env.build(GH_PAGES_DIR, function (error) { <ide> if (error) {
1
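The equivalent move in Python is deferring a heavy or optional import into the only function that needs it, so other code paths keep working even when the dependency is not installed. A generic, hypothetical sketch (pandas stands in for any optional dependency):

```python
def rows_to_dataframe(rows):
    # Imported here rather than at module top level: only callers of this
    # function need pandas available; every other code path stays independent.
    import pandas as pd

    return pd.DataFrame(rows)
```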
Go
Go
remove unnessary abstraction deepequal
fa753e67ae2bf573c9dfb1da1e1135c5ef5ef415
<ide><path>integration-cli/docker_cli_build_test.go <ide> import ( <ide> "os/exec" <ide> "path" <ide> "path/filepath" <add> "reflect" <ide> "regexp" <ide> "strings" <ide> "syscall" <ide> func TestBuildWithVolumes(t *testing.T) { <ide> t.Fatal(err) <ide> } <ide> <del> equal := deepEqual(&expected, &result) <add> equal := reflect.DeepEqual(&result, &expected) <ide> <ide> if !equal { <ide> t.Fatalf("Volumes %s, expected %s", result, expected) <ide><path>integration-cli/docker_cli_links_test.go <ide> import ( <ide> "io/ioutil" <ide> "os" <ide> "os/exec" <add> "reflect" <ide> "strings" <ide> "testing" <ide> "time" <ide> func TestLinksInspectLinksStarted(t *testing.T) { <ide> <ide> output := convertSliceOfStringsToMap(result) <ide> <del> equal := deepEqual(expected, output) <add> equal := reflect.DeepEqual(output, expected) <ide> <ide> if !equal { <ide> t.Fatalf("Links %s, expected %s", result, expected) <ide> func TestLinksInspectLinksStopped(t *testing.T) { <ide> <ide> output := convertSliceOfStringsToMap(result) <ide> <del> equal := deepEqual(expected, output) <add> equal := reflect.DeepEqual(output, expected) <ide> <ide> if !equal { <ide> t.Fatalf("Links %s, but expected %s", result, expected) <ide><path>integration-cli/utils.go <ide> func unmarshalJSON(data []byte, result interface{}) error { <ide> return nil <ide> } <ide> <del>func deepEqual(expected interface{}, result interface{}) bool { <del> return reflect.DeepEqual(result, expected) <del>} <del> <ide> func convertSliceOfStringsToMap(input []string) map[string]struct{} { <ide> output := make(map[string]struct{}) <ide> for _, v := range input {
3
Python
Python
add ms_win64 for every compile command
e09cd4df81906bbeb58f9fbe410cbf308324dc55
<ide><path>numpy/distutils/mingw32ccompiler.py <ide> def __init__ (self, <ide> # % (self.linker, entry_point)) <ide> if get_build_architecture() == 'AMD64': <ide> self.set_executables( <del> compiler='gcc -mno-cygwin -O0 -Wall', <del> compiler_so='gcc -mno-cygwin -O0 -Wall -Wstrict-prototypes', <add> compiler='gcc -DMS_WIN64 -mno-cygwin -O0 -Wall', <add> compiler_so='gcc -DMS_WIN64 -mno-cygwin -O0 -Wall -Wstrict-prototypes', <ide> linker_exe='gcc -mno-cygwin', <ide> linker_so='gcc -mno-cygwin -shared') <ide> else:
1
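
The change above unconditionally appends -DMS_WIN64 to the gcc command lines used for 64-bit Windows builds. As a hedged illustration of the underlying idea — gating an extra preprocessor define on the detected build architecture — here is a standard-library-only sketch; the flag list and helper name are assumptions for illustration, not numpy.distutils API:

```python
import platform
import struct


def mingw_compile_flags(base=("-mno-cygwin", "-O0", "-Wall")):
    """Return gcc flags, adding -DMS_WIN64 only for 64-bit Windows builds."""
    flags = list(base)
    on_windows = platform.system() == "Windows"
    is_64bit = struct.calcsize("P") * 8 == 64  # pointer width of this Python
    if on_windows and is_64bit:
        flags.insert(0, "-DMS_WIN64")
    return flags


print(" ".join(mingw_compile_flags()))
```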
Python
Python
add importorskip for japanese fixture
95050201ce095e2328be383beec3025a5e64fb0a
<ide><path>spacy/tests/conftest.py <ide> def ja_tokenizer(): <ide> <ide> @pytest.fixture <ide> def japanese(): <add> pytest.importorskip("MeCab") <ide> return Japanese() <ide> <ide> @pytest.fixture
1
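
A note on the one-line change above: pytest.importorskip imports the named module if it is available and marks the test as skipped (rather than errored) when it is not, which is the usual way to guard fixtures that depend on an optional binding such as MeCab. A minimal, self-contained sketch — the fixture body returns a placeholder object rather than spaCy's real Japanese() pipeline:

```python
import pytest


@pytest.fixture
def japanese():
    # Skip every test that requests this fixture when the optional MeCab
    # binding is not installed, instead of failing with an ImportError.
    pytest.importorskip("MeCab")
    # Placeholder for the real spaCy Japanese() pipeline object.
    return object()


def test_japanese_fixture(japanese):
    assert japanese is not None
```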
Python
Python
add worker component that starts actors
f1c93a6d47efbc0ec9fdbb5e293c514263ee73a4
<ide><path>celery/worker/actorsbootstrap.py <add> <add>import bootsteps <add>from cell import Actor <add>from celery.utils.imports import instantiate <add>from kombu.utils import uuid <add>from .bootsteps import StartStopComponent <add>from celery.utils.log import get_logger <add> <add>logger = get_logger(__name__) <add>info, warn, error, crit = (logger.info, logger.warn, <add> logger.error, logger.critical) <add> <add>#from examples.workflow import TestActor <add># <add># <add>class WorkerComponent(StartStopComponent): <add> """This component starts an ActorManager instance if actors support is enabled.""" <add> name = 'worker.actors-manager' <add> consumer = None <add> <add> def ActorsManager(self, w): <add> return (w.actors_manager_cls or ActorsManager) <add> <add> def include_if(self, w): <add> #return w.actors_enabled <add> return True <add> <add> def init(self, w, **kwargs): <add> w.actors_manager = None <add> <add> def create(self, w): <add> print 'create ActorsManager' <add> actor = w.actors_manager = self.instantiate(self.ActorsManager(w), <add> app = w.app) <add> actor.app = w.app <add> w.on_consumer_ready_callbacks.append(actor.on_consumer_ready) <add> return actor <add> <add> <add>class ActorProxy(object): <add> """ <add> A class that represent an actor started remotely <add> """ <add> def __init__(self, local_actor, actor_id, async_start_result): <add> self.__subject = local_actor.__copy__() <add> self.__subject.id = actor_id <add> self.async_start_result = async_start_result <add> <add> def __getattr__(self, name): <add> return getattr(self.__subject, name) <add> <add> def wait_to_start(self): <add> self.async_start_result._result <add> <add> <add> <add>class ActorsManager(Actor): <add> connection = None <add> types = ('round-robin', 'scatter') <add> actor_registry = {} <add> actors_consumer = None <add> connection = None <add> app = None <add> <add> def __init__(self, app=None, *args, **kwargs): <add> self.app = app <add> super(ActorsManager, self).__init__(*args, **kwargs) <add> <add> def contribute_to_state(self, state): <add> state.actor_registry = self.actor_registry <add> state.connection = self.connection <add> conninfo = self.app.connection() <add> state.connection_errors = conninfo.connection_errors <add> state.channel_errors = conninfo.channel_errors <add> state.reset() <add> return Actor.contribute_to_state(self, state) <add> <add> class state(Actor.state): <add> def add_actor(self, name, id = None): <add> """Add actor to the actor registry and start the actor's main method""" <add> try: <add> actor = instantiate(name, connection = self.connection, <add> id = id) <add> consumer = actor.Consumer(self.connection.channel()) <add> consumer.consume() <add> if actor.id not in self.actor_registry: <add> self.actor_registry[actor.id] = consumer <add> else: <add> warn('Actor with the same id already exists') <add> print 'Register actor in the actor registry: %s' % name <add> return actor.id <add> except Exception as exc: <add> error('Start actor error: %r', exc, exc_info=True) <add> <add> def stop_all(self): <add> for _, consumer in self.actor_registry.items(): <add> self.maybe_conn_error(consumer.cancel) <add> self.actor_registry.clear() <add> <add> def reset(self): <add> for _, consumer in self.actor_registry.items(): <add> self.maybe_conn_error(consumer.cancel) <add> consumer.consume() <add> <add> def stop_actor(self, actor_id): <add> if actor_id in self.actor_registry: <add> consumer = self.actor_registry.pop(actor_id) <add> self.maybe_conn_error(consumer.cancel) <add> 
<add> def maybe_conn_error(self, fun): <add> """Applies function but ignores any connection or channel <add> errors raised.""" <add> try: <add> fun() <add> except (AttributeError, ) + \ <add> self.connection_errors + \ <add> self.channel_errors: <add> pass <add> <add> def add_actor(self, actor, nowait=False): <add> name = "%s.%s"%(actor.__class__.__module__, <add> actor.__class__.__name__) <add> actor_id = uuid() <add> res = self.call('add_actor', {'name': name, 'id' : actor_id}, <add> type = 'round-robin', nowait = 'True') <add> actor_proxy = ActorProxy(actor, actor_id, res) <add> return actor_proxy <add> <add> <add> def stop_actor_by_name(self, name, nowait=False): <add> return self.scatter('stop_actor', {'name' : name}, nowait=nowait) <add> <add> def stop_actor_by_id(self, actor_id, nowait=False): <add> return self.call('stop_actor', {'actor_id' : actor_id}, <add> type = 'round-robin', <add> nowait=nowait) <add> <add> def start(self): <add> print 'Starting ActorsManager' <add> <add> def stop(self): <add> if self.actors_consumer: <add> self.actors_consumer.stop() <add> <add> def on_start(self, connection): <add> print 'establishing connection for ActorsManager' <add> self.connection = connection <add> actor_consumer = self.Consumer(self.connection.channel()) <add> actor_consumer.consume() <add> print 'Start consuming' <add> self.actor_consumer = actor_consumer <add> self.contribute_to_state(self.state) <add> <add> def on_consumer_ready(self, consumer): <add> print 'ActorsManager in On consumer ready' <add> if consumer.connection: <add> raise Exception('Consumer is ready.') <add> consumer.on_reset_connection.append(self.on_start) <add> consumer.on_close_connection.append(self.stop) <add> <ide>\ No newline at end of file
1
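
Stripped of the cell/kombu specifics, the ActorsManager above mostly maintains a registry mapping generated ids to started consumers, with add, stop, and stop-all operations. A simplified, dependency-free sketch of that bookkeeping — DummyConsumer stands in for a real kombu consumer and is purely illustrative:

```python
import uuid


class DummyConsumer:
    """Illustrative stand-in for a kombu consumer."""

    def consume(self):
        print("consuming")

    def cancel(self):
        print("cancelled")


class ConsumerRegistry:
    def __init__(self):
        self._consumers = {}

    def add(self, consumer):
        cid = uuid.uuid4().hex
        self._consumers[cid] = consumer
        consumer.consume()  # start receiving messages
        return cid

    def stop(self, cid):
        # Unknown ids are ignored silently.
        consumer = self._consumers.pop(cid, None)
        if consumer is not None:
            consumer.cancel()

    def stop_all(self):
        for cid in list(self._consumers):
            self.stop(cid)


registry = ConsumerRegistry()
cid = registry.add(DummyConsumer())
registry.stop(cid)
```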
Ruby
Ruby
fix sqlite migrations with custom primary keys
5ac4f4d2563e7f9c5ffaecce4be4b9e2c5b0c081
<ide><path>activerecord/lib/active_record/connection_adapters/sqlite3_adapter.rb <ide> def move_table(from, to, options = {}, &block) <ide> <ide> def copy_table(from, to, options = {}) <ide> from_primary_key = primary_key(from) <add> from_primary_key_column = columns(from).select { |column| column.name == from_primary_key }.first <ide> options[:id] = false <ide> create_table(to, options) do |definition| <ide> @definition = definition <del> @definition.primary_key(from_primary_key) if from_primary_key.present? <add> @definition.primary_key(from_primary_key, from_primary_key_column.type) if from_primary_key.present? <ide> columns(from).each do |column| <ide> column_name = options[:rename] ? <ide> (options[:rename][column.name] || <ide> def copy_table(from, to, options = {}) <ide> def copy_table_indexes(from, to, rename = {}) <ide> indexes(from).each do |index| <ide> name = index.name <add> # indexes sqlite creates for internal use start with `sqlite_` and <add> # don't need to be copied <add> next if name.starts_with?("sqlite_") <ide> if to == "a#{from}" <ide> name = "t#{name}" <ide> elsif from == "a#{to}" <ide><path>activerecord/test/cases/adapters/sqlite3/sqlite3_adapter_test.rb <ide> def test_no_primary_key <ide> end <ide> end <ide> <add> class Barcode < ActiveRecord::Base <add> end <add> <add> def test_existing_records_have_custom_primary_key <add> connection = Barcode.connection <add> connection.create_table(:barcodes, primary_key: "code", id: :string, limit: 42, force: true) do |t| <add> t.text :other_attr <add> end <add> code = "214fe0c2-dd47-46df-b53b-66090b3c1d40" <add> Barcode.create! code: code, other_attr: "xxx" <add> <add> connection.change_table "barcodes" do |t| <add> connection.remove_column("barcodes", "other_attr") <add> end <add> <add> assert_equal code, Barcode.first.id <add> end <add> <ide> def test_supports_extensions <ide> assert_not @conn.supports_extensions?, "does not support extensions" <ide> end
2
Mixed
Javascript
invoke callback before emitting error always
3de5eae6dbe503485b95bdeb8bddbd67e4613d59
<ide><path>doc/api/stream.md <ide> The `writable.write()` method writes some data to the stream, and calls the <ide> supplied `callback` once the data has been fully handled. If an error <ide> occurs, the `callback` *may or may not* be called with the error as its <ide> first argument. To reliably detect write errors, add a listener for the <del>`'error'` event. <add>`'error'` event. If `callback` is called with an error, it will be called <add>before the `'error'` event is emitted. <ide> <ide> The return value is `true` if the internal buffer is less than the <ide> `highWaterMark` configured when the stream was created after admitting `chunk`. <ide><path>lib/_stream_writable.js <ide> function WritableState(options, stream, isDuplex) { <ide> // Should .destroy() be called after 'finish' (and potentially 'end') <ide> this.autoDestroy = !!(options && options.autoDestroy); <ide> <add> // Indicates whether the stream has errored. When true all write() calls <add> // should return false. This is needed since when autoDestroy <add> // is disabled we need a way to tell whether the stream has failed. <add> this.errored = false; <add> <ide> // Count buffered requests <ide> this.bufferedRequestCount = 0; <ide> <ide> function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { <ide> if (!ret) <ide> state.needDrain = true; <ide> <del> if (state.writing || state.corked) { <add> if (state.writing || state.corked || state.errored) { <ide> var last = state.lastBufferedRequest; <ide> state.lastBufferedRequest = { <ide> chunk, <ide> function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { <ide> doWrite(stream, state, false, len, chunk, encoding, cb); <ide> } <ide> <del> return ret; <add> // Return false if errored or destroyed in order to break <add> // any synchronous while(stream.write(data)) loops. <add> return ret && !state.errored && !state.destroyed; <ide> } <ide> <ide> function doWrite(stream, state, writev, len, chunk, encoding, cb) { <ide> function doWrite(stream, state, writev, len, chunk, encoding, cb) { <ide> state.sync = false; <ide> } <ide> <del>function onwriteError(stream, state, sync, er, cb) { <add>function onwriteError(stream, state, er, cb) { <ide> --state.pendingcb; <ide> <del> if (sync) { <del> // Defer the callback if we are being called synchronously <del> // to avoid piling up things on the stack <del> process.nextTick(cb, er); <del> } else { <del> // The caller expect this to happen before if <del> // it is async <del> cb(er); <del> } <add> cb(er); <add> // This can emit error, but error must always follow cb. 
<ide> errorOrDestroy(stream, er); <ide> } <ide> <ide> function onwrite(stream, er) { <ide> state.length -= state.writelen; <ide> state.writelen = 0; <ide> <del> if (er) <del> onwriteError(stream, state, sync, er, cb); <del> else { <add> if (er) { <add> state.errored = true; <add> if (sync) { <add> process.nextTick(onwriteError, stream, state, er, cb); <add> } else { <add> onwriteError(stream, state, er, cb); <add> } <add> } else { <ide> // Check if we're actually ready to finish, but don't emit yet <ide> var finished = needFinish(state) || stream.destroyed; <ide> <ide> Object.defineProperty(Writable.prototype, 'writableLength', { <ide> function needFinish(state) { <ide> return (state.ending && <ide> state.length === 0 && <del> !state.errorEmitted && <add> !state.errored && <ide> state.bufferedRequest === null && <ide> !state.finished && <ide> !state.writing); <ide><path>lib/internal/streams/destroy.js <ide> function destroy(err, cb) { <ide> const r = this._readableState; <ide> const w = this._writableState; <ide> <add> if (w && err) { <add> w.errored = true; <add> } <add> <ide> if ((w && w.destroyed) || (r && r.destroyed)) { <ide> if (cb) { <ide> cb(err); <ide> function destroy(err, cb) { <ide> this._destroy(err || null, (err) => { <ide> const emitClose = (w && w.emitClose) || (r && r.emitClose); <ide> if (cb) { <add> // Invoke callback before scheduling emitClose so that callback <add> // can schedule before. <add> cb(err); <ide> if (emitClose) { <ide> process.nextTick(emitCloseNT, this); <ide> } <del> cb(err); <ide> } else if (needError(this, err)) { <ide> process.nextTick(emitClose ? emitErrorCloseNT : emitErrorNT, this, err); <ide> } else if (emitClose) { <ide> function undestroy() { <ide> <ide> if (w) { <ide> w.destroyed = false; <add> w.errored = false; <ide> w.ended = false; <ide> w.ending = false; <ide> w.finalCalled = false; <ide> function errorOrDestroy(stream, err) { <ide> const r = stream._readableState; <ide> const w = stream._writableState; <ide> <add> if (w & err) { <add> w.errored = true; <add> } <add> <ide> if ((r && r.autoDestroy) || (w && w.autoDestroy)) <ide> stream.destroy(err); <ide> else if (needError(stream, err)) <ide><path>test/parallel/test-http2-reset-flood.js <ide> const worker = new Worker(__filename).on('message', common.mustCall((port) => { <ide> h2header.writeIntBE(1, 0, 3); // Length: 1 <ide> h2header.writeIntBE(i, 5, 4); // Stream ID <ide> // 0x88 = :status: 200 <del> conn.write(Buffer.concat([h2header, Buffer.from([0x88])])); <add> if (!conn.write(Buffer.concat([h2header, Buffer.from([0x88])]))) { <add> process.nextTick(writeRequests); <add> break; <add> } <ide> } <ide> } <ide> <ide><path>test/parallel/test-stream-writable-destroy.js <ide> const assert = require('assert'); <ide> assert.strictEqual(write.destroyed, true); <ide> } <ide> <add>{ <add> const write = new Writable({ <add> write(chunk, enc, cb) { <add> this.destroy(new Error('asd')); <add> cb(); <add> } <add> }); <add> <add> write.on('error', common.mustCall()); <add> write.on('finish', common.mustNotCall()); <add> write.end('asd'); <add> assert.strictEqual(write.destroyed, true); <add>} <add> <ide> { <ide> const write = new Writable({ <ide> write(chunk, enc, cb) { cb(); } <ide><path>test/parallel/test-stream-writable-write-cb-error.js <add>'use strict'; <add>const common = require('../common'); <add>const { Writable } = require('stream'); <add>const assert = require('assert'); <add> <add>// Ensure callback is always invoked before <add>// error is emitted. 
Regardless if error was <add>// sync or async. <add> <add>{ <add> let callbackCalled = false; <add> // Sync Error <add> const writable = new Writable({ <add> write: common.mustCall((buf, enc, cb) => { <add> cb(new Error()); <add> }) <add> }); <add> writable.on('error', common.mustCall(() => { <add> assert.strictEqual(callbackCalled, true); <add> })); <add> writable.write('hi', common.mustCall(() => { <add> callbackCalled = true; <add> })); <add>} <add> <add>{ <add> let callbackCalled = false; <add> // Async Error <add> const writable = new Writable({ <add> write: common.mustCall((buf, enc, cb) => { <add> process.nextTick(cb, new Error()); <add> }) <add> }); <add> writable.on('error', common.mustCall(() => { <add> assert.strictEqual(callbackCalled, true); <add> })); <add> writable.write('hi', common.mustCall(() => { <add> callbackCalled = true; <add> })); <add>} <add> <add>{ <add> // Sync Error <add> const writable = new Writable({ <add> write: common.mustCall((buf, enc, cb) => { <add> cb(new Error()); <add> }) <add> }); <add> <add> writable.on('error', common.mustCall()); <add> <add> let cnt = 0; <add> // Ensure we don't live lock on sync error <add> while (writable.write('a')) <add> cnt++; <add> <add> assert.strictEqual(cnt, 0); <add>} <ide><path>test/parallel/test-wrap-js-stream-exceptions.js <ide> const socket = new JSStreamWrap(new Duplex({ <ide> }) <ide> })); <ide> <del>assert.throws(() => socket.end('foo'), /Error: write EPROTO/); <add>socket.end('foo'); <add>socket.on('error', common.expectsError({ <add> type: Error, <add> message: 'write EPROTO' <add>})); <ide><path>test/parallel/test-zlib-write-after-close.js <ide> const zlib = require('zlib'); <ide> zlib.gzip('hello', common.mustCall(function(err, out) { <ide> const unzip = zlib.createGunzip(); <ide> unzip.close(common.mustCall()); <del> common.expectsError( <del> () => unzip.write(out), <del> { <del> code: 'ERR_STREAM_DESTROYED', <del> type: Error, <del> message: 'Cannot call write after a stream was destroyed' <del> } <del> ); <add> <add> unzip.write(out); <add> unzip.on('error', common.expectsError({ <add> code: 'ERR_STREAM_DESTROYED', <add> type: Error <add> })); <ide> }));
8
Go
Go
remove version package from api types
9961816adead89622f5d4201a0b5cb33845fa852
<ide><path>api/client/version.go <ide> import ( <ide> Cli "github.com/docker/docker/cli" <ide> "github.com/docker/docker/dockerversion" <ide> flag "github.com/docker/docker/pkg/mflag" <del> "github.com/docker/docker/pkg/version" <ide> "github.com/docker/docker/utils" <ide> ) <ide> <ide> func (cli *DockerCli) CmdVersion(args ...string) (err error) { <ide> vd := types.VersionResponse{ <ide> Client: &types.Version{ <ide> Version: dockerversion.Version, <del> APIVersion: version.Version(cli.client.ClientVersion()), <add> APIVersion: cli.client.ClientVersion(), <ide> GoVersion: runtime.Version(), <ide> GitCommit: dockerversion.GitCommit, <ide> BuildTime: dockerversion.BuildTime, <ide><path>api/server/router/system/system_routes.go <ide> func (s *systemRouter) getInfo(ctx context.Context, w http.ResponseWriter, r *ht <ide> <ide> func (s *systemRouter) getVersion(ctx context.Context, w http.ResponseWriter, r *http.Request, vars map[string]string) error { <ide> info := s.backend.SystemVersion() <del> info.APIVersion = api.DefaultVersion <add> info.APIVersion = api.DefaultVersion.String() <ide> <ide> return httputils.WriteJSON(w, http.StatusOK, info) <ide> } <ide><path>api/types/types.go <ide> import ( <ide> "github.com/docker/docker/api/types/container" <ide> "github.com/docker/docker/api/types/network" <ide> "github.com/docker/docker/api/types/registry" <del> "github.com/docker/docker/pkg/version" <ide> "github.com/docker/go-connections/nat" <ide> ) <ide> <ide> type ContainerProcessList struct { <ide> // GET "/version" <ide> type Version struct { <ide> Version string <del> APIVersion version.Version `json:"ApiVersion"` <add> APIVersion string `json:"ApiVersion"` <ide> GitCommit string <ide> GoVersion string <ide> Os string
3
Javascript
Javascript
fix redeclared vars in sequential tests
5ea1e7d55f53ced843ff7fcb3d2d266ed117d802
<ide><path>test/sequential/test-child-process-execsync.js <ide> var msgBuf = new Buffer(msg + '\n'); <ide> <ide> cmd = `"${process.execPath}" -e "console.log(\'${msg}\');"`; <ide> <del>var ret = execSync(cmd); <add>ret = execSync(cmd); <ide> <ide> assert.strictEqual(ret.length, msgBuf.length); <ide> assert.deepEqual(ret, msgBuf, 'execSync result buffer should match'); <ide><path>test/sequential/test-require-cache-without-stat.js <ide> var counterBefore = counter; <ide> <ide> // Now load the module a bunch of times with equivalent paths. <ide> // stat should not be called. <del>for (var i = 0; i < 100; i++) { <add>for (let i = 0; i < 100; i++) { <ide> require(common.fixturesDir + '/a'); <ide> require('../fixtures/a.js'); <ide> require('./../fixtures/a.js'); <ide> } <ide> <ide> // Do the same with a built-in module <del>for (var i = 0; i < 100; i++) { <add>for (let i = 0; i < 100; i++) { <ide> require('http'); <ide> } <ide> <ide><path>test/sequential/test-tcp-wrap-listen.js <ide> server.onconnection = function(err, client) { <ide> <ide> var req = new WriteWrap(); <ide> req.async = false; <del> var err = client.writeBuffer(req, buffer); <del> assert.equal(err, 0); <add> const returnCode = client.writeBuffer(req, buffer); <add> assert.equal(returnCode, 0); <ide> client.pendingWrites.push(req); <ide> <ide> console.log('client.writeQueueSize: ' + client.writeQueueSize);
3
Python
Python
add tests for dtype and out
d7bc29c1382cc31f3bc177f9ff6672d3f73e3076
<ide><path>numpy/typing/tests/data/pass/random.py <ide> D_2D_like: List[List[float]] = [[1, 2], [2, 3], [3, 4], [4, 5.1]] <ide> D_2D: np.ndarray[Any, np.dtype[np.float64]] = np.array(D_2D_like) <ide> <add>S_out: np.ndarray[Any, np.dtype[np.float32]] = np.empty(1, dtype=np.float32) <add>D_out: np.ndarray[Any, np.dtype[np.float64]] = np.empty(1) <add> <ide> def_gen.standard_normal() <add>def_gen.standard_normal(dtype=np.float32) <add>def_gen.standard_normal(dtype="float32") <add>def_gen.standard_normal(dtype="double") <add>def_gen.standard_normal(dtype=np.float64) <ide> def_gen.standard_normal(size=None) <ide> def_gen.standard_normal(size=1) <add>def_gen.standard_normal(size=1, dtype=np.float32) <add>def_gen.standard_normal(size=1, dtype="f4") <add>def_gen.standard_normal(size=1, dtype="float32", out=S_out) <add>def_gen.standard_normal(dtype=np.float32, out=S_out) <add>def_gen.standard_normal(size=1, dtype=np.float64) <add>def_gen.standard_normal(size=1, dtype="float64") <add>def_gen.standard_normal(size=1, dtype="f8") <add>def_gen.standard_normal(out=D_out) <add>def_gen.standard_normal(size=1, dtype="float64") <add>def_gen.standard_normal(size=1, dtype="float64", out=D_out) <ide> <ide> def_gen.random() <add>def_gen.random(dtype=np.float32) <add>def_gen.random(dtype="float32") <add>def_gen.random(dtype="double") <add>def_gen.random(dtype=np.float64) <ide> def_gen.random(size=None) <ide> def_gen.random(size=1) <add>def_gen.random(size=1, dtype=np.float32) <add>def_gen.random(size=1, dtype="f4") <add>def_gen.random(size=1, dtype="float32", out=S_out) <add>def_gen.random(dtype=np.float32, out=S_out) <add>def_gen.random(size=1, dtype=np.float64) <add>def_gen.random(size=1, dtype="float64") <add>def_gen.random(size=1, dtype="f8") <add>def_gen.random(out=D_out) <add>def_gen.random(size=1, dtype="float64") <add>def_gen.random(size=1, dtype="float64", out=D_out) <ide> <ide> def_gen.standard_cauchy() <ide> def_gen.standard_cauchy(size=None) <ide> def_gen.standard_cauchy(size=1) <ide> <ide> def_gen.standard_exponential() <add>def_gen.standard_exponential(method="inv") <add>def_gen.standard_exponential(dtype=np.float32) <add>def_gen.standard_exponential(dtype="float32") <add>def_gen.standard_exponential(dtype="double") <add>def_gen.standard_exponential(dtype=np.float64) <ide> def_gen.standard_exponential(size=None) <del>def_gen.standard_exponential(size=1) <add>def_gen.standard_exponential(size=None, method="inv") <add>def_gen.standard_exponential(size=1, method="inv") <add>def_gen.standard_exponential(size=1, dtype=np.float32) <add>def_gen.standard_exponential(size=1, dtype="f4", method="inv") <add>def_gen.standard_exponential(size=1, dtype="float32", out=S_out) <add>def_gen.standard_exponential(dtype=np.float32, out=S_out) <add>def_gen.standard_exponential(size=1, dtype=np.float64, method="inv") <add>def_gen.standard_exponential(size=1, dtype="float64") <add>def_gen.standard_exponential(size=1, dtype="f8") <add>def_gen.standard_exponential(out=D_out) <add>def_gen.standard_exponential(size=1, dtype="float64") <add>def_gen.standard_exponential(size=1, dtype="float64", out=D_out) <ide> <ide> def_gen.zipf(1.5) <ide> def_gen.zipf(1.5, size=None) <ide> <ide> def_gen.standard_gamma(0.5) <ide> def_gen.standard_gamma(0.5, size=None) <add>def_gen.standard_gamma(0.5, dtype="float32") <add>def_gen.standard_gamma(0.5, size=None, dtype="float32") <ide> def_gen.standard_gamma(0.5, size=1) <ide> def_gen.standard_gamma(D_arr_0p5) <add>def_gen.standard_gamma(D_arr_0p5, dtype="f4") <add>def_gen.standard_gamma(0.5, 
size=1, dtype="float32", out=S_out) <add>def_gen.standard_gamma(D_arr_0p5, dtype=np.float32, out=S_out) <ide> def_gen.standard_gamma(D_arr_0p5, size=1) <ide> def_gen.standard_gamma(D_arr_like_0p5) <ide> def_gen.standard_gamma(D_arr_like_0p5, size=1) <add>def_gen.standard_gamma(0.5, out=D_out) <add>def_gen.standard_gamma(D_arr_like_0p5, out=D_out) <add>def_gen.standard_gamma(D_arr_like_0p5, size=1) <add>def_gen.standard_gamma(D_arr_like_0p5, size=1, out=D_out, dtype=np.float64) <ide> <ide> def_gen.vonmises(0.5, 0.5) <ide> def_gen.vonmises(0.5, 0.5, size=None) <ide><path>numpy/typing/tests/data/reveal/random.py <ide> I_arr_like_20: List[int] = [20] <ide> D_2D_like: List[List[float]] = [[1, 2], [2, 3], [3, 4], [4, 5.1]] <ide> D_2D: np.ndarray[Any, np.dtype[np.float64]] = np.array(D_2D_like) <add>S_out: np.ndarray[Any, np.dtype[np.float32]] = np.empty(1, dtype=np.float32) <add>D_out: np.ndarray[Any, np.dtype[np.float64]] = np.empty(1) <ide> <ide> reveal_type(def_gen.standard_normal()) # E: float <ide> reveal_type(def_gen.standard_normal(dtype=np.float32)) # E: float <ide> reveal_type(def_gen.standard_normal(size=1)) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[numpy.typing._64Bit]]] <ide> reveal_type(def_gen.standard_normal(size=1, dtype=np.float32)) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[numpy.typing._32Bit]]] <ide> reveal_type(def_gen.standard_normal(size=1, dtype="f4")) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[numpy.typing._32Bit]]] <add>reveal_type(def_gen.standard_normal(size=1, dtype="float32", out=S_out)) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[numpy.typing._32Bit]]] <add>reveal_type(def_gen.standard_normal(dtype=np.float32, out=S_out)) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[numpy.typing._32Bit]]] <ide> reveal_type(def_gen.standard_normal(size=1, dtype=np.float64)) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[numpy.typing._64Bit]]] <ide> reveal_type(def_gen.standard_normal(size=1, dtype="float64")) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[numpy.typing._64Bit]]] <ide> reveal_type(def_gen.standard_normal(size=1, dtype="f8")) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[numpy.typing._64Bit]]] <add>reveal_type(def_gen.standard_normal(out=D_out)) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[numpy.typing._64Bit]]] <add>reveal_type(def_gen.standard_normal(size=1, dtype="float64")) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[numpy.typing._64Bit]]] <add>reveal_type(def_gen.standard_normal(size=1, dtype="float64", out=D_out)) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[numpy.typing._64Bit]]] <ide> <ide> reveal_type(def_gen.random()) # E: float <ide> reveal_type(def_gen.random(dtype=np.float32)) # E: float <ide> reveal_type(def_gen.random(size=1)) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[numpy.typing._64Bit]]] <ide> reveal_type(def_gen.random(size=1, dtype=np.float32)) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[numpy.typing._32Bit]]] <ide> reveal_type(def_gen.random(size=1, dtype="f4")) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[numpy.typing._32Bit]]] <add>reveal_type(def_gen.random(size=1, dtype="float32", out=S_out)) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[numpy.typing._32Bit]]] <add>reveal_type(def_gen.random(dtype=np.float32, out=S_out)) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[numpy.typing._32Bit]]] <ide> reveal_type(def_gen.random(size=1, dtype=np.float64)) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[numpy.typing._64Bit]]] <ide> 
reveal_type(def_gen.random(size=1, dtype="float64")) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[numpy.typing._64Bit]]] <ide> reveal_type(def_gen.random(size=1, dtype="f8")) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[numpy.typing._64Bit]]] <add>reveal_type(def_gen.random(out=D_out)) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[numpy.typing._64Bit]]] <add>reveal_type(def_gen.random(size=1, dtype="float64")) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[numpy.typing._64Bit]]] <add>reveal_type(def_gen.random(size=1, dtype="float64", out=D_out)) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[numpy.typing._64Bit]]] <ide> <ide> reveal_type(def_gen.standard_cauchy()) # E: float <ide> reveal_type(def_gen.standard_cauchy(size=None)) # E: float <ide> reveal_type(def_gen.standard_exponential(size=1, method="inv")) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[numpy.typing._64Bit]]] <ide> reveal_type(def_gen.standard_exponential(size=1, dtype=np.float32)) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[numpy.typing._32Bit]]] <ide> reveal_type(def_gen.standard_exponential(size=1, dtype="f4", method="inv")) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[numpy.typing._32Bit]]] <add>reveal_type(def_gen.standard_exponential(size=1, dtype="float32", out=S_out)) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[numpy.typing._32Bit]]] <add>reveal_type(def_gen.standard_exponential(dtype=np.float32, out=S_out)) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[numpy.typing._32Bit]]] <ide> reveal_type(def_gen.standard_exponential(size=1, dtype=np.float64, method="inv")) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[numpy.typing._64Bit]]] <ide> reveal_type(def_gen.standard_exponential(size=1, dtype="float64")) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[numpy.typing._64Bit]]] <ide> reveal_type(def_gen.standard_exponential(size=1, dtype="f8")) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[numpy.typing._64Bit]]] <add>reveal_type(def_gen.standard_exponential(out=D_out)) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[numpy.typing._64Bit]]] <add>reveal_type(def_gen.standard_exponential(size=1, dtype="float64")) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[numpy.typing._64Bit]]] <add>reveal_type(def_gen.standard_exponential(size=1, dtype="float64", out=D_out)) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[numpy.typing._64Bit]]] <ide> <ide> reveal_type(def_gen.zipf(1.5)) # E: int <ide> reveal_type(def_gen.zipf(1.5, size=None)) # E: int <ide> <ide> reveal_type(def_gen.standard_gamma(0.5)) # E: float <ide> reveal_type(def_gen.standard_gamma(0.5, size=None)) # E: float <add>reveal_type(def_gen.standard_gamma(0.5, dtype="float32")) # E: float <add>reveal_type(def_gen.standard_gamma(0.5, size=None, dtype="float32")) # E: float <ide> reveal_type(def_gen.standard_gamma(0.5, size=1)) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[numpy.typing._64Bit]]] <ide> reveal_type(def_gen.standard_gamma(D_arr_0p5)) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[numpy.typing._64Bit]]] <add>reveal_type(def_gen.standard_gamma(D_arr_0p5, dtype="f4")) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[numpy.typing._32Bit]]] <add>reveal_type(def_gen.standard_gamma(0.5, size=1, dtype="float32", out=S_out)) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[numpy.typing._32Bit]]] <add>reveal_type(def_gen.standard_gamma(D_arr_0p5, dtype=np.float32, out=S_out)) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[numpy.typing._32Bit]]] <ide> 
reveal_type(def_gen.standard_gamma(D_arr_0p5, size=1)) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[numpy.typing._64Bit]]] <ide> reveal_type(def_gen.standard_gamma(D_arr_like_0p5)) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[numpy.typing._64Bit]]] <ide> reveal_type(def_gen.standard_gamma(D_arr_like_0p5, size=1)) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[numpy.typing._64Bit]]] <add>reveal_type(def_gen.standard_gamma(0.5, out=D_out)) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[numpy.typing._64Bit]]] <add>reveal_type(def_gen.standard_gamma(D_arr_like_0p5, out=D_out)) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[numpy.typing._64Bit]]] <add>reveal_type(def_gen.standard_gamma(D_arr_like_0p5, size=1)) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[numpy.typing._64Bit]]] <add>reveal_type(def_gen.standard_gamma(D_arr_like_0p5, size=1, out=D_out, dtype=np.float64)) # E: numpy.ndarray[Any, numpy.dtype[numpy.floating[numpy.typing._64Bit]]] <ide> <ide> reveal_type(def_gen.vonmises(0.5, 0.5)) # E: float <ide> reveal_type(def_gen.vonmises(0.5, 0.5, size=None)) # E: float
2
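
For readers unfamiliar with the two keywords being exercised above: on numpy.random.Generator methods, dtype selects the precision of the returned array, while out fills a preallocated array in place, and the two must agree when both are given. A short usage sketch (seed and shapes are arbitrary):

```python
import numpy as np

rng = np.random.default_rng(12345)

# dtype alone controls the precision of the freshly allocated result.
sample64 = rng.standard_exponential(size=4, dtype=np.float64)

# out fills an existing buffer in place; its dtype must match `dtype`.
buf32 = np.empty(4, dtype=np.float32)
rng.standard_normal(out=buf32, dtype=np.float32)

print(sample64.dtype, buf32.dtype)  # float64 float32
```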
Javascript
Javascript
provide proper deprecation code
ba944b16972c4b248e946d4bacb2141e5ef7eba9
<ide><path>lib/async_hooks.js <ide> function showEmitBeforeAfterWarning() { <ide> process.emitWarning( <ide> 'asyncResource.emitBefore and emitAfter are deprecated. Please use ' + <ide> 'asyncResource.runInAsyncScope instead', <del> 'DeprecationWarning', 'DEP00XX'); <add> 'DeprecationWarning', 'DEP0098'); <ide> emitBeforeAfterWarning = false; <ide> } <ide> }
1
Go
Go
fix empty withversion blocking version negotiation
0041e2419a504a32b340de0b8f65870cb8542090
<ide><path>client/client_test.go <ide> func TestNegotiateAPVersionOverride(t *testing.T) { <ide> assert.Check(t, is.Equal(expected, client.version)) <ide> } <ide> <add>// TestNegotiateAPIVersionWithEmptyVersion asserts that initializing a client <add>// with an empty version string does still allow API-version negotiation <add>func TestNegotiateAPIVersionWithEmptyVersion(t *testing.T) { <add> client, err := NewClientWithOpts(WithVersion("")) <add> assert.NilError(t, err) <add> <add> client.NegotiateAPIVersionPing(types.Ping{APIVersion: "1.35"}) <add> assert.Equal(t, client.version, "1.35") <add>} <add> <add>// TestNegotiateAPIVersionWithFixedVersion asserts that initializing a client <add>// with an fixed version disables API-version negotiation <add>func TestNegotiateAPIVersionWithFixedVersion(t *testing.T) { <add> client, err := NewClientWithOpts(WithVersion("1.35")) <add> assert.NilError(t, err) <add> <add> client.NegotiateAPIVersionPing(types.Ping{APIVersion: "1.31"}) <add> assert.Equal(t, client.version, "1.35") <add>} <add> <ide> type roundTripFunc func(*http.Request) (*http.Response, error) <ide> <ide> func (rtf roundTripFunc) RoundTrip(req *http.Request) (*http.Response, error) { <ide><path>client/options.go <ide> func WithTLSClientConfig(cacertPath, certPath, keyPath string) Opt { <ide> } <ide> } <ide> <del>// WithVersion overrides the client version with the specified one <add>// WithVersion overrides the client version with the specified one. If an empty <add>// version is specified, the value will be ignored to allow version negotiation. <ide> func WithVersion(version string) Opt { <ide> return func(c *Client) error { <del> c.version = version <del> c.manualOverride = true <add> if version != "" { <add> c.version = version <add> c.manualOverride = true <add> } <ide> return nil <ide> } <ide> }
2
Ruby
Ruby
improve the docs for ordered options
d091b9ccee20cf77153a60cf6445ff2fbfb8bb45
<ide><path>activesupport/lib/active_support/ordered_options.rb <ide> require "active_support/core_ext/object/blank" <ide> <ide> module ActiveSupport <del> # Usually key value pairs are handled something like this: <add> # +OrderedOptions+ inherits from +Hash+ and provides dynamic accessor methods. <add> # <add> # With a +Hash+, key-value pairs are typically managed like this: <ide> # <ide> # h = {} <ide> # h[:boy] = 'John' <ide> module ActiveSupport <ide> # h[:girl] # => 'Mary' <ide> # h[:dog] # => nil <ide> # <del> # Using +OrderedOptions+, the above code could be reduced to: <add> # Using +OrderedOptions+, the above code can be written as: <ide> # <ide> # h = ActiveSupport::OrderedOptions.new <ide> # h.boy = 'John'
1
PHP
PHP
add context() to cakesocket
9e725641ad50cf083924a54f74e46cf6a7e8eac0
<ide><path>lib/Cake/Network/CakeSocket.php <ide> public function connect() { <ide> } <ide> <ide> $scheme = null; <del> if (isset($this->config['request']) && $this->config['request']['uri']['scheme'] == 'https') { <add> if (isset($this->config['request']['uri']) && $this->config['request']['uri']['scheme'] == 'https') { <ide> $scheme = 'ssl://'; <ide> } <ide> <del> if ($this->config['persistent']) { <del> $this->connection = @pfsockopen($scheme . $this->config['host'], $this->config['port'], $errNum, $errStr, $this->config['timeout']); <add> if (!empty($this->config['request']['context'])) { <add> $context = stream_context_create($this->config['request']['context']); <ide> } else { <del> $this->connection = @fsockopen($scheme . $this->config['host'], $this->config['port'], $errNum, $errStr, $this->config['timeout']); <add> $context = stream_context_create(); <ide> } <ide> <add> $connectAs = STREAM_CLIENT_CONNECT; <add> if ($this->config['persistent']) { <add> $connectAs = STREAM_CLIENT_PERSISTENT; <add> } <add> $this->connection = @stream_socket_client( <add> $scheme . $this->config['host'] . ':' . $this->config['port'], <add> $errNum, <add> $errStr, <add> $this->config['timeout'], <add> $connectAs, <add> $context <add> ); <add> <ide> if (!empty($errNum) || !empty($errStr)) { <ide> $this->setLastError($errNum, $errStr); <ide> throw new SocketException($errStr, $errNum); <ide> public function connect() { <ide> return $this->connected; <ide> } <ide> <add>/** <add> * Get the connection context. <add> * <add> * @return array <add> */ <add> public function context() { <add> return stream_context_get_options($this->connection); <add> } <add> <ide> /** <ide> * Get the host name of the current connection. <ide> * <ide><path>lib/Cake/Test/Case/Network/CakeSocketTest.php <ide> public function testEnableCryptoEnableStatus() { <ide> $this->assertTrue($this->Socket->encrypted); <ide> } <ide> <add>/** <add> * test getting the context for a socket. <add> * <add> * @return void <add> */ <add> public function testGetContext() { <add> $config = array( <add> 'host' => 'smtp.gmail.com', <add> 'port' => 465, <add> 'timeout' => 5, <add> 'request' => array( <add> 'context' => array( <add> 'ssl' => array('capture_peer' => true) <add> ) <add> ) <add> ); <add> $this->Socket = new CakeSocket($config); <add> $this->Socket->connect(); <add> $result = $this->Socket->context(); <add> $this->assertEquals($config['request']['context'], $result); <add> } <add> <ide> }
2
Go
Go
fix docker cp when container source is /
171538c190ee3a1a8211946ab8fa78cdde54b47a
<ide><path>daemon/archive.go <ide> func (daemon *Daemon) containerArchivePath(container *container.Container, path <ide> if driver.Base(resolvedPath) == "." { <ide> resolvedPath += string(driver.Separator()) + "." <ide> } <del> sourceDir, sourceBase := driver.Dir(resolvedPath), driver.Base(resolvedPath) <add> <add> sourceDir := resolvedPath <add> sourceBase := "." <add> <add> if stat.Mode&os.ModeDir == 0 { // not dir <add> sourceDir, sourceBase = driver.Split(resolvedPath) <add> } <ide> opts := archive.TarResourceRebaseOpts(sourceBase, driver.Base(absPath)) <ide> <ide> data, err := archivePath(driver, sourceDir, opts, container.BaseFS.Path()) <ide> func (daemon *Daemon) containerCopy(container *container.Container, resource str <ide> d, f := driver.Split(basePath) <ide> basePath = d <ide> filter = []string{f} <del> } else { <del> filter = []string{driver.Base(basePath)} <del> basePath = driver.Dir(basePath) <ide> } <ide> archive, err := archivePath(driver, basePath, &archive.TarOptions{ <ide> Compression: archive.Uncompressed,
1
Python
Python
add visualbert type hints
9947dd077c1dd3a4e220b1846ed38f475641e21d
<ide><path>src/transformers/models/visual_bert/modeling_visual_bert.py <ide> <ide> import math <ide> from dataclasses import dataclass <del>from typing import Optional, Tuple <add>from typing import Optional, Tuple, Union <ide> <ide> import torch <ide> import torch.utils.checkpoint <ide> class PreTrainedModel <ide> @replace_return_docstrings(output_type=BaseModelOutputWithPooling, config_class=_CONFIG_FOR_DOC) <ide> def forward( <ide> self, <del> input_ids=None, <del> attention_mask=None, <del> token_type_ids=None, <del> position_ids=None, <del> head_mask=None, <del> inputs_embeds=None, <del> visual_embeds=None, <del> visual_attention_mask=None, <del> visual_token_type_ids=None, <del> image_text_alignment=None, <del> output_attentions=None, <del> output_hidden_states=None, <del> return_dict=None, <del> ): <add> input_ids: Optional[torch.LongTensor] = None, <add> attention_mask: Optional[torch.LongTensor] = None, <add> token_type_ids: Optional[torch.LongTensor] = None, <add> position_ids: Optional[torch.LongTensor] = None, <add> head_mask: Optional[torch.LongTensor] = None, <add> inputs_embeds: Optional[torch.FloatTensor] = None, <add> visual_embeds: Optional[torch.FloatTensor] = None, <add> visual_attention_mask: Optional[torch.LongTensor] = None, <add> visual_token_type_ids: Optional[torch.LongTensor] = None, <add> image_text_alignment: Optional[torch.LongTensor] = None, <add> output_attentions: Optional[bool] = None, <add> output_hidden_states: Optional[bool] = None, <add> return_dict: Optional[bool] = None, <add> ) -> Union[Tuple[torch.Tensor], BaseModelOutputWithPooling]: <ide> r""" <ide> <ide> Returns: <ide> def set_output_embeddings(self, new_embeddings): <ide> @replace_return_docstrings(output_type=VisualBertForPreTrainingOutput, config_class=_CONFIG_FOR_DOC) <ide> def forward( <ide> self, <del> input_ids=None, <del> attention_mask=None, <del> token_type_ids=None, <del> position_ids=None, <del> head_mask=None, <del> inputs_embeds=None, <del> visual_embeds=None, <del> visual_attention_mask=None, <del> visual_token_type_ids=None, <del> image_text_alignment=None, <del> output_attentions=None, <del> output_hidden_states=None, <del> return_dict=None, <del> labels=None, <del> sentence_image_labels=None, <del> ): <add> input_ids: Optional[torch.LongTensor] = None, <add> attention_mask: Optional[torch.LongTensor] = None, <add> token_type_ids: Optional[torch.LongTensor] = None, <add> position_ids: Optional[torch.LongTensor] = None, <add> head_mask: Optional[torch.LongTensor] = None, <add> inputs_embeds: Optional[torch.FloatTensor] = None, <add> visual_embeds: Optional[torch.FloatTensor] = None, <add> visual_attention_mask: Optional[torch.LongTensor] = None, <add> visual_token_type_ids: Optional[torch.LongTensor] = None, <add> image_text_alignment: Optional[torch.LongTensor] = None, <add> output_attentions: Optional[bool] = None, <add> output_hidden_states: Optional[bool] = None, <add> return_dict: Optional[bool] = None, <add> labels: Optional[torch.LongTensor] = None, <add> sentence_image_labels: Optional[torch.LongTensor] = None, <add> ) -> Union[Tuple[torch.Tensor], VisualBertForPreTrainingOutput]: <ide> r""" <ide> labels (`torch.LongTensor` of shape `(batch_size, total_sequence_length)`, *optional*): <ide> Labels for computing the masked language modeling loss. 
Indices should be in `[-100, 0, ..., <ide> def __init__(self, config): <ide> @replace_return_docstrings(output_type=MultipleChoiceModelOutput, config_class=_CONFIG_FOR_DOC) <ide> def forward( <ide> self, <del> input_ids=None, <del> attention_mask=None, <del> token_type_ids=None, <del> position_ids=None, <del> head_mask=None, <del> inputs_embeds=None, <del> visual_embeds=None, <del> visual_attention_mask=None, <del> visual_token_type_ids=None, <del> image_text_alignment=None, <del> output_attentions=None, <del> output_hidden_states=None, <del> return_dict=None, <del> labels=None, <del> ): <add> input_ids: Optional[torch.LongTensor] = None, <add> attention_mask: Optional[torch.LongTensor] = None, <add> token_type_ids: Optional[torch.LongTensor] = None, <add> position_ids: Optional[torch.LongTensor] = None, <add> head_mask: Optional[torch.LongTensor] = None, <add> inputs_embeds: Optional[torch.FloatTensor] = None, <add> visual_embeds: Optional[torch.FloatTensor] = None, <add> visual_attention_mask: Optional[torch.LongTensor] = None, <add> visual_token_type_ids: Optional[torch.LongTensor] = None, <add> image_text_alignment: Optional[torch.LongTensor] = None, <add> output_attentions: Optional[bool] = None, <add> output_hidden_states: Optional[bool] = None, <add> return_dict: Optional[bool] = None, <add> labels: Optional[torch.LongTensor] = None, <add> ) -> Union[Tuple[torch.Tensor], MultipleChoiceModelOutput]: <ide> r""" <ide> labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*): <ide> Labels for computing the multiple choice classification loss. Indices should be in `[0, ..., <ide> def __init__(self, config): <ide> @replace_return_docstrings(output_type=SequenceClassifierOutput, config_class=_CONFIG_FOR_DOC) <ide> def forward( <ide> self, <del> input_ids=None, <del> attention_mask=None, <del> token_type_ids=None, <del> position_ids=None, <del> head_mask=None, <del> inputs_embeds=None, <del> visual_embeds=None, <del> visual_attention_mask=None, <del> visual_token_type_ids=None, <del> image_text_alignment=None, <del> output_attentions=None, <del> output_hidden_states=None, <del> return_dict=None, <del> labels=None, <del> ): <add> input_ids: Optional[torch.LongTensor] = None, <add> attention_mask: Optional[torch.LongTensor] = None, <add> token_type_ids: Optional[torch.LongTensor] = None, <add> position_ids: Optional[torch.LongTensor] = None, <add> head_mask: Optional[torch.LongTensor] = None, <add> inputs_embeds: Optional[torch.FloatTensor] = None, <add> visual_embeds: Optional[torch.FloatTensor] = None, <add> visual_attention_mask: Optional[torch.LongTensor] = None, <add> visual_token_type_ids: Optional[torch.LongTensor] = None, <add> image_text_alignment: Optional[torch.LongTensor] = None, <add> output_attentions: Optional[bool] = None, <add> output_hidden_states: Optional[bool] = None, <add> return_dict: Optional[bool] = None, <add> labels: Optional[torch.LongTensor] = None, <add> ) -> Union[Tuple[torch.Tensor], SequenceClassifierOutput]: <ide> r""" <ide> labels (`torch.LongTensor` of shape `(batch_size, total_sequence_length)`, *optional*): <ide> Labels for computing the sequence classification/regression loss. 
Indices should be in `[0, ..., <ide> def __init__(self, config): <ide> @replace_return_docstrings(output_type=SequenceClassifierOutput, config_class=_CONFIG_FOR_DOC) <ide> def forward( <ide> self, <del> input_ids=None, <del> attention_mask=None, <del> token_type_ids=None, <del> position_ids=None, <del> head_mask=None, <del> inputs_embeds=None, <del> visual_embeds=None, <del> visual_attention_mask=None, <del> visual_token_type_ids=None, <del> image_text_alignment=None, <del> output_attentions=None, <del> output_hidden_states=None, <del> return_dict=None, <del> labels=None, <del> ): <add> input_ids: Optional[torch.LongTensor] = None, <add> attention_mask: Optional[torch.LongTensor] = None, <add> token_type_ids: Optional[torch.LongTensor] = None, <add> position_ids: Optional[torch.LongTensor] = None, <add> head_mask: Optional[torch.LongTensor] = None, <add> inputs_embeds: Optional[torch.FloatTensor] = None, <add> visual_embeds: Optional[torch.FloatTensor] = None, <add> visual_attention_mask: Optional[torch.LongTensor] = None, <add> visual_token_type_ids: Optional[torch.LongTensor] = None, <add> image_text_alignment: Optional[torch.LongTensor] = None, <add> output_attentions: Optional[bool] = None, <add> output_hidden_states: Optional[bool] = None, <add> return_dict: Optional[bool] = None, <add> labels: Optional[torch.LongTensor] = None, <add> ) -> Union[Tuple[torch.Tensor], SequenceClassifierOutput]: <ide> r""" <ide> labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*): <ide> Labels for computing the sequence classification/regression loss. Indices should be in `[0, ..., <ide> def __init__(self, config): <ide> @replace_return_docstrings(output_type=SequenceClassifierOutput, config_class=_CONFIG_FOR_DOC) <ide> def forward( <ide> self, <del> input_ids=None, <del> attention_mask=None, <del> token_type_ids=None, <del> position_ids=None, <del> head_mask=None, <del> inputs_embeds=None, <del> visual_embeds=None, <del> visual_attention_mask=None, <del> visual_token_type_ids=None, <del> image_text_alignment=None, <del> output_attentions=None, <del> output_hidden_states=None, <del> return_dict=None, <del> region_to_phrase_position=None, <del> labels=None, <del> ): <add> input_ids: Optional[torch.LongTensor] = None, <add> attention_mask: Optional[torch.LongTensor] = None, <add> token_type_ids: Optional[torch.LongTensor] = None, <add> position_ids: Optional[torch.LongTensor] = None, <add> head_mask: Optional[torch.LongTensor] = None, <add> inputs_embeds: Optional[torch.FloatTensor] = None, <add> visual_embeds: Optional[torch.FloatTensor] = None, <add> visual_attention_mask: Optional[torch.LongTensor] = None, <add> visual_token_type_ids: Optional[torch.LongTensor] = None, <add> image_text_alignment: Optional[torch.LongTensor] = None, <add> output_attentions: Optional[bool] = None, <add> output_hidden_states: Optional[bool] = None, <add> return_dict: Optional[bool] = None, <add> region_to_phrase_position: Optional[torch.LongTensor] = None, <add> labels: Optional[torch.LongTensor] = None, <add> ) -> Union[Tuple[torch.Tensor], SequenceClassifierOutput]: <ide> r""" <ide> region_to_phrase_position (`torch.LongTensor` of shape `(batch_size, total_sequence_length)`, *optional*): <ide> The positions depicting the position of the image embedding corresponding to the textual tokens.
1
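
The diff above only swaps untyped keyword arguments for explicit Optional[...] hints and a Union[...] return annotation. A framework-free sketch of the same annotation pattern — the names are invented for illustration; the real VisualBERT signatures take torch tensors:

```python
from typing import Dict, Optional, Tuple, Union


def forward_stub(
    input_ids: Optional[Tuple[int, ...]] = None,
    attention_mask: Optional[Tuple[int, ...]] = None,
    return_dict: Optional[bool] = None,
) -> Union[Tuple[float, ...], Dict[str, float]]:
    # Optional[...] pairs with the None defaults; the Union return type
    # covers both the plain-tuple and the dict-like output shapes.
    score = float(len(input_ids or ()))
    if return_dict:
        return {"score": score}
    return (score,)


print(forward_stub(input_ids=(1, 2, 3), return_dict=True))
```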
Javascript
Javascript
fix typo in a comment
5cec4612c49dc729df2b82f5f8c0d60b6e9f60d5
<ide><path>src/ng/directive/ngRepeat.js <ide> var ngRepeatDirective = ['$parse', '$animate', function($parse, $animate) { <ide> nextBlockMap[trackById] = block; <ide> nextBlockOrder[index] = block; <ide> } else if (nextBlockMap[trackById]) { <del> // id collision detected. restore lastBlockMap and throw an error <add> // if collision detected. restore lastBlockMap and throw an error <ide> forEach(nextBlockOrder, function (block) { <ide> if (block && block.scope) lastBlockMap[block.id] = block; <ide> });
1
Go
Go
improve client output
8742649aa7f3524bbfa99b68c8d87ffc5aba0af9
<ide><path>commands.go <ide> func (cli *DockerCli) CmdBuild(args ...string) error { <ide> // FIXME: ProgressReader shouldn't be this annoyning to use <ide> if context != nil { <ide> sf := utils.NewStreamFormatter(false) <del> body = utils.ProgressReader(ioutil.NopCloser(context), 0, cli.err, sf.FormatProgress("Uploading context", "%v bytes%0.0s%0.0s", ""), sf) <add> body = utils.ProgressReader(ioutil.NopCloser(context), 0, cli.err, sf.FormatProgress("", "Uploading context", "%v bytes%0.0s%0.0s"), sf) <ide> } <ide> // Upload the build context <ide> v := &url.Values{} <ide> func (cli *DockerCli) stream(method, path string, in io.Reader, out io.Writer) e <ide> <ide> if resp.Header.Get("Content-Type") == "application/json" { <ide> dec := json.NewDecoder(resp.Body) <add> jm := utils.JSONMessage{} <ide> for { <del> var jm utils.JSONMessage <ide> if err := dec.Decode(&jm); err == io.EOF { <ide> break <ide> } else if err != nil { <ide> return err <ide> } <ide> jm.Display(out) <ide> } <add> if jm.Progress != "" { <add> fmt.Fprintf(out, "\n") <add> } <ide> } else { <ide> if _, err := io.Copy(out, resp.Body); err != nil { <ide> return err <ide><path>graph.go <ide> func (graph *Graph) TempLayerArchive(id string, compression Compression, sf *uti <ide> if err != nil { <ide> return nil, err <ide> } <del> return NewTempArchive(utils.ProgressReader(ioutil.NopCloser(archive), 0, output, sf.FormatProgress("Buffering to disk", "%v/%v (%v)", ""), sf), tmp.Root) <add> return NewTempArchive(utils.ProgressReader(ioutil.NopCloser(archive), 0, output, sf.FormatProgress("", "Buffering to disk", "%v/%v (%v)"), sf), tmp.Root) <ide> } <ide> <ide> // Mktemp creates a temporary sub-directory inside the graph's filesystem. <ide><path>server.go <ide> func (srv *Server) ImageInsert(name, url, path string, out io.Writer, sf *utils. <ide> return "", err <ide> } <ide> <del> if err := c.Inject(utils.ProgressReader(file.Body, int(file.ContentLength), out, sf.FormatProgress("Downloading", "%8v/%v (%v)", ""), sf), path); err != nil { <add> if err := c.Inject(utils.ProgressReader(file.Body, int(file.ContentLength), out, sf.FormatProgress("", "Downloading", "%8v/%v (%v)"), sf), path); err != nil { <ide> return "", err <ide> } <ide> // FIXME: Handle custom repo, tag comment, author <ide> img, err = b.Commit(c, "", "", img.Comment, img.Author, nil) <ide> if err != nil { <ide> return "", err <ide> } <del> out.Write(sf.FormatStatus(img.ID)) <add> out.Write(sf.FormatStatus("", img.ID)) <ide> return img.ShortID(), nil <ide> } <ide> <ide> func (srv *Server) pullImage(r *registry.Registry, out io.Writer, imgID, endpoin <ide> // FIXME: Launch the getRemoteImage() in goroutines <ide> for _, id := range history { <ide> if !srv.runtime.graph.Exists(id) { <del> out.Write(sf.FormatStatus("Pulling %s metadata", id)) <add> out.Write(sf.FormatStatus(utils.TruncateID(id), "Pulling metadata")) <ide> imgJSON, imgSize, err := r.GetRemoteImageJSON(id, endpoint, token) <ide> if err != nil { <ide> // FIXME: Keep goging in case of error? 
<ide> func (srv *Server) pullImage(r *registry.Registry, out io.Writer, imgID, endpoin <ide> } <ide> <ide> // Get the layer <del> out.Write(sf.FormatStatus("Pulling %s fs layer", id)) <add> out.Write(sf.FormatStatus(utils.TruncateID(id), "Pulling fs layer")) <ide> layer, err := r.GetRemoteImageLayer(img.ID, endpoint, token) <ide> if err != nil { <ide> return err <ide> } <ide> defer layer.Close() <del> if err := srv.runtime.graph.Register(utils.ProgressReader(layer, imgSize, out, sf.FormatProgress("Downloading", "%8v/%v (%v)", id), sf), false, img); err != nil { <add> if err := srv.runtime.graph.Register(utils.ProgressReader(layer, imgSize, out, sf.FormatProgress(utils.TruncateID(id), "Downloading", "%8v/%v (%v)"), sf), false, img); err != nil { <ide> return err <ide> } <ide> } <ide> func (srv *Server) pullImage(r *registry.Registry, out io.Writer, imgID, endpoin <ide> } <ide> <ide> func (srv *Server) pullRepository(r *registry.Registry, out io.Writer, localName, remoteName, askedTag, indexEp string, sf *utils.StreamFormatter) error { <del> out.Write(sf.FormatStatus("Pulling repository %s", localName)) <add> out.Write(sf.FormatStatus("", "Pulling repository %s", localName)) <ide> <ide> repoData, err := r.GetRepositoryData(indexEp, remoteName) <ide> if err != nil { <ide> func (srv *Server) pullRepository(r *registry.Registry, out io.Writer, localName <ide> errors <- nil <ide> return <ide> } <del> out.Write(sf.FormatStatus("Pulling image %s (%s) from %s", img.ID, img.Tag, localName)) <add> out.Write(sf.FormatStatus(utils.TruncateID(img.ID), "Pulling image (%s) from %s", img.Tag, localName)) <ide> success := false <ide> for _, ep := range repoData.Endpoints { <ide> if err := srv.pullImage(r, out, img.ID, ep, repoData.Tokens, sf); err != nil { <del> out.Write(sf.FormatStatus("Error while retrieving image for tag: %s (%s); checking next endpoint", askedTag, err)) <add> out.Write(sf.FormatStatus(utils.TruncateID(img.ID), "Error while retrieving image for tag: %s (%s); checking next endpoint", askedTag, err)) <ide> continue <ide> } <ide> success = true <ide> func (srv *Server) getImageList(localRepo map[string]string) ([]*registry.ImgDat <ide> <ide> func (srv *Server) pushRepository(r *registry.Registry, out io.Writer, localName, remoteName string, localRepo map[string]string, indexEp string, sf *utils.StreamFormatter) error { <ide> out = utils.NewWriteFlusher(out) <del> out.Write(sf.FormatStatus("Processing checksums")) <add> out.Write(sf.FormatStatus("", "Processing checksums")) <ide> imgList, err := srv.getImageList(localRepo) <ide> if err != nil { <ide> return err <ide> } <del> out.Write(sf.FormatStatus("Sending image list")) <add> out.Write(sf.FormatStatus("", "Sending image list")) <ide> <ide> var repoData *registry.RepositoryData <ide> repoData, err = r.PushImageJSONIndex(indexEp, remoteName, imgList, false, nil) <ide> func (srv *Server) pushRepository(r *registry.Registry, out io.Writer, localName <ide> } <ide> <ide> for _, ep := range repoData.Endpoints { <del> out.Write(sf.FormatStatus("Pushing repository %s (%d tags)", localName, len(localRepo))) <add> out.Write(sf.FormatStatus("", "Pushing repository %s (%d tags)", localName, len(localRepo))) <ide> // For each image within the repo, push them <ide> for _, elem := range imgList { <ide> if _, exists := repoData.ImgList[elem.ID]; exists { <del> out.Write(sf.FormatStatus("Image %s already pushed, skipping", elem.ID)) <add> out.Write(sf.FormatStatus("", "Image %s already pushed, skipping", elem.ID)) <ide> continue <ide> } else if 
r.LookupRemoteImage(elem.ID, ep, repoData.Tokens) { <del> out.Write(sf.FormatStatus("Image %s already pushed, skipping", elem.ID)) <add> out.Write(sf.FormatStatus("", "Image %s already pushed, skipping", elem.ID)) <ide> continue <ide> } <ide> if err := srv.pushImage(r, out, remoteName, elem.ID, ep, repoData.Tokens, sf); err != nil { <ide> // FIXME: Continue on error? <ide> return err <ide> } <del> out.Write(sf.FormatStatus("Pushing tags for rev [%s] on {%s}", elem.ID, ep+"repositories/"+remoteName+"/tags/"+elem.Tag)) <add> out.Write(sf.FormatStatus("", "Pushing tags for rev [%s] on {%s}", elem.ID, ep+"repositories/"+remoteName+"/tags/"+elem.Tag)) <ide> if err := r.PushRegistryTag(remoteName, elem.ID, elem.Tag, ep, repoData.Tokens); err != nil { <ide> return err <ide> } <ide> func (srv *Server) pushImage(r *registry.Registry, out io.Writer, remote, imgID, <ide> if err != nil { <ide> return fmt.Errorf("Error while retreiving the path for {%s}: %s", imgID, err) <ide> } <del> out.Write(sf.FormatStatus("Pushing %s", imgID)) <add> out.Write(sf.FormatStatus("", "Pushing %s", imgID)) <ide> <ide> // Make sure we have the image's checksum <ide> checksum, err := srv.getChecksum(imgID) <ide> func (srv *Server) pushImage(r *registry.Registry, out io.Writer, remote, imgID, <ide> // Send the json <ide> if err := r.PushImageJSONRegistry(imgData, jsonRaw, ep, token); err != nil { <ide> if err == registry.ErrAlreadyExists { <del> out.Write(sf.FormatStatus("Image %s already pushed, skipping", imgData.ID)) <add> out.Write(sf.FormatStatus("", "Image %s already pushed, skipping", imgData.ID)) <ide> return nil <ide> } <ide> return err <ide> func (srv *Server) pushImage(r *registry.Registry, out io.Writer, remote, imgID, <ide> } <ide> <ide> // Send the layer <del> if err := r.PushImageLayerRegistry(imgData.ID, utils.ProgressReader(layerData, int(layerData.Size), out, sf.FormatProgress("Pushing", "%8v/%v (%v)", ""), sf), ep, token); err != nil { <add> if err := r.PushImageLayerRegistry(imgData.ID, utils.ProgressReader(layerData, int(layerData.Size), out, sf.FormatProgress("", "Pushing", "%8v/%v (%v)"), sf), ep, token); err != nil { <ide> return err <ide> } <ide> return nil <ide> func (srv *Server) ImagePush(localName string, out io.Writer, sf *utils.StreamFo <ide> <ide> if err != nil { <ide> reposLen := len(srv.runtime.repositories.Repositories[localName]) <del> out.Write(sf.FormatStatus("The push refers to a repository [%s] (len: %d)", localName, reposLen)) <add> out.Write(sf.FormatStatus("", "The push refers to a repository [%s] (len: %d)", localName, reposLen)) <ide> // If it fails, try to get the repository <ide> if localRepo, exists := srv.runtime.repositories.Repositories[localName]; exists { <ide> if err := srv.pushRepository(r, out, localName, remoteName, localRepo, endpoint, sf); err != nil { <ide> func (srv *Server) ImagePush(localName string, out io.Writer, sf *utils.StreamFo <ide> } <ide> <ide> var token []string <del> out.Write(sf.FormatStatus("The push refers to an image: [%s]", localName)) <add> out.Write(sf.FormatStatus("", "The push refers to an image: [%s]", localName)) <ide> if err := srv.pushImage(r, out, remoteName, img.ID, endpoint, token, sf); err != nil { <ide> return err <ide> } <ide> func (srv *Server) ImageImport(src, repo, tag string, in io.Reader, out io.Write <ide> u.Host = src <ide> u.Path = "" <ide> } <del> out.Write(sf.FormatStatus("Downloading from %s", u)) <add> out.Write(sf.FormatStatus("", "Downloading from %s", u)) <ide> // Download with curl (pretty progress bar) <ide> // If 
curl is not available, fallback to http.Get() <ide> resp, err = utils.Download(u.String(), out) <ide> if err != nil { <ide> return err <ide> } <del> archive = utils.ProgressReader(resp.Body, int(resp.ContentLength), out, sf.FormatProgress("Importing", "%8v/%v (%v)", ""), sf) <add> archive = utils.ProgressReader(resp.Body, int(resp.ContentLength), out, sf.FormatProgress("", "Importing", "%8v/%v (%v)"), sf) <ide> } <ide> img, err := srv.runtime.graph.Create(archive, nil, "Imported from "+src, "", nil) <ide> if err != nil { <ide> func (srv *Server) ImageImport(src, repo, tag string, in io.Reader, out io.Write <ide> return err <ide> } <ide> } <del> out.Write(sf.FormatStatus(img.ShortID())) <add> out.Write(sf.FormatStatus("", img.ShortID())) <ide> return nil <ide> } <ide> <ide><path>utils/utils.go <ide> func (r *progressReader) Read(p []byte) (n int, err error) { <ide> } <ide> r.lastUpdate = r.readProgress <ide> } <del> // Send newline when complete <del> if err != nil { <del> r.output.Write(r.sf.FormatStatus("")) <del> } <del> <ide> return read, err <ide> } <ide> func (r *progressReader) Close() error { <ide> type JSONMessage struct { <ide> } <ide> <ide> func (jm *JSONMessage) Display(out io.Writer) (error) { <add> if jm.Error != "" { <add> return fmt.Errorf(jm.Error) <add> } <ide> if jm.Time != 0 { <ide> fmt.Fprintf(out, "[%s] ", time.Unix(jm.Time, 0)) <ide> } <del> if jm.Progress != "" && jm.ID != ""{ <del> fmt.Fprintf(out, "\n%s %s %s\r", jm.Status, jm.ID, jm.Progress) <del> } else if jm.Progress != "" { <add> if jm.ID != "" { <add> fmt.Fprintf(out, "%s: ", jm.ID) <add> } <add> if jm.Progress != "" { <ide> fmt.Fprintf(out, "%s %s\r", jm.Status, jm.Progress) <del> } else if jm.Error != "" { <del> return fmt.Errorf(jm.Error) <del> } else if jm.ID != "" { <del> fmt.Fprintf(out, "%s: %s\n", jm.ID, jm.Status) <ide> } else { <ide> fmt.Fprintf(out, "%s\n", jm.Status) <ide> } <ide> return nil <ide> } <ide> <del> <ide> type StreamFormatter struct { <ide> json bool <ide> used bool <ide> func NewStreamFormatter(json bool) *StreamFormatter { <ide> return &StreamFormatter{json, false} <ide> } <ide> <del>func (sf *StreamFormatter) FormatStatus(format string, a ...interface{}) []byte { <add>func (sf *StreamFormatter) FormatStatus(id, format string, a ...interface{}) []byte { <ide> sf.used = true <ide> str := fmt.Sprintf(format, a...) <ide> if sf.json { <del> b, err := json.Marshal(&JSONMessage{Status: str}) <add> b, err := json.Marshal(&JSONMessage{ID: id, Status: str}) <ide> if err != nil { <ide> return sf.FormatError(err) <ide> } <ide> func (sf *StreamFormatter) FormatError(err error) []byte { <ide> return []byte("Error: " + err.Error() + "\r\n") <ide> } <ide> <del>func (sf *StreamFormatter) FormatProgress(action, str, id string) []byte { <add>func (sf *StreamFormatter) FormatProgress(id, action, progress string) []byte { <ide> sf.used = true <ide> if sf.json { <del> b, err := json.Marshal(&JSONMessage{Status: action, Progress: str, ID:id}) <add> b, err := json.Marshal(&JSONMessage{Status: action, Progress: progress, ID:id}) <ide> if err != nil { <ide> return nil <ide> } <ide> return b <ide> } <del> return []byte(action + " " + str + "\r") <add> return []byte(action + " " + progress + "\r") <ide> } <ide> <ide> func (sf *StreamFormatter) Used() bool {
4
PHP
PHP
reduce options for bootstrapping
caa6b8d13ac0067d57e4f44444e344c80aa89540
<ide><path>lib/Cake/Core/Plugin.php <ide> class Plugin { <ide> * <ide> * Will load routes.php file but not bootstrap.php <ide> * <del> * `Plugin::load('DebugKit', ['bootstrap' => ['config1', 'config2']]) <del> * <del> * Will load config1.php and config2.php files <del> * <del> * `Plugin::load('DebugKit', ['bootstrap' => '\DebugKit\SomeClass::bootstrap'])` <del> * <del> * Will run the \DebugKit\SomeClass::bootstrap() function to initialize it <del> * <ide> * `Plugin::load('DebugKit', ['namespace' => 'Cake\DebugKit'])` <ide> * <ide> * Will load files on APP/Plugin/Cake/DebugKit/... <ide> class Plugin { <ide> * <ide> * ## Configuration options <ide> * <del> * - `bootstrap` - array|boolean - Whether or not you want the $plugin/Config/bootstrap.php file loaded. <del> * Can also be an array of files to load from $plugin/Config. <add> * - `bootstrap` - array - Whether or not you want the $plugin/Config/bootstrap.php file loaded. <ide> * - `routes` - boolean - Whether or not you want to load the $plugin/Config/routes.php file. <ide> * - `namespace` - string - A custom namespace for the plugin. It will default to the plugin name. <ide> * - `ignoreMissing` - boolean - Set to true to ignore missing bootstrap/routes files. <ide> public static function bootstrap($plugin) { <ide> if ($config['bootstrap'] === false) { <ide> return false; <ide> } <del> if (is_callable($config['bootstrap'])) { <del> $cb = $config['bootstrap']; <del> return $cb($plugin, $config); <del> } <del> <ide> $path = static::path($plugin); <ide> if ($config['bootstrap'] === true) { <ide> return static::_includeFile( <ide> $path . 'Config/bootstrap.php', <ide> $config['ignoreMissing'] <ide> ); <ide> } <del> <del> $bootstrap = (array)$config['bootstrap']; <del> foreach ($bootstrap as $file) { <del> static::_includeFile( <del> $path . 'Config' . DS . $file . 
'.php', <del> $config['ignoreMissing'] <del> ); <del> } <del> <del> return true; <ide> } <ide> <ide> /** <ide><path>lib/Cake/Test/TestCase/Core/PluginTest.php <ide> public function testLoadMultipleWithDefaultsAndOverride() { <ide> $this->assertEquals(null, Configure::read('PluginTest.test_plugin_two.bootstrap')); <ide> } <ide> <del>/** <del> * Tests that it is possible to load multiple bootstrap files at once <del> * <del> * @return void <del> */ <del> public function testMultipleBootstrapFiles() { <del> Plugin::load('TestPlugin', array('bootstrap' => array('bootstrap', 'custom_config'))); <del> $this->assertTrue(Plugin::loaded('TestPlugin')); <del> $this->assertEquals('loaded plugin bootstrap', Configure::read('PluginTest.test_plugin.bootstrap')); <del> } <del> <del>/** <del> * Tests that it is possible to load plugin bootstrap by calling a callback function <del> * <del> * @return void <del> */ <del> public function testCallbackBootstrap() { <del> Plugin::load('TestPlugin', array('bootstrap' => array($this, 'pluginBootstrap'))); <del> $this->assertTrue(Plugin::loaded('TestPlugin')); <del> $this->assertEquals('called plugin bootstrap callback', Configure::read('PluginTest.test_plugin.bootstrap')); <del> } <del> <ide> /** <ide> * Tests that loading a missing routes file throws a warning <ide> * <ide> public function testLoadAllWithDefaultsAndOverride() { <ide> $this->assertEquals('loaded plugin two bootstrap', Configure::read('PluginTest.test_plugin_two.bootstrap')); <ide> } <ide> <del>/** <del> * Auxiliary function to test plugin bootstrap callbacks <del> * <del> * @return void <del> */ <del> public function pluginBootstrap() { <del> Configure::write('PluginTest.test_plugin.bootstrap', 'called plugin bootstrap callback'); <del> } <ide> }
2
Ruby
Ruby
avoid multiple default paths to server.pid file
221b4aead579ebae28477e889a7ee68a6668b0fa
<ide><path>railties/lib/rails/commands/server.rb <ide> module Rails <ide> class Server < ::Rack::Server <ide> class Options <add> DEFAULT_PID_PATH = File.expand_path("tmp/pids/server.pid").freeze <add> <ide> def parse!(args) <ide> args, options = args.dup, {} <ide> <ide> def default_options <ide> environment: (ENV['RAILS_ENV'] || ENV['RACK_ENV'] || "development").dup, <ide> daemonize: false, <ide> caching: false, <del> pid: File.expand_path("tmp/pids/server.pid") <add> pid: Options::DEFAULT_PID_PATH <ide> }) <ide> end <ide> <ide><path>railties/test/commands/server_test.rb <ide> def test_log_stdout <ide> end <ide> end <ide> end <add> <add> def test_default_options <add> server = Rails::Server.new <add> old_default_options = server.default_options <add> <add> Dir.chdir("..") do <add> assert_equal old_default_options, server.default_options <add> end <add> end <ide> end
2
Mixed
Javascript
add fundamentals overview page
cc26cd50c75da96a1655118b5b14877864c2ea7b
<ide><path>docs/tutorials/fundamentals/part-1-overview.md <add>--- <add>id: part-1-overview <add>title: 'Redux Fundamentals, Part 1: Redux Overview' <add>sidebar_label: 'Redux Overview' <add>hide_title: true <add>description: 'The official Fundamentals tutorial for Redux: learn the fundamentals of using Redux' <add>--- <add> <add>import { DetailedExplanation } from '../../components/DetailedExplanation' <add> <add># Redux Fundamentals, Part 1: Redux Overview <add> <add>:::tip What You'll Learn <add> <add>- What Redux is and why you might want to use it <add>- The basic pieces that make up a Redux app <add> <add>::: <add> <add>## Introduction <add> <add>Welcome to the Redux Fundamentals tutorial! **This tutorial will introduce you to the core concepts, principles, and patterns for using Redux**. By the time you finish, you should understand the different pieces that make up a Redux app, how data flows when using Redux, and our standard recommended patterns for building Redux apps. <add> <add>In Part 1 of this tutorial, we'll briefly look at a minimal example of a working Redux app to see what the pieces are, and in [Part 2](./part-2-concept-data-flow.md) we'll look at those pieces in more detail and how data flows in a Redux application. <add> <add>Starting in [Part 3](./part-3-actions-reducers.md), we'll use that knowledge to build a small example app that demonstrates how these pieces fit together and talk about how Redux works in practice. After we finish building the working example app "by hand" so that you can see exactly what's happening, we'll talk about some of the standard patterns and abstractions typically used with Redux. Finally, we'll see how these lower-level examples translate into the higher-level patterns that we recommend for actual usage in real applications. <add> <add>### How to Read This Tutorial <add> <add>This tutorial focuses on the lower-level concepts you need to know to understand Redux. However, **most of the code samples in this tutorial are only meant as examples of concepts to explain "how Redux works"**. They do not include our recommended patterns for efficiently using Redux, and so **these code samples should not be used for writing real applications**. <add> <add>:::info <add> <add>If you're looking to learn more about how Redux is used to write real-world applications, please see: <add> <add>- [**The "Modern Redux" page in this tutorial**](./part-9-modern-redux.md), which shows how to convert the low-level examples into the modern patterns we do recommend for real-world usage <add>- [**The "Redux Essentials" tutorial**](../essentials/part-1-overview-concepts.md), which teaches "how to use Redux, the right way" for real-world apps, using our latest recommended patterns and practices. <add> <add>::: <add> <add>We've tried to keep these explanations beginner-friendly, but we do need to make some assumptions about what you know already so that we can focus on explaining Redux itself. **This tutorial assumes that you know**: <add> <add>:::important Prerequisites <add> <add>- Familiarity with [HTML & CSS](https://internetingishard.com/). 
<add>- Familiarity with [ES6 syntax and features](https://www.taniarascia.com/es6-syntax-and-feature-overview/) <add>- Understanding of [the array and object spread operators](https://javascript.info/rest-parameters-spread#spread-syntax) <add>- Knowledge of React terminology: [JSX](https://reactjs.org/docs/introducing-jsx.html), [State](https://reactjs.org/docs/state-and-lifecycle.html), [Function Components, Props](https://reactjs.org/docs/components-and-props.html), and [Hooks](https://reactjs.org/docs/hooks-intro.html) <add>- Knowledge of [asynchronous JavaScript](https://javascript.info/promise-basics) and [making AJAX requests](https://javascript.info/fetch) <add> <add>::: <add> <add>**If you're not already comfortable with those topics, we encourage you to take some time to become comfortable with them first, and then come back to learn about Redux**. We'll be here when you're ready! <add> <add>Finally, you should make sure that you have the React and Redux DevTools extensions installed in your browser: <add> <add>- React DevTools Extension: <add> - [React DevTools Extension for Chrome](https://chrome.google.com/webstore/detail/react-developer-tools/fmkadmapgofadopljbjfkapdkoienihi?hl=en) <add> - [React DevTools Extension for Firefox](https://addons.mozilla.org/en-US/firefox/addon/react-devtools/) <add>- Redux DevTools Extension: <add> - [Redux DevTools Extension for Chrome](https://chrome.google.com/webstore/detail/redux-devtools/lmhkpmbekcpmknklioeibfkpmmfibljd?hl=en) <add> - [Redux DevTools Extension for Firefox](https://addons.mozilla.org/en-US/firefox/addon/reduxdevtools/) <add> <add>## What is Redux? <add> <add>It helps to understand what this "Redux" thing is in the first place. What does it do? What problems does it help me solve? Why would I want to use it? <add> <add>**Redux is a pattern and library for managing and updating application state, using events called "actions".** It serves as a centralized store for state that needs to be used across your entire application, with rules ensuring that the state can only be updated in a predictable fashion. <add> <add>### Why Should I Use Redux? <add> <add>Redux helps you manage "global" state - state that is needed across many parts of your application. <add> <add>**The patterns and tools provided by Redux make it easier to understand when, where, why, and how the state in your application is being updated, and how your application logic will behave when those changes occur**. Redux guides you towards writing code that is predictable and testable, which helps give you confidence that your application will work as expected. <add> <add>### When Should I Use Redux? <add> <add>Redux helps you deal with shared state management, but like any tool, it has tradeoffs. There's more concepts to learn, and more code to write. It also adds some indirection to your code, and asks you to follow certain restrictions. It's a trade-off between short term and long term productivity. <add> <add>Redux is more useful when: <add> <add>- You have large amounts of application state that are needed in many places in the app <add>- The app state is updated frequently over time <add>- The logic to update that state may be complex <add>- The app has a medium or large-sized codebase, and might be worked on by many people <add> <add>**Not all apps need Redux. Take some time to think about the kind of app you're building, and decide what tools would be best to help solve the problems you're working on.** <add> <add>:::info Want to Know More? 
<add> <add>If you're not sure whether Redux is a good choice for your app, these resources give some more guidance: <add> <add>- **[Redux FAQ: When should I use Redux?](../../faq/General.md#when-should-i-use-redux)** <add>- **[You Might Not Need Redux](https://medium.com/@dan_abramov/you-might-not-need-redux-be46360cf367)** <add>- **[The Tao of Redux, Part 1 - Implementation and Intent](http://blog.isquaredsoftware.com/2017/05/idiomatic-redux-tao-of-redux-part-1/)** <add> <add>::: <add> <add>### Redux Libraries and Tools <add> <add>Redux is a small standalone JS library. However, it is commonly used with several other packages: <add> <add>#### React-Redux <add> <add>Redux can integrate with any UI framework, and is most frequently used with React. [**React-Redux**](https://react-redux.js.org/) is our official package that lets your React components interact with a Redux store by reading pieces of state and dispatching actions to update the store. <add> <add>#### Redux Toolkit <add> <add>[**Redux Toolkit**](https://redux-toolkit.js.org) is our recommended approach for writing Redux logic. It contains packages and functions that we think are essential for building a Redux app. Redux Toolkit builds in our suggested best practices, simplifies most Redux tasks, prevents common mistakes, and makes it easier to write Redux applications. <add> <add>#### Redux DevTools Extension <add> <add>The [**Redux DevTools Extension**](https://github.com/zalmoxisus/redux-devtools-extension) shows a history of the changes to the state in your Redux store over time. This allows you to debug your applications effectively, including using powerful techniques like "time-travel debugging". <ide><path>website/sidebars.js <ide> module.exports = { <ide> 'tutorials/essentials/part-6-performance-normalization' <ide> ] <ide> }, <add> { <add> type: 'category', <add> label: 'Redux Fundamentals', <add> items: ['tutorials/fundamentals/part-1-overview'] <add> }, <ide> { <ide> type: 'category', <ide> label: 'Basic Tutorial',
2
Go
Go
remove restartmanager from plugins
a452d1fccb6d515545dacd4bebfa36cbf70a6535
<ide><path>plugin/manager.go <ide> type eventLogger func(id, name, action string) <ide> <ide> // Manager controls the plugin subsystem. <ide> type Manager struct { <del> sync.RWMutex <ide> libRoot string <ide> runRoot string <ide> pluginStore *store.Store <ide> containerdClient libcontainerd.Client <ide> registryService registry.Service <ide> liveRestore bool <del> shutdown bool <ide> pluginEventLogger eventLogger <ide> } <ide> <ide> func (pm *Manager) StateChanged(id string, e libcontainerd.StateInfo) error { <ide> <ide> switch e.State { <ide> case libcontainerd.StateExit: <del> var shutdown bool <del> pm.RLock() <del> shutdown = pm.shutdown <del> pm.RUnlock() <del> if shutdown { <del> p, err := pm.pluginStore.GetByID(id) <del> if err != nil { <del> return err <del> } <add> p, err := pm.pluginStore.GetByID(id) <add> if err != nil { <add> return err <add> } <add> p.RLock() <add> if p.ExitChan != nil { <ide> close(p.ExitChan) <ide> } <add> restart := p.Restart <add> p.RUnlock() <add> p.RemoveFromDisk() <add> if restart { <add> pm.enable(p, true) <add> } <ide> } <ide> <ide> return nil <ide><path>plugin/manager_linux.go <ide> import ( <ide> "time" <ide> <ide> "github.com/Sirupsen/logrus" <del> "github.com/docker/docker/api/types/container" <del> "github.com/docker/docker/libcontainerd" <ide> "github.com/docker/docker/oci" <ide> "github.com/docker/docker/pkg/plugins" <ide> "github.com/docker/docker/plugin/v2" <del> "github.com/docker/docker/restartmanager" <ide> "github.com/opencontainers/runtime-spec/specs-go" <ide> ) <ide> <ide> func (pm *Manager) enable(p *v2.Plugin, force bool) error { <ide> if err != nil { <ide> return err <ide> } <del> <del> p.RestartManager = restartmanager.New(container.RestartPolicy{Name: "always"}, 0) <del> if err := pm.containerdClient.Create(p.GetID(), "", "", specs.Spec(*spec), libcontainerd.WithRestartManager(p.RestartManager)); err != nil { <del> if err := p.RestartManager.Cancel(); err != nil { <del> logrus.Errorf("enable: restartManager.Cancel failed due to %v", err) <del> } <add> p.Lock() <add> p.Restart = true <add> p.Unlock() <add> if err := pm.containerdClient.Create(p.GetID(), "", "", specs.Spec(*spec)); err != nil { <ide> return err <ide> } <ide> <ide> p.PClient, err = plugins.NewClient("unix://"+filepath.Join(p.RuntimeSourcePath, p.GetSocket()), nil) <ide> if err != nil { <del> if err := p.RestartManager.Cancel(); err != nil { <del> logrus.Errorf("enable: restartManager.Cancel failed due to %v", err) <del> } <add> p.Lock() <add> p.Restart = false <add> p.Unlock() <ide> return err <ide> } <ide> <ide> func (pm *Manager) enable(p *v2.Plugin, force bool) error { <ide> } <ide> <ide> func (pm *Manager) restore(p *v2.Plugin) error { <del> p.RestartManager = restartmanager.New(container.RestartPolicy{Name: "always"}, 0) <del> return pm.containerdClient.Restore(p.GetID(), libcontainerd.WithRestartManager(p.RestartManager)) <add> return pm.containerdClient.Restore(p.GetID()) <ide> } <ide> <ide> func (pm *Manager) disable(p *v2.Plugin) error { <ide> if !p.IsEnabled() { <ide> return fmt.Errorf("plugin %s is already disabled", p.Name()) <ide> } <del> if err := p.RestartManager.Cancel(); err != nil { <del> logrus.Error(err) <del> } <add> p.Lock() <add> p.Restart = false <add> p.Unlock() <ide> if err := pm.containerdClient.Signal(p.GetID(), int(syscall.SIGKILL)); err != nil { <ide> logrus.Error(err) <ide> } <del> if err := p.RemoveFromDisk(); err != nil { <del> logrus.Error(err) <del> } <ide> pm.pluginStore.SetState(p, false) <ide> return nil <ide> } <ide> <ide> // 
Shutdown stops all plugins and called during daemon shutdown. <ide> func (pm *Manager) Shutdown() { <del> pm.Lock() <del> pm.shutdown = true <del> pm.Unlock() <del> <del> pm.RLock() <del> defer pm.RUnlock() <ide> plugins := pm.pluginStore.GetAll() <ide> for _, p := range plugins { <ide> if pm.liveRestore && p.IsEnabled() { <ide> logrus.Debug("Plugin active when liveRestore is set, skipping shutdown") <ide> continue <ide> } <del> if p.RestartManager != nil { <del> if err := p.RestartManager.Cancel(); err != nil { <del> logrus.Error(err) <del> } <del> } <ide> if pm.containerdClient != nil && p.IsEnabled() { <ide> pluginID := p.GetID() <add> p.Lock() <ide> p.ExitChan = make(chan bool) <add> p.Restart = false <add> p.Unlock() <ide> err := pm.containerdClient.Signal(p.PluginObj.ID, int(syscall.SIGTERM)) <ide> if err != nil { <ide> logrus.Errorf("Sending SIGTERM to plugin failed with error: %v", err) <ide> func (pm *Manager) Shutdown() { <ide> } <ide> } <ide> } <del> if err := p.RemoveFromDisk(); err != nil { <del> logrus.Errorf("Remove plugin runtime failed with error: %v", err) <del> } <ide> } <ide> } <ide><path>plugin/store/store_experimental.go <ide> func (ps *Store) Add(p *v2.Plugin) { <ide> ps.Unlock() <ide> } <ide> <del>// Remove removes a plugin from memory, plugindb and disk. <add>// Remove removes a plugin from memory and plugindb. <ide> func (ps *Store) Remove(p *v2.Plugin) { <ide> ps.Lock() <ide> delete(ps.plugins, p.GetID()) <ide> delete(ps.nameToID, p.Name()) <ide> ps.updatePluginDB() <del> p.RemoveFromDisk() <ide> ps.Unlock() <ide> } <ide> <ide><path>plugin/v2/plugin.go <ide> import ( <ide> <ide> "github.com/docker/docker/api/types" <ide> "github.com/docker/docker/pkg/plugins" <del> "github.com/docker/docker/restartmanager" <ide> ) <ide> <ide> // Plugin represents an individual plugin. <ide> type Plugin struct { <ide> sync.RWMutex <del> PluginObj types.Plugin `json:"plugin"` <del> PClient *plugins.Client `json:"-"` <del> RestartManager restartmanager.RestartManager `json:"-"` <del> RuntimeSourcePath string `json:"-"` <del> ExitChan chan bool `json:"-"` <del> RefCount int `json:"-"` <add> PluginObj types.Plugin `json:"plugin"` <add> PClient *plugins.Client `json:"-"` <add> RuntimeSourcePath string `json:"-"` <add> RefCount int `json:"-"` <add> Restart bool `json:"-"` <add> ExitChan chan bool `json:"-"` <ide> }
4
Javascript
Javascript
remove added useractivity_ to global
491b9b232b73dc28946dc4cc8372c7810918e5b1
<ide><path>src/js/player.js <ide> vjs.Player.prototype.listenForUserActivity = function(){ <ide> var onMouseActivity, onMouseDown, mouseInProgress, onMouseUp, <ide> activityCheck, inactivityTimeout; <ide> <del> onMouseActivity = this.reportUserActivity; <add> onMouseActivity = vjs.bind(this, this.reportUserActivity); <ide> <ide> onMouseDown = function() { <ide> onMouseActivity(); <ide> vjs.Player.prototype.listenForUserActivity = function(){ <ide> // Setting userActivity=true now and setting the interval to the same time <ide> // as the activityCheck interval (250) should ensure we never miss the <ide> // next activityCheck <del> mouseInProgress = setInterval(vjs.bind(this, onMouseActivity), 250); <add> mouseInProgress = setInterval(this, onMouseActivity, 250); <ide> }; <ide> <ide> onMouseUp = function(event) {
1
PHP
PHP
fix tests that were failing with 3.next merge
30b9bf0db0c7d6f033c658335d1bf250030cdeef
<ide><path>src/Controller/Controller.php <ide> public function getPlugin(): ?string <ide> /** <ide> * Sets the plugin name. <ide> * <del> * @param string $name Plugin name. <add> * @param string|null $name Plugin name. <ide> * @return $this <ide> * @since 3.6.0 <ide> */ <del> public function setPlugin(string $name) <add> public function setPlugin(?string $name) <ide> { <ide> $this->plugin = $name; <ide> <ide><path>src/Validation/Validation.php <ide> public static function uploadedFile($file, array $options = []): bool <ide> * @param array $options Options to validate width and height. <ide> * @return bool <ide> */ <del> public static function imageSize(array $file, array $options): bool <add> public static function imageSize($file, array $options): bool <ide> { <ide> if (!isset($options['height']) && !isset($options['width'])) { <ide> throw new InvalidArgumentException('Invalid image size validation parameters! Missing `width` and / or `height`.'); <ide><path>tests/TestCase/Error/ExceptionRendererTest.php <ide> public function testMissingRenderSafe() <ide> $controller = $this->getMockBuilder('Cake\Controller\Controller') <ide> ->setMethods(['render']) <ide> ->getMock(); <del> $controller->helpers = ['Fail', 'Boom']; <add> $controller->viewBuilder()->setHelpers(['Fail', 'Boom']); <ide> $controller->request = new ServerRequest; <ide> $controller->expects($this->at(0)) <ide> ->method('render') <ide> public function testMissingRenderSafe() <ide> $ExceptionRenderer->setController($controller); <ide> <ide> $response = $ExceptionRenderer->render(); <del> sort($controller->helpers); <del> $this->assertEquals(['Form', 'Html'], $controller->helpers); <add> $helpers = $controller->viewBuilder()->getHelpers(); <add> sort($helpers); <add> $this->assertEquals(['Form', 'Html'], $helpers); <ide> $this->assertContains('Helper class Fail', (string)$response->getBody()); <ide> } <ide> <ide> public function testMissingLayoutPathRenderSafe() <ide> $ExceptionRenderer = new MyCustomExceptionRenderer($exception); <ide> <ide> $controller = new Controller(); <del> $controller->helpers = ['Fail', 'Boom']; <add> $controller->viewBuilder()->setHelpers(['Fail', 'Boom']); <ide> $controller->getEventManager()->on( <ide> 'Controller.beforeRender', <ide> function (EventInterface $event) { <ide><path>tests/TestCase/Http/ServerRequestTest.php <ide> public function testConstructStringUrlIgnoreServer() <ide> { <ide> $_SERVER['REQUEST_URI'] = '/some/other/path'; <ide> <del> $request = new ServerRequest('/articles/view/1'); <add> $request = new ServerRequest(['url' => '/articles/view/1']); <ide> $this->assertEquals('/articles/view/1', $request->getUri()->getPath()); <ide> <del> $request = new ServerRequest('/'); <add> $request = new ServerRequest(['url' => '/']); <ide> $this->assertEquals('/', $request->getUri()->getPath()); <ide> } <ide> /** <ide><path>tests/test_app/TestApp/Controller/Admin/ErrorController.php <ide> namespace TestApp\Controller\Admin; <ide> <ide> use Cake\Controller\Controller; <del>use Cake\Event\Event; <add>use Cake\Event\EventInterface; <ide> use Cake\Http\Response; <ide> <ide> /** <ide> public function initialize(): void <ide> * @param \Cake\Event\Event $event Event. <ide> * @return Cake\Http\Response|null <ide> */ <del> public function beforeRender(Event $event): ?Response <add> public function beforeRender(EventInterface $event): ?Response <ide> { <ide> $this->viewBuilder()->setTemplatePath('Error'); <ide>
5
Ruby
Ruby
update concerned tests
411e499677981ef649c000a36f5a805516096c3e
<ide><path>railties/test/generators/app_generator_test.rb <ide> app/assets/stylesheets <ide> app/assets/images <ide> app/controllers <add> app/controllers/concerns <ide> app/helpers <ide> app/mailers <ide> app/models <add> app/models/concerns <ide> app/views/layouts <ide> config/environments <ide> config/initializers
1
Python
Python
correct parser.py use_upper param info
5d0cc0d2ab6dab6f11d8b67282442cfbea6a0263
<ide><path>spacy/ml/models/parser.py <ide> def build_tb_parser_model( <ide> non-linearity if use_upper=False. <ide> use_upper (bool): Whether to use an additional hidden layer after the state <ide> vector in order to predict the action scores. It is recommended to set <del> this to False for large pretrained models such as transformers, and False <add> this to False for large pretrained models such as transformers, and True <ide> for smaller networks. The upper layer is computed on CPU, which becomes <ide> a bottleneck on larger GPU-based models, where it's also less necessary. <ide> nO (int or None): The number of actions the model will predict between.
1
Go
Go
use prefix naming for docker_cli_run_test.go
aa536b27a7897373384ab0335c44f44b0bb83455
<ide><path>integration-cli/docker_cli_run_test.go <ide> import ( <ide> ) <ide> <ide> // "test123" should be printed by docker run <del>func TestDockerRunEchoStdout(t *testing.T) { <add>func TestRunEchoStdout(t *testing.T) { <ide> runCmd := exec.Command(dockerBinary, "run", "busybox", "echo", "test123") <ide> out, _, _, err := runCommandWithStdoutStderr(runCmd) <ide> if err != nil { <ide> func TestDockerRunEchoStdout(t *testing.T) { <ide> } <ide> <ide> // "test" should be printed <del>func TestDockerRunEchoStdoutWithMemoryLimit(t *testing.T) { <add>func TestRunEchoStdoutWithMemoryLimit(t *testing.T) { <ide> runCmd := exec.Command(dockerBinary, "run", "-m", "2786432", "busybox", "echo", "test") <ide> out, _, _, err := runCommandWithStdoutStderr(runCmd) <ide> if err != nil { <ide> func TestDockerRunEchoStdoutWithMemoryLimit(t *testing.T) { <ide> } <ide> <ide> // "test" should be printed <del>func TestDockerRunEchoStdoutWitCPULimit(t *testing.T) { <add>func TestRunEchoStdoutWitCPULimit(t *testing.T) { <ide> runCmd := exec.Command(dockerBinary, "run", "-c", "1000", "busybox", "echo", "test") <ide> out, _, _, err := runCommandWithStdoutStderr(runCmd) <ide> if err != nil { <ide> func TestDockerRunEchoStdoutWitCPULimit(t *testing.T) { <ide> } <ide> <ide> // "test" should be printed <del>func TestDockerRunEchoStdoutWithCPUAndMemoryLimit(t *testing.T) { <add>func TestRunEchoStdoutWithCPUAndMemoryLimit(t *testing.T) { <ide> runCmd := exec.Command(dockerBinary, "run", "-c", "1000", "-m", "2786432", "busybox", "echo", "test") <ide> out, _, _, err := runCommandWithStdoutStderr(runCmd) <ide> if err != nil { <ide> func TestDockerRunEchoStdoutWithCPUAndMemoryLimit(t *testing.T) { <ide> } <ide> <ide> // "test" should be printed <del>func TestDockerRunEchoNamedContainer(t *testing.T) { <add>func TestRunEchoNamedContainer(t *testing.T) { <ide> runCmd := exec.Command(dockerBinary, "run", "--name", "testfoonamedcontainer", "busybox", "echo", "test") <ide> out, _, _, err := runCommandWithStdoutStderr(runCmd) <ide> if err != nil { <ide> func TestDockerRunEchoNamedContainer(t *testing.T) { <ide> } <ide> <ide> // docker run should not leak file descriptors <del>func TestDockerRunLeakyFileDescriptors(t *testing.T) { <add>func TestRunLeakyFileDescriptors(t *testing.T) { <ide> runCmd := exec.Command(dockerBinary, "run", "busybox", "ls", "-C", "/proc/self/fd") <ide> out, _, _, err := runCommandWithStdoutStderr(runCmd) <ide> if err != nil { <ide> func TestDockerRunLeakyFileDescriptors(t *testing.T) { <ide> <ide> // it should be possible to ping Google DNS resolver <ide> // this will fail when Internet access is unavailable <del>func TestDockerRunPingGoogle(t *testing.T) { <add>func TestRunPingGoogle(t *testing.T) { <ide> runCmd := exec.Command(dockerBinary, "run", "busybox", "ping", "-c", "1", "8.8.8.8") <ide> out, _, _, err := runCommandWithStdoutStderr(runCmd) <ide> if err != nil { <ide> func TestDockerRunPingGoogle(t *testing.T) { <ide> <ide> // the exit code should be 0 <ide> // some versions of lxc might make this test fail <del>func TestDockerRunExitCodeZero(t *testing.T) { <add>func TestRunExitCodeZero(t *testing.T) { <ide> runCmd := exec.Command(dockerBinary, "run", "busybox", "true") <ide> exitCode, err := runCommand(runCmd) <ide> errorOut(err, t, fmt.Sprintf("%s", err)) <ide> func TestDockerRunExitCodeZero(t *testing.T) { <ide> <ide> // the exit code should be 1 <ide> // some versions of lxc might make this test fail <del>func TestDockerRunExitCodeOne(t *testing.T) { <add>func TestRunExitCodeOne(t *testing.T) 
{ <ide> runCmd := exec.Command(dockerBinary, "run", "busybox", "false") <ide> exitCode, err := runCommand(runCmd) <ide> if err != nil && !strings.Contains("exit status 1", fmt.Sprintf("%s", err)) { <ide> func TestRunStdinPipe(t *testing.T) { <ide> } <ide> <ide> // the container's ID should be printed when starting a container in detached mode <del>func TestDockerRunDetachedContainerIDPrinting(t *testing.T) { <add>func TestRunDetachedContainerIDPrinting(t *testing.T) { <ide> runCmd := exec.Command(dockerBinary, "run", "-d", "busybox", "true") <ide> out, _, _, err := runCommandWithStdoutStderr(runCmd) <ide> if err != nil { <ide> func TestDockerRunDetachedContainerIDPrinting(t *testing.T) { <ide> } <ide> <ide> // the working directory should be set correctly <del>func TestDockerRunWorkingDirectory(t *testing.T) { <add>func TestRunWorkingDirectory(t *testing.T) { <ide> runCmd := exec.Command(dockerBinary, "run", "-w", "/root", "busybox", "pwd") <ide> out, _, _, err := runCommandWithStdoutStderr(runCmd) <ide> if err != nil { <ide> func TestDockerRunWorkingDirectory(t *testing.T) { <ide> } <ide> <ide> // pinging Google's DNS resolver should fail when we disable the networking <del>func TestDockerRunWithoutNetworking(t *testing.T) { <add>func TestRunWithoutNetworking(t *testing.T) { <ide> runCmd := exec.Command(dockerBinary, "run", "--net=none", "busybox", "ping", "-c", "1", "8.8.8.8") <ide> out, _, exitCode, err := runCommandWithStdoutStderr(runCmd) <ide> if err != nil && exitCode != 1 { <ide> func TestDockerRunWithoutNetworking(t *testing.T) { <ide> } <ide> <ide> // Regression test for #4741 <del>func TestDockerRunWithVolumesAsFiles(t *testing.T) { <add>func TestRunWithVolumesAsFiles(t *testing.T) { <ide> runCmd := exec.Command(dockerBinary, "run", "--name", "test-data", "--volume", "/etc/hosts:/target-file", "busybox", "true") <ide> out, stderr, exitCode, err := runCommandWithStdoutStderr(runCmd) <ide> if err != nil && exitCode != 0 { <ide> func TestDockerRunWithVolumesAsFiles(t *testing.T) { <ide> } <ide> <ide> // Regression test for #4979 <del>func TestDockerRunWithVolumesFromExited(t *testing.T) { <add>func TestRunWithVolumesFromExited(t *testing.T) { <ide> runCmd := exec.Command(dockerBinary, "run", "--name", "test-data", "--volume", "/some/dir", "busybox", "touch", "/some/dir/file") <ide> out, stderr, exitCode, err := runCommandWithStdoutStderr(runCmd) <ide> if err != nil && exitCode != 0 { <ide> func TestDockerRunWithVolumesFromExited(t *testing.T) { <ide> } <ide> <ide> // Regression test for #4830 <del>func TestDockerRunWithRelativePath(t *testing.T) { <add>func TestRunWithRelativePath(t *testing.T) { <ide> runCmd := exec.Command(dockerBinary, "run", "-v", "tmp:/other-tmp", "busybox", "true") <ide> if _, _, _, err := runCommandWithStdoutStderr(runCmd); err == nil { <ide> t.Fatalf("relative path should result in an error") <ide> func TestDockerRunWithRelativePath(t *testing.T) { <ide> logDone("run - volume with relative path") <ide> } <ide> <del>func TestVolumesMountedAsReadonly(t *testing.T) { <add>func TestRunVolumesMountedAsReadonly(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "-v", "/test:/test:ro", "busybox", "touch", "/test/somefile") <ide> if code, err := runCommand(cmd); err == nil || code == 0 { <ide> t.Fatalf("run should fail because volume is ro: exit code %d", code) <ide> func TestVolumesMountedAsReadonly(t *testing.T) { <ide> logDone("run - volumes as readonly mount") <ide> } <ide> <del>func TestVolumesFromInReadonlyMode(t *testing.T) { <add>func 
TestRunVolumesFromInReadonlyMode(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "--name", "parent", "-v", "/test", "busybox", "true") <ide> if _, err := runCommand(cmd); err != nil { <ide> t.Fatal(err) <ide> func TestVolumesFromInReadonlyMode(t *testing.T) { <ide> } <ide> <ide> // Regression test for #1201 <del>func TestVolumesFromInReadWriteMode(t *testing.T) { <add>func TestRunVolumesFromInReadWriteMode(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "--name", "parent", "-v", "/test", "busybox", "true") <ide> if _, err := runCommand(cmd); err != nil { <ide> t.Fatal(err) <ide> func TestVolumesFromInReadWriteMode(t *testing.T) { <ide> logDone("run - volumes from as read write mount") <ide> } <ide> <del>func TestVolumesFromInheritsReadOnly(t *testing.T) { <add>func TestRunVolumesFromInheritsReadOnly(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "--name", "parent", "-v", "/test:/test:ro", "busybox", "true") <ide> if _, err := runCommand(cmd); err != nil { <ide> t.Fatal(err) <ide> func TestVolumesFromInheritsReadOnly(t *testing.T) { <ide> } <ide> <ide> // Test for #1351 <del>func TestApplyVolumesFromBeforeVolumes(t *testing.T) { <add>func TestRunApplyVolumesFromBeforeVolumes(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "--name", "parent", "-v", "/test", "busybox", "touch", "/test/foo") <ide> if _, err := runCommand(cmd); err != nil { <ide> t.Fatal(err) <ide> func TestApplyVolumesFromBeforeVolumes(t *testing.T) { <ide> logDone("run - volumes from mounted first") <ide> } <ide> <del>func TestMultipleVolumesFrom(t *testing.T) { <add>func TestRunMultipleVolumesFrom(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "--name", "parent1", "-v", "/test", "busybox", "touch", "/test/foo") <ide> if _, err := runCommand(cmd); err != nil { <ide> t.Fatal(err) <ide> func TestMultipleVolumesFrom(t *testing.T) { <ide> } <ide> <ide> // this tests verifies the ID format for the container <del>func TestVerifyContainerID(t *testing.T) { <add>func TestRunVerifyContainerID(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "-d", "busybox", "true") <ide> out, exit, err := runCommandWithOutput(cmd) <ide> if err != nil { <ide> func TestVerifyContainerID(t *testing.T) { <ide> } <ide> <ide> // Test that creating a container with a volume doesn't crash. Regression test for #995. <del>func TestCreateVolume(t *testing.T) { <add>func TestRunCreateVolume(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "-v", "/var/lib/data", "busybox", "true") <ide> if _, err := runCommand(cmd); err != nil { <ide> t.Fatal(err) <ide> func TestCreateVolume(t *testing.T) { <ide> <ide> // Test that creating a volume with a symlink in its path works correctly. Test for #5152. <ide> // Note that this bug happens only with symlinks with a target that starts with '/'. <del>func TestCreateVolumeWithSymlink(t *testing.T) { <add>func TestRunCreateVolumeWithSymlink(t *testing.T) { <ide> buildCmd := exec.Command(dockerBinary, "build", "-t", "docker-test-createvolumewithsymlink", "-") <ide> buildCmd.Stdin = strings.NewReader(`FROM busybox <ide> RUN mkdir /foo && ln -s /foo /bar`) <ide> func TestCreateVolumeWithSymlink(t *testing.T) { <ide> } <ide> <ide> // Tests that a volume path that has a symlink exists in a container mounting it with `--volumes-from`. 
<del>func TestVolumesFromSymlinkPath(t *testing.T) { <add>func TestRunVolumesFromSymlinkPath(t *testing.T) { <ide> buildCmd := exec.Command(dockerBinary, "build", "-t", "docker-test-volumesfromsymlinkpath", "-") <ide> buildCmd.Stdin = strings.NewReader(`FROM busybox <ide> RUN mkdir /baz && ln -s /baz /foo <ide> func TestVolumesFromSymlinkPath(t *testing.T) { <ide> logDone("run - volumes-from symlink path") <ide> } <ide> <del>func TestExitCode(t *testing.T) { <add>func TestRunExitCode(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "busybox", "/bin/sh", "-c", "exit 72") <ide> <ide> exit, err := runCommand(cmd) <ide> func TestExitCode(t *testing.T) { <ide> logDone("run - correct exit code") <ide> } <ide> <del>func TestUserDefaultsToRoot(t *testing.T) { <add>func TestRunUserDefaultsToRoot(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "busybox", "id") <ide> <ide> out, _, err := runCommandWithOutput(cmd) <ide> func TestUserDefaultsToRoot(t *testing.T) { <ide> logDone("run - default user") <ide> } <ide> <del>func TestUserByName(t *testing.T) { <add>func TestRunUserByName(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "-u", "root", "busybox", "id") <ide> <ide> out, _, err := runCommandWithOutput(cmd) <ide> func TestUserByName(t *testing.T) { <ide> logDone("run - user by name") <ide> } <ide> <del>func TestUserByID(t *testing.T) { <add>func TestRunUserByID(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "-u", "1", "busybox", "id") <ide> <ide> out, _, err := runCommandWithOutput(cmd) <ide> func TestUserByID(t *testing.T) { <ide> logDone("run - user by id") <ide> } <ide> <del>func TestUserByIDBig(t *testing.T) { <add>func TestRunUserByIDBig(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "-u", "2147483648", "busybox", "id") <ide> <ide> out, _, err := runCommandWithOutput(cmd) <ide> func TestUserByIDBig(t *testing.T) { <ide> logDone("run - user by id, id too big") <ide> } <ide> <del>func TestUserByIDNegative(t *testing.T) { <add>func TestRunUserByIDNegative(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "-u", "-1", "busybox", "id") <ide> <ide> out, _, err := runCommandWithOutput(cmd) <ide> func TestUserByIDNegative(t *testing.T) { <ide> logDone("run - user by id, id negative") <ide> } <ide> <del>func TestUserByIDZero(t *testing.T) { <add>func TestRunUserByIDZero(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "-u", "0", "busybox", "id") <ide> <ide> out, _, err := runCommandWithOutput(cmd) <ide> func TestUserByIDZero(t *testing.T) { <ide> logDone("run - user by id, zero uid") <ide> } <ide> <del>func TestUserNotFound(t *testing.T) { <add>func TestRunUserNotFound(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "-u", "notme", "busybox", "id") <ide> <ide> _, err := runCommand(cmd) <ide> func TestRunTwoConcurrentContainers(t *testing.T) { <ide> logDone("run - two concurrent containers") <ide> } <ide> <del>func TestEnvironment(t *testing.T) { <add>func TestRunEnvironment(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "-h", "testing", "-e=FALSE=true", "-e=TRUE", "-e=TRICKY", "-e=HOME=", "busybox", "env") <ide> cmd.Env = append(os.Environ(), <ide> "TRUE=false", <ide> func TestEnvironment(t *testing.T) { <ide> logDone("run - verify environment") <ide> } <ide> <del>func TestContainerNetwork(t *testing.T) { <add>func TestRunContainerNetwork(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "busybox", "ping", "-c", "1", "127.0.0.1") <ide> if _, err := 
runCommand(cmd); err != nil { <ide> t.Fatal(err) <ide> func TestContainerNetwork(t *testing.T) { <ide> } <ide> <ide> // Issue #4681 <del>func TestLoopbackWhenNetworkDisabled(t *testing.T) { <add>func TestRunLoopbackWhenNetworkDisabled(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "--net=none", "busybox", "ping", "-c", "1", "127.0.0.1") <ide> if _, err := runCommand(cmd); err != nil { <ide> t.Fatal(err) <ide> func TestLoopbackWhenNetworkDisabled(t *testing.T) { <ide> logDone("run - test container loopback when networking disabled") <ide> } <ide> <del>func TestNetHostNotAllowedWithLinks(t *testing.T) { <add>func TestRunNetHostNotAllowedWithLinks(t *testing.T) { <ide> _, _, err := cmd(t, "run", "--name", "linked", "busybox", "true") <ide> <ide> cmd := exec.Command(dockerBinary, "run", "--net=host", "--link", "linked:linked", "busybox", "true") <ide> func TestNetHostNotAllowedWithLinks(t *testing.T) { <ide> logDone("run - don't allow --net=host to be used with links") <ide> } <ide> <del>func TestLoopbackOnlyExistsWhenNetworkingDisabled(t *testing.T) { <add>func TestRunLoopbackOnlyExistsWhenNetworkingDisabled(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "--net=none", "busybox", "ip", "-o", "-4", "a", "show", "up") <ide> out, _, err := runCommandWithOutput(cmd) <ide> if err != nil { <ide> func TestLoopbackOnlyExistsWhenNetworkingDisabled(t *testing.T) { <ide> logDone("run - test loopback only exists when networking disabled") <ide> } <ide> <del>func TestPrivilegedCanMknod(t *testing.T) { <add>func TestRunPrivilegedCanMknod(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "--privileged", "busybox", "sh", "-c", "mknod /tmp/sda b 8 0 && echo ok") <ide> out, _, err := runCommandWithOutput(cmd) <ide> if err != nil { <ide> func TestPrivilegedCanMknod(t *testing.T) { <ide> logDone("run - test privileged can mknod") <ide> } <ide> <del>func TestUnPrivilegedCanMknod(t *testing.T) { <add>func TestRunUnPrivilegedCanMknod(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "busybox", "sh", "-c", "mknod /tmp/sda b 8 0 && echo ok") <ide> out, _, err := runCommandWithOutput(cmd) <ide> if err != nil { <ide> func TestUnPrivilegedCanMknod(t *testing.T) { <ide> logDone("run - test un-privileged can mknod") <ide> } <ide> <del>func TestCapDropInvalid(t *testing.T) { <add>func TestRunCapDropInvalid(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "--cap-drop=CHPASS", "busybox", "ls") <ide> out, _, err := runCommandWithOutput(cmd) <ide> if err == nil { <ide> func TestCapDropInvalid(t *testing.T) { <ide> logDone("run - test --cap-drop=CHPASS invalid") <ide> } <ide> <del>func TestCapDropCannotMknod(t *testing.T) { <add>func TestRunCapDropCannotMknod(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "--cap-drop=MKNOD", "busybox", "sh", "-c", "mknod /tmp/sda b 8 0 && echo ok") <ide> out, _, err := runCommandWithOutput(cmd) <ide> if err == nil { <ide> func TestCapDropCannotMknod(t *testing.T) { <ide> logDone("run - test --cap-drop=MKNOD cannot mknod") <ide> } <ide> <del>func TestCapDropCannotMknodLowerCase(t *testing.T) { <add>func TestRunCapDropCannotMknodLowerCase(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "--cap-drop=mknod", "busybox", "sh", "-c", "mknod /tmp/sda b 8 0 && echo ok") <ide> out, _, err := runCommandWithOutput(cmd) <ide> if err == nil { <ide> func TestCapDropCannotMknodLowerCase(t *testing.T) { <ide> logDone("run - test --cap-drop=mknod cannot mknod lowercase") <ide> } <ide> <del>func 
TestCapDropALLCannotMknod(t *testing.T) { <add>func TestRunCapDropALLCannotMknod(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "--cap-drop=ALL", "busybox", "sh", "-c", "mknod /tmp/sda b 8 0 && echo ok") <ide> out, _, err := runCommandWithOutput(cmd) <ide> if err == nil { <ide> func TestCapDropALLCannotMknod(t *testing.T) { <ide> logDone("run - test --cap-drop=ALL cannot mknod") <ide> } <ide> <del>func TestCapDropALLAddMknodCannotMknod(t *testing.T) { <add>func TestRunCapDropALLAddMknodCannotMknod(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "--cap-drop=ALL", "--cap-add=MKNOD", "busybox", "sh", "-c", "mknod /tmp/sda b 8 0 && echo ok") <ide> out, _, err := runCommandWithOutput(cmd) <ide> if err != nil { <ide> func TestCapDropALLAddMknodCannotMknod(t *testing.T) { <ide> logDone("run - test --cap-drop=ALL --cap-add=MKNOD can mknod") <ide> } <ide> <del>func TestCapAddInvalid(t *testing.T) { <add>func TestRunCapAddInvalid(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "--cap-add=CHPASS", "busybox", "ls") <ide> out, _, err := runCommandWithOutput(cmd) <ide> if err == nil { <ide> func TestCapAddInvalid(t *testing.T) { <ide> logDone("run - test --cap-add=CHPASS invalid") <ide> } <ide> <del>func TestCapAddCanDownInterface(t *testing.T) { <add>func TestRunCapAddCanDownInterface(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "--cap-add=NET_ADMIN", "busybox", "sh", "-c", "ip link set eth0 down && echo ok") <ide> out, _, err := runCommandWithOutput(cmd) <ide> if err != nil { <ide> func TestCapAddCanDownInterface(t *testing.T) { <ide> logDone("run - test --cap-add=NET_ADMIN can set eth0 down") <ide> } <ide> <del>func TestCapAddALLCanDownInterface(t *testing.T) { <add>func TestRunCapAddALLCanDownInterface(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "--cap-add=ALL", "busybox", "sh", "-c", "ip link set eth0 down && echo ok") <ide> out, _, err := runCommandWithOutput(cmd) <ide> if err != nil { <ide> func TestCapAddALLCanDownInterface(t *testing.T) { <ide> logDone("run - test --cap-add=ALL can set eth0 down") <ide> } <ide> <del>func TestCapAddALLDropNetAdminCanDownInterface(t *testing.T) { <add>func TestRunCapAddALLDropNetAdminCanDownInterface(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "--cap-add=ALL", "--cap-drop=NET_ADMIN", "busybox", "sh", "-c", "ip link set eth0 down && echo ok") <ide> out, _, err := runCommandWithOutput(cmd) <ide> if err == nil { <ide> func TestCapAddALLDropNetAdminCanDownInterface(t *testing.T) { <ide> logDone("run - test --cap-add=ALL --cap-drop=NET_ADMIN cannot set eth0 down") <ide> } <ide> <del>func TestPrivilegedCanMount(t *testing.T) { <add>func TestRunPrivilegedCanMount(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "--privileged", "busybox", "sh", "-c", "mount -t tmpfs none /tmp && echo ok") <ide> <ide> out, _, err := runCommandWithOutput(cmd) <ide> func TestPrivilegedCanMount(t *testing.T) { <ide> logDone("run - test privileged can mount") <ide> } <ide> <del>func TestUnPrivilegedCannotMount(t *testing.T) { <add>func TestRunUnPrivilegedCannotMount(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "busybox", "sh", "-c", "mount -t tmpfs none /tmp && echo ok") <ide> <ide> out, _, err := runCommandWithOutput(cmd) <ide> func TestUnPrivilegedCannotMount(t *testing.T) { <ide> logDone("run - test un-privileged cannot mount") <ide> } <ide> <del>func TestSysNotWritableInNonPrivilegedContainers(t *testing.T) { <add>func 
TestRunSysNotWritableInNonPrivilegedContainers(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "busybox", "touch", "/sys/kernel/profiling") <ide> if code, err := runCommand(cmd); err == nil || code == 0 { <ide> t.Fatal("sys should not be writable in a non privileged container") <ide> func TestSysNotWritableInNonPrivilegedContainers(t *testing.T) { <ide> logDone("run - sys not writable in non privileged container") <ide> } <ide> <del>func TestSysWritableInPrivilegedContainers(t *testing.T) { <add>func TestRunSysWritableInPrivilegedContainers(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "--privileged", "busybox", "touch", "/sys/kernel/profiling") <ide> if code, err := runCommand(cmd); err != nil || code != 0 { <ide> t.Fatalf("sys should be writable in privileged container") <ide> func TestSysWritableInPrivilegedContainers(t *testing.T) { <ide> logDone("run - sys writable in privileged container") <ide> } <ide> <del>func TestProcNotWritableInNonPrivilegedContainers(t *testing.T) { <add>func TestRunProcNotWritableInNonPrivilegedContainers(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "busybox", "touch", "/proc/sysrq-trigger") <ide> if code, err := runCommand(cmd); err == nil || code == 0 { <ide> t.Fatal("proc should not be writable in a non privileged container") <ide> func TestProcNotWritableInNonPrivilegedContainers(t *testing.T) { <ide> logDone("run - proc not writable in non privileged container") <ide> } <ide> <del>func TestProcWritableInPrivilegedContainers(t *testing.T) { <add>func TestRunProcWritableInPrivilegedContainers(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "--privileged", "busybox", "touch", "/proc/sysrq-trigger") <ide> if code, err := runCommand(cmd); err != nil || code != 0 { <ide> t.Fatalf("proc should be writable in privileged container") <ide> func TestRunWithCpuset(t *testing.T) { <ide> logDone("run - cpuset 0") <ide> } <ide> <del>func TestDeviceNumbers(t *testing.T) { <add>func TestRunDeviceNumbers(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "busybox", "sh", "-c", "ls -l /dev/null") <ide> <ide> out, _, err := runCommandWithOutput(cmd) <ide> func TestDeviceNumbers(t *testing.T) { <ide> logDone("run - test device numbers") <ide> } <ide> <del>func TestThatCharacterDevicesActLikeCharacterDevices(t *testing.T) { <add>func TestRunThatCharacterDevicesActLikeCharacterDevices(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "busybox", "sh", "-c", "dd if=/dev/zero of=/zero bs=1k count=5 2> /dev/null ; du -h /zero") <ide> <ide> out, _, err := runCommandWithOutput(cmd) <ide> func TestRunUnprivilegedWithChroot(t *testing.T) { <ide> logDone("run - unprivileged with chroot") <ide> } <ide> <del>func TestAddingOptionalDevices(t *testing.T) { <add>func TestRunAddingOptionalDevices(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "--device", "/dev/zero:/dev/nulo", "busybox", "sh", "-c", "ls /dev/nulo") <ide> <ide> out, _, err := runCommandWithOutput(cmd) <ide> func TestAddingOptionalDevices(t *testing.T) { <ide> logDone("run - test --device argument") <ide> } <ide> <del>func TestModeHostname(t *testing.T) { <add>func TestRunModeHostname(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "-h=testhostname", "busybox", "cat", "/etc/hostname") <ide> <ide> out, _, err := runCommandWithOutput(cmd) <ide> func TestModeHostname(t *testing.T) { <ide> logDone("run - hostname and several network modes") <ide> } <ide> <del>func TestRootWorkdir(t *testing.T) { <add>func 
TestRunRootWorkdir(t *testing.T) { <ide> s, _, err := cmd(t, "run", "--workdir", "/", "busybox", "pwd") <ide> if err != nil { <ide> t.Fatal(s, err) <ide> func TestRootWorkdir(t *testing.T) { <ide> logDone("run - workdir /") <ide> } <ide> <del>func TestAllowBindMountingRoot(t *testing.T) { <add>func TestRunAllowBindMountingRoot(t *testing.T) { <ide> s, _, err := cmd(t, "run", "-v", "/:/host", "busybox", "ls", "/host") <ide> if err != nil { <ide> t.Fatal(s, err) <ide> func TestAllowBindMountingRoot(t *testing.T) { <ide> logDone("run - bind mount / as volume") <ide> } <ide> <del>func TestDisallowBindMountingRootToRoot(t *testing.T) { <add>func TestRunDisallowBindMountingRootToRoot(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "-v", "/:/", "busybox", "ls", "/host") <ide> out, _, err := runCommandWithOutput(cmd) <ide> if err == nil { <ide> func TestDisallowBindMountingRootToRoot(t *testing.T) { <ide> } <ide> <ide> // Test recursive bind mount works by default <del>func TestDockerRunWithVolumesIsRecursive(t *testing.T) { <add>func TestRunWithVolumesIsRecursive(t *testing.T) { <ide> tmpDir, err := ioutil.TempDir("", "docker_recursive_mount_test") <ide> if err != nil { <ide> t.Fatal(err) <ide> func TestDockerRunWithVolumesIsRecursive(t *testing.T) { <ide> logDone("run - volumes are bind mounted recursively") <ide> } <ide> <del>func TestDnsDefaultOptions(t *testing.T) { <add>func TestRunDnsDefaultOptions(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "busybox", "cat", "/etc/resolv.conf") <ide> <ide> actual, _, err := runCommandWithOutput(cmd) <ide> func TestDnsDefaultOptions(t *testing.T) { <ide> logDone("run - dns default options") <ide> } <ide> <del>func TestDnsOptions(t *testing.T) { <add>func TestRunDnsOptions(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "--dns=127.0.0.1", "--dns-search=mydomain", "busybox", "cat", "/etc/resolv.conf") <ide> <ide> out, _, err := runCommandWithOutput(cmd) <ide> func TestDnsOptions(t *testing.T) { <ide> logDone("run - dns options") <ide> } <ide> <del>func TestDnsOptionsBasedOnHostResolvConf(t *testing.T) { <add>func TestRunDnsOptionsBasedOnHostResolvConf(t *testing.T) { <ide> resolvConf, err := ioutil.ReadFile("/etc/resolv.conf") <ide> if os.IsNotExist(err) { <ide> t.Fatalf("/etc/resolv.conf does not exist") <ide> func TestRunAddHost(t *testing.T) { <ide> } <ide> <ide> // Regression test for #6983 <del>func TestAttachStdErrOnlyTTYMode(t *testing.T) { <add>func TestRunAttachStdErrOnlyTTYMode(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "-t", "-a", "stderr", "busybox", "true") <ide> <ide> exitCode, err := runCommand(cmd) <ide> func TestAttachStdErrOnlyTTYMode(t *testing.T) { <ide> } <ide> <ide> // Regression test for #6983 <del>func TestAttachStdOutOnlyTTYMode(t *testing.T) { <add>func TestRunAttachStdOutOnlyTTYMode(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "-t", "-a", "stdout", "busybox", "true") <ide> <ide> exitCode, err := runCommand(cmd) <ide> func TestAttachStdOutOnlyTTYMode(t *testing.T) { <ide> } <ide> <ide> // Regression test for #6983 <del>func TestAttachStdOutAndErrTTYMode(t *testing.T) { <add>func TestRunAttachStdOutAndErrTTYMode(t *testing.T) { <ide> cmd := exec.Command(dockerBinary, "run", "-t", "-a", "stdout", "-a", "stderr", "busybox", "true") <ide> <ide> exitCode, err := runCommand(cmd) <ide> func TestAttachStdOutAndErrTTYMode(t *testing.T) { <ide> logDone("run - Attach stderr and stdout with -t") <ide> } <ide> <del>func TestState(t *testing.T) { <add>func 
TestRunState(t *testing.T) { <ide> defer deleteAllContainers() <ide> cmd := exec.Command(dockerBinary, "run", "-d", "busybox", "top") <ide> <ide> func TestState(t *testing.T) { <ide> } <ide> <ide> // Test for #1737 <del>func TestCopyVolumeUidGid(t *testing.T) { <add>func TestRunCopyVolumeUidGid(t *testing.T) { <ide> name := "testrunvolumesuidgid" <ide> defer deleteImages(name) <ide> defer deleteAllContainers() <ide> func TestCopyVolumeUidGid(t *testing.T) { <ide> } <ide> <ide> // Test for #1582 <del>func TestCopyVolumeContent(t *testing.T) { <add>func TestRunCopyVolumeContent(t *testing.T) { <ide> name := "testruncopyvolumecontent" <ide> defer deleteImages(name) <ide> defer deleteAllContainers() <ide> func TestRunExitOnStdinClose(t *testing.T) { <ide> } <ide> <ide> // Test for #2267 <del>func TestWriteHostsFileAndNotCommit(t *testing.T) { <add>func TestRunWriteHostsFileAndNotCommit(t *testing.T) { <ide> name := "writehosts" <ide> cmd := exec.Command(dockerBinary, "run", "--name", name, "busybox", "sh", "-c", "echo test2267 >> /etc/hosts && cat /etc/hosts") <ide> out, _, err := runCommandWithOutput(cmd) <ide> func TestWriteHostsFileAndNotCommit(t *testing.T) { <ide> } <ide> <ide> // Test for #2267 <del>func TestWriteHostnameFileAndNotCommit(t *testing.T) { <add>func TestRunWriteHostnameFileAndNotCommit(t *testing.T) { <ide> name := "writehostname" <ide> cmd := exec.Command(dockerBinary, "run", "--name", name, "busybox", "sh", "-c", "echo test2267 >> /etc/hostname && cat /etc/hostname") <ide> out, _, err := runCommandWithOutput(cmd) <ide> func TestWriteHostnameFileAndNotCommit(t *testing.T) { <ide> } <ide> <ide> // Test for #2267 <del>func TestWriteResolvFileAndNotCommit(t *testing.T) { <add>func TestRunWriteResolvFileAndNotCommit(t *testing.T) { <ide> name := "writeresolv" <ide> cmd := exec.Command(dockerBinary, "run", "--name", name, "busybox", "sh", "-c", "echo test2267 >> /etc/resolv.conf && cat /etc/resolv.conf") <ide> out, _, err := runCommandWithOutput(cmd) <ide> func TestRunWithBadDevice(t *testing.T) { <ide> logDone("run - error with bad device") <ide> } <ide> <del>func TestEntrypoint(t *testing.T) { <add>func TestRunEntrypoint(t *testing.T) { <ide> name := "entrypoint" <ide> cmd := exec.Command(dockerBinary, "run", "--name", name, "--entrypoint", "/bin/echo", "busybox", "-n", "foobar") <ide> out, _, err := runCommandWithOutput(cmd) <ide> func TestEntrypoint(t *testing.T) { <ide> logDone("run - entrypoint") <ide> } <ide> <del>func TestBindMounts(t *testing.T) { <add>func TestRunBindMounts(t *testing.T) { <ide> tmpDir, err := ioutil.TempDir("", "docker-test-container") <ide> if err != nil { <ide> t.Fatal(err) <ide> func TestBindMounts(t *testing.T) { <ide> logDone("run - bind mounts") <ide> } <ide> <del>func TestMutableNetworkFiles(t *testing.T) { <add>func TestRunMutableNetworkFiles(t *testing.T) { <ide> defer deleteAllContainers() <ide> <ide> for _, fn := range []string{"resolv.conf", "hosts"} { <ide> func TestMutableNetworkFiles(t *testing.T) { <ide> } <ide> } <ide> <del>func TestHostsLinkedContainerUpdate(t *testing.T) { <add>func TestRunHostsLinkedContainerUpdate(t *testing.T) { <ide> deleteAllContainers() <ide> out, _, err := runCommandWithOutput(exec.Command(dockerBinary, "run", "-d", "--name", "c1", "busybox", "sh", "-c", "while true; do sleep 1; done")) <ide> if err != nil { <ide> func TestRunPortInUse(t *testing.T) { <ide> } <ide> <ide> // Regression test for #7792 <del>func TestMountOrdering(t *testing.T) { <add>func TestRunMountOrdering(t *testing.T) { <ide> tmpDir, err 
:= ioutil.TempDir("", "docker_nested_mount_test") <ide> if err != nil { <ide> t.Fatal(err)
1
Text
Text
remove es6/ecmascript 2015 from buffer.md
a2ffdc9e9b7b7978c87bdf6577dc6b53cfb81b75
<ide><path>doc/api/buffer.md <ide> <ide> > Stability: 2 - Stable <ide> <del>Prior to the introduction of [`TypedArray`] in [`ECMAScript 2015`] (ES6), the <del>JavaScript language had no mechanism for reading or manipulating streams <del>of binary data. The `Buffer` class was introduced as part of the Node.js <del>API to make it possible to interact with octet streams in the context of things <del>like TCP streams and file system operations. <add>Prior to the introduction of [`TypedArray`], the JavaScript language had no <add>mechanism for reading or manipulating streams of binary data. The `Buffer` class <add>was introduced as part of the Node.js API to make it possible to interact with <add>octet streams in the context of things like TCP streams and file system <add>operations. <ide> <del>Now that [`TypedArray`] has been added in ES6, the `Buffer` class implements the <del>[`Uint8Array`] API in a manner that is more optimized and suitable for Node.js' <del>use cases. <add>With [`TypedArray`] now available, the `Buffer` class implements the <add>[`Uint8Array`] API in a manner that is more optimized and suitable for Node.js. <ide> <ide> Instances of the `Buffer` class are similar to arrays of integers but <ide> correspond to fixed-sized, raw memory allocations outside the V8 heap. <ide> changes: <ide> --> <ide> <ide> `Buffer` instances are also [`Uint8Array`] instances. However, there are subtle <del>incompatibilities with the TypedArray specification in [`ECMAScript 2015`]. <del>For example, while [`ArrayBuffer#slice()`] creates a copy of the slice, the <del>implementation of [`Buffer#slice()`][`buf.slice()`] creates a view over the <del>existing `Buffer` without copying, making [`Buffer#slice()`][`buf.slice()`] far <del>more efficient. <add>incompatibilities with [`TypedArray`]. For example, while <add>[`ArrayBuffer#slice()`] creates a copy of the slice, the implementation of <add>[`Buffer#slice()`][`buf.slice()`] creates a view over the existing `Buffer` <add>without copying, making [`Buffer#slice()`][`buf.slice()`] far more efficient. <ide> <ide> It is also possible to create new [`TypedArray`] instances from a `Buffer` with <ide> the following caveats: <ide> function: <ide> * [`Buffer.from(arrayBuffer[, byteOffset [, length]])`][`Buffer.from(arrayBuffer)`] <ide> * [`Buffer.from(string[, encoding])`][`Buffer.from(string)`] <ide> <del>## Buffers and ES6 iteration <add>## Buffers and iteration <ide> <del>`Buffer` instances can be iterated over using the [`ECMAScript 2015`] (ES6) `for..of` <del>syntax. <add>`Buffer` instances can be iterated over using `for..of` syntax: <ide> <ide> ```js <ide> const buf = Buffer.from([1, 2, 3]); <ide> This value may depend on the JS engine that is being used. <ide> [RFC1345]: https://tools.ietf.org/html/rfc1345 <ide> [RFC4648, Section 5]: https://tools.ietf.org/html/rfc4648#section-5 <ide> [WHATWG Encoding Standard]: https://encoding.spec.whatwg.org/ <del>[`ECMAScript 2015`]: https://www.ecma-international.org/ecma-262/6.0/index.html <ide> [iterator]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols
1
Ruby
Ruby
remove unused code
95d059720b4438ed650aac09e12bce2f7780ec9a
<ide><path>activesupport/test/multibyte_conformance_test.rb <ide> def test_normalizations_KD <ide> <ide> protected <ide> def each_line_of_norm_tests(&block) <del> lines = 0 <del> max_test_lines = 0 # Don't limit below 38, because that's the header of the testfile <ide> File.open(File.join(CACHE_DIR, UNIDATA_FILE), 'r') do | f | <del> until f.eof? || (max_test_lines > 38 and lines > max_test_lines) <del> lines += 1 <add> until f.eof? <ide> line = f.gets.chomp! <ide> next if (line.empty? || line =~ /^\#/) <ide>
1
Ruby
Ruby
stop ddl modifications for another test
86a73cd8492380eebaa6844dd3ba9924460a0e67
<ide><path>activerecord/test/cases/migration_test.rb <ide> def migrate(x); raise 'Something broke'; end <ide> refute Person.column_methods_hash.include?(:last_name) <ide> end <ide> <del> def test_only_loads_pending_migrations <del> # migrate up to 1 <del> ActiveRecord::SchemaMigration.create!(:version => '1') <del> <del> proxies = ActiveRecord::Migrator.migrate(MIGRATIONS_ROOT + "/valid", nil) <del> <del> names = proxies.map(&:name) <del> assert !names.include?('ValidPeopleHaveLastNames') <del> assert names.include?('WeNeedReminders') <del> assert names.include?('InnocentJointable') <del> end <del> <ide> def test_get_all_versions <ide> ActiveRecord::Migrator.migrate(MIGRATIONS_ROOT + "/valid") <ide> assert_equal([1,2,3], ActiveRecord::Migrator.get_all_versions) <ide><path>activerecord/test/cases/migrator_test.rb <ide> def test_migrator_forward <ide> assert_equal(3, ActiveRecord::Migrator.current_version) <ide> end <ide> <add> def test_only_loads_pending_migrations <add> # migrate up to 1 <add> ActiveRecord::SchemaMigration.create!(:version => '1') <add> <add> calls, migrator = migrator_class(3) <add> migrator.migrate("valid", nil) <add> <add> assert_equal [[:up, 2], [:up, 3]], calls <add> end <add> <ide> private <ide> def m(name, version, &block) <ide> x = Sensor.new name, version
2
Javascript
Javascript
fix broken images
808d6b94eb8d806e60bc46623e61996e8e0348d9
<ide><path>examples/with-framer-motion/components/Gallery.js <ide> const Thumbnail = ({ id, i }) => ( <ide> > <ide> <Link href="/image/[index]" as={`/image/${i}`} scroll={false}> <ide> <motion.img <del> src={`https://static1.squarespace.com/static/5b475b2c50a54f54f9b4e1dc/t/${id}.jpg?format=1500w`} <add> src={`https://images.unsplash.com/${id}?auto=format&fit=crop&w=1500`} <ide> alt="The Barbican" <ide> variants={imageVariants} <ide> transition={transition} <ide> const Thumbnail = ({ id, i }) => ( <ide> <ide> const Gallery = () => ( <ide> <> <del> <h1>Barbican</h1> <add> <h1>Motion</h1> <ide> <div className="gallery"> <ide> <motion.div <ide> className="thumbnails" <ide> const Gallery = () => ( <ide> bottom: -130px; <ide> } <ide> } <del> <add> <ide> @media screen and (min-width: 800px) { <ide> h1 { <ide> font-size: 180px; <ide> bottom: -170px; <ide> } <ide> } <del> <add> <ide> @media screen and (min-width: 1000px) { <ide> h1 { <ide> font-size: 220px; <ide><path>examples/with-framer-motion/components/SingleImage.js <ide> const SingleImage = ({ index }) => ( <ide> <motion.div className="single" initial="exit" animate="enter" exit="exit"> <ide> <motion.img <ide> variants={imageVariants} <del> src={`https://static1.squarespace.com/static/5b475b2c50a54f54f9b4e1dc/t/${images[index]}.jpg?format=1500w`} <add> src={`https://images.unsplash.com/${images[index]}?auto=format&fit=crop&w=1500`} <ide> alt="The Barbican" <ide> /> <ide> <motion.div className="back" variants={backVariants}> <ide><path>examples/with-framer-motion/constants.js <ide> export const images = [ <del> '5b5a3938562fa764113169a6/1532639559620/DSCF3338', <del> '5b5a3628f950b7390fbfc5f8/1532639027872/DSCF3246', <del> '5b5a3741575d1fccb5ac6b3f/1532639066455/DSCF3268', <del> '5b5a376b0e2e728eeeaca8e4/1532683586969/DSCF3274', <del> '5b5c228403ce64f3c80d4d8e/1532764845121/DSCF3348', <del> '5b5a3b800e2e728eeead9575/1532640158813/DSCF3375', <add> 'photo-1520517601640-32ec514e4a15', <add> 'photo-1518780535463-bc357fa46e64', <add> 'photo-1555068178-89125fb6356d', <add> 'photo-1553503359-d4ff2537a6ea', <add> 'photo-1585211751845-37663b4ab614', <add> 'photo-1496467115032-c504ef76521b', <ide> ] <ide><path>examples/with-framer-motion/pages/image/[index].js <ide> export async function getStaticProps({ params }) { <ide> export async function getStaticPaths() { <ide> return { <ide> paths: images.map((_id, index) => { <del> console.log(index) <ide> return { <ide> params: { <ide> index: `${index}`,
4
Go
Go
add newlines to formatprogress for json as well
e98b8e08575d28e4cc15fbd64111c42b86841e4c
<ide><path>pkg/streamformatter/streamformatter.go <ide> func (sf *StreamFormatter) FormatProgress(id, action string, progress *jsonmessa <ide> if err != nil { <ide> return nil <ide> } <del> return b <add> return append(b, streamNewlineBytes...) <ide> } <ide> endl := "\r" <ide> if progress.String() == "" {
1
Go
Go
improve error reporting
edd67fd4ad961f0782f1f94e6a26c95810dd037e
<ide><path>api/client/node/tasks.go <ide> import ( <ide> "github.com/docker/docker/cli" <ide> "github.com/docker/docker/opts" <ide> "github.com/docker/engine-api/types" <del> "github.com/docker/engine-api/types/swarm" <ide> "github.com/spf13/cobra" <ide> ) <ide> <ide> type tasksOptions struct { <ide> nodeID string <del> all bool <ide> noResolve bool <ide> filter opts.FilterOpt <ide> } <ide> func newTasksCommand(dockerCli *client.DockerCli) *cobra.Command { <ide> }, <ide> } <ide> flags := cmd.Flags() <del> flags.BoolVarP(&opts.all, "all", "a", false, "Display all instances") <ide> flags.BoolVar(&opts.noResolve, "no-resolve", false, "Do not map IDs to Names") <ide> flags.VarP(&opts.filter, "filter", "f", "Filter output based on conditions provided") <ide> <ide> func runTasks(dockerCli *client.DockerCli, opts tasksOptions) error { <ide> <ide> filter := opts.filter.Value() <ide> filter.Add("node", node.ID) <del> if !opts.all && !filter.Include("desired-state") { <del> filter.Add("desired-state", string(swarm.TaskStateRunning)) <del> filter.Add("desired-state", string(swarm.TaskStateAccepted)) <del> } <del> <ide> tasks, err := client.TaskList( <ide> ctx, <ide> types.TaskListOptions{Filter: filter}) <ide><path>api/client/service/tasks.go <ide> import ( <ide> "github.com/docker/docker/cli" <ide> "github.com/docker/docker/opts" <ide> "github.com/docker/engine-api/types" <del> "github.com/docker/engine-api/types/swarm" <ide> "github.com/spf13/cobra" <ide> ) <ide> <ide> type tasksOptions struct { <ide> serviceID string <del> all bool <ide> noResolve bool <ide> filter opts.FilterOpt <ide> } <ide> func newTasksCommand(dockerCli *client.DockerCli) *cobra.Command { <ide> }, <ide> } <ide> flags := cmd.Flags() <del> flags.BoolVarP(&opts.all, "all", "a", false, "Display all tasks") <ide> flags.BoolVar(&opts.noResolve, "no-resolve", false, "Do not map IDs to Names") <ide> flags.VarP(&opts.filter, "filter", "f", "Filter output based on conditions provided") <ide> <ide> func runTasks(dockerCli *client.DockerCli, opts tasksOptions) error { <ide> <ide> filter := opts.filter.Value() <ide> filter.Add("service", service.ID) <del> if !opts.all && !filter.Include("desired-state") { <del> filter.Add("desired-state", string(swarm.TaskStateRunning)) <del> filter.Add("desired-state", string(swarm.TaskStateAccepted)) <del> } <del> <ide> if filter.Include("node") { <ide> nodeFilters := filter.Get("node") <ide> for _, nodeFilter := range nodeFilters { <ide><path>api/client/task/print.go <ide> import ( <ide> ) <ide> <ide> const ( <del> psTaskItemFmt = "%s\t%s\t%s\t%s\t%s %s ago\t%s\t%s\n" <add> psTaskItemFmt = "%s\t%s\t%s\t%s\t%s\t%s %s ago\t%s\n" <add> maxErrLength = 30 <ide> ) <ide> <ide> type tasksBySlot []swarm.Task <ide> func Print(dockerCli *client.DockerCli, ctx context.Context, tasks []swarm.Task, <ide> <ide> // Ignore flushing errors <ide> defer writer.Flush() <del> fmt.Fprintln(writer, strings.Join([]string{"ID", "NAME", "SERVICE", "IMAGE", "LAST STATE", "DESIRED STATE", "NODE"}, "\t")) <add> fmt.Fprintln(writer, strings.Join([]string{"ID", "NAME", "IMAGE", "NODE", "DESIRED STATE", "CURRENT STATE", "ERROR"}, "\t")) <add> <add> prevName := "" <ide> for _, task := range tasks { <ide> serviceValue, err := resolver.Resolve(ctx, swarm.Service{}, task.ServiceID) <ide> if err != nil { <ide> func Print(dockerCli *client.DockerCli, ctx context.Context, tasks []swarm.Task, <ide> if err != nil { <ide> return err <ide> } <add> <ide> name := serviceValue <ide> if task.Slot > 0 { <ide> name = fmt.Sprintf("%s.%d", name, 
task.Slot) <ide> } <add> <add> // Indent the name if necessary <add> indentedName := name <add> if prevName == name { <add> indentedName = fmt.Sprintf(" \\_ %s", indentedName) <add> } <add> prevName = name <add> <add> // Trim and quote the error message. <add> taskErr := task.Status.Err <add> if len(taskErr) > maxErrLength { <add> taskErr = fmt.Sprintf("%s…", taskErr[:maxErrLength-1]) <add> } <add> if len(taskErr) > 0 { <add> taskErr = fmt.Sprintf("\"%s\"", taskErr) <add> } <add> <ide> fmt.Fprintf( <ide> writer, <ide> psTaskItemFmt, <ide> task.ID, <del> name, <del> serviceValue, <add> indentedName, <ide> task.Spec.ContainerSpec.Image, <add> nodeValue, <add> client.PrettyPrint(task.DesiredState), <ide> client.PrettyPrint(task.Status.State), <ide> strings.ToLower(units.HumanDuration(time.Since(task.Status.Timestamp))), <del> client.PrettyPrint(task.DesiredState), <del> nodeValue, <add> taskErr, <ide> ) <ide> } <ide>
3
Javascript
Javascript
fix typo to pass travis build
7022795f9dd7babc50b2e6fc039ec3f9781f5da9
<ide><path>lang/ms-my.js <ide> require('../moment').lang('ms-my', { <ide> months : "Januari_Februari_Mac_April_Mei_Jun_Julai_Ogos_September_Oktober_November_Disember".split("_"), <ide> monthsShort : "Jan_Feb_Mac_Apr_Mei_Jun_Jul_Ogs_Sep_Okt_Nov_Dis".split("_"), <del> weekdays : "Minggu_Isnin_Selasa_Rabu_Khamis_Jumaat_Sabtu".split("_"), <del> weekdaysShort : "Ahd_Isn_Sel_Rab_Kam_Jum_Sab".split("_"), <add> weekdays : "Ahad_Isnin_Selasa_Rabu_Khamis_Jumaat_Sabtu".split("_"), <add> weekdaysShort : "Ahd_Isn_Sel_Rab_Kha_Jum_Sab".split("_"), <ide> weekdaysMin : "Ah_Is_Sl_Rb_Km_Jm_Sb".split("_"), <ide> longDateFormat : { <ide> LT : "HH.mm", <ide> require('../moment').lang('ms-my', { <ide> }, <ide> relativeTime : { <ide> future : "dalam %s", <del> past : "%s yang lalu", <add> past : "%s yang lepas", <ide> s : "beberapa saat", <ide> m : "seminit", <ide> mm : "%d minit", <ide><path>test/lang/ms-my.js <ide> exports["lang:ms-my"] = { <ide> test.expect(22); <ide> <ide> var a = [ <del> ['dddd, MMMM Do YYYY, h:mm:ss a', 'Ahad, Februari 14 2010, 3:25:50 pm'], <del> ['ddd, hA', 'Ahd, 3PM'], <add> ['dddd, MMMM Do YYYY, h:mm:ss a', 'Ahad, Februari 14 2010, 3:25:50 petang'], <add> ['ddd, hA', 'Ahd, 3petang'], <ide> ['M Mo MM MMMM MMM', '2 2 02 Februari Feb'], <ide> ['YYYY YY', '2010 10'], <ide> ['D Do DD', '14 14 14'], <ide> ['d do dddd ddd dd', '0 0 Ahad Ahd Ah'], <ide> ['DDD DDDo DDDD', '45 45 045'], <del> ['w wo ww', '8 8 08'], <add> ['w wo ww', '7 7 07'], <ide> ['h hh', '3 03'], <ide> ['H HH', '15 15'], <ide> ['m mm', '25 25'], <ide> ['s ss', '50 50'], <del> ['a A', 'pm PM'], <del> ['t\\he DDDo \\d\\ay of t\\he ye\\ar', 'hari ke 45 tahun ini'], <del> ['L', '02/14/2010'], <del> ['LL', 'Februari 14 2010'], <del> ['LLL', 'Februari 14 2010 3:25 PM'], <del> ['LLLL', 'Ahad, Februari 14 2010 3:25 PM'], <add> ['a A', 'petang petang'], <add> ['hari ke DDDo tahun ini', 'hari ke 45 tahun ini'], <add> ['L', '14/02/2010'], <add> ['LL', '14 Februari 2010'], <add> ['LLL', '14 Februari 2010 3:25'], <add> ['LLLL', 'Ahad, Februari 14 2010 3:25'], <ide> ['l', '2/14/2010'], <ide> ['ll', 'Feb 14 2010'], <ide> ['lll', 'Feb 14 2010 3:25 PM'], <ide> exports["lang:ms-my"] = { <ide> "format ordinal" : function(test) { <ide> test.expect(31); <ide> <del> test.equal(moment([2011, 0, 1]).format('DDDo'), '1st', '1st'); <del> test.equal(moment([2011, 0, 2]).format('DDDo'), '2nd', '2nd'); <del> test.equal(moment([2011, 0, 3]).format('DDDo'), '3rd', '3rd'); <del> test.equal(moment([2011, 0, 4]).format('DDDo'), '4th', '4th'); <del> test.equal(moment([2011, 0, 5]).format('DDDo'), '5th', '5th'); <del> test.equal(moment([2011, 0, 6]).format('DDDo'), '6th', '6th'); <del> test.equal(moment([2011, 0, 7]).format('DDDo'), '7th', '7th'); <del> test.equal(moment([2011, 0, 8]).format('DDDo'), '8th', '8th'); <del> test.equal(moment([2011, 0, 9]).format('DDDo'), '9th', '9th'); <del> test.equal(moment([2011, 0, 10]).format('DDDo'), '10th', '10th'); <del> <del> test.equal(moment([2011, 0, 11]).format('DDDo'), '11th', '11th'); <del> test.equal(moment([2011, 0, 12]).format('DDDo'), '12th', '12th'); <del> test.equal(moment([2011, 0, 13]).format('DDDo'), '13th', '13th'); <del> test.equal(moment([2011, 0, 14]).format('DDDo'), '14th', '14th'); <del> test.equal(moment([2011, 0, 15]).format('DDDo'), '15th', '15th'); <del> test.equal(moment([2011, 0, 16]).format('DDDo'), '16th', '16th'); <del> test.equal(moment([2011, 0, 17]).format('DDDo'), '17th', '17th'); <del> test.equal(moment([2011, 0, 18]).format('DDDo'), '18th', '18th'); <del> test.equal(moment([2011, 0, 
19]).format('DDDo'), '19th', '19th'); <del> test.equal(moment([2011, 0, 20]).format('DDDo'), '20th', '20th'); <del> <del> test.equal(moment([2011, 0, 21]).format('DDDo'), '21st', '21st'); <del> test.equal(moment([2011, 0, 22]).format('DDDo'), '22nd', '22nd'); <del> test.equal(moment([2011, 0, 23]).format('DDDo'), '23rd', '23rd'); <del> test.equal(moment([2011, 0, 24]).format('DDDo'), '24th', '24th'); <del> test.equal(moment([2011, 0, 25]).format('DDDo'), '25th', '25th'); <del> test.equal(moment([2011, 0, 26]).format('DDDo'), '26th', '26th'); <del> test.equal(moment([2011, 0, 27]).format('DDDo'), '27th', '27th'); <del> test.equal(moment([2011, 0, 28]).format('DDDo'), '28th', '28th'); <del> test.equal(moment([2011, 0, 29]).format('DDDo'), '29th', '29th'); <del> test.equal(moment([2011, 0, 30]).format('DDDo'), '30th', '30th'); <del> <del> test.equal(moment([2011, 0, 31]).format('DDDo'), '31st', '31st'); <add> test.equal(moment([2011, 0, 1]).format('DDDo'), '1', '1'); <add> test.equal(moment([2011, 0, 2]).format('DDDo'), '2', '2'); <add> test.equal(moment([2011, 0, 3]).format('DDDo'), '3', '3'); <add> test.equal(moment([2011, 0, 4]).format('DDDo'), '4', '4'); <add> test.equal(moment([2011, 0, 5]).format('DDDo'), '5', '5'); <add> test.equal(moment([2011, 0, 6]).format('DDDo'), '6', '6'); <add> test.equal(moment([2011, 0, 7]).format('DDDo'), '7', '7'); <add> test.equal(moment([2011, 0, 8]).format('DDDo'), '8', '8'); <add> test.equal(moment([2011, 0, 9]).format('DDDo'), '9', '9'); <add> test.equal(moment([2011, 0, 10]).format('DDDo'), '10', '10'); <add> <add> test.equal(moment([2011, 0, 11]).format('DDDo'), '11', '11'); <add> test.equal(moment([2011, 0, 12]).format('DDDo'), '12', '12'); <add> test.equal(moment([2011, 0, 13]).format('DDDo'), '13', '13'); <add> test.equal(moment([2011, 0, 14]).format('DDDo'), '14', '14'); <add> test.equal(moment([2011, 0, 15]).format('DDDo'), '15', '15'); <add> test.equal(moment([2011, 0, 16]).format('DDDo'), '16', '16'); <add> test.equal(moment([2011, 0, 17]).format('DDDo'), '17', '17'); <add> test.equal(moment([2011, 0, 18]).format('DDDo'), '18', '18'); <add> test.equal(moment([2011, 0, 19]).format('DDDo'), '19', '19'); <add> test.equal(moment([2011, 0, 20]).format('DDDo'), '20', '20'); <add> <add> test.equal(moment([2011, 0, 21]).format('DDDo'), '21', '21'); <add> test.equal(moment([2011, 0, 22]).format('DDDo'), '22', '22'); <add> test.equal(moment([2011, 0, 23]).format('DDDo'), '23', '23'); <add> test.equal(moment([2011, 0, 24]).format('DDDo'), '24', '24'); <add> test.equal(moment([2011, 0, 25]).format('DDDo'), '25', '25'); <add> test.equal(moment([2011, 0, 26]).format('DDDo'), '26', '26'); <add> test.equal(moment([2011, 0, 27]).format('DDDo'), '27', '27'); <add> test.equal(moment([2011, 0, 28]).format('DDDo'), '28', '28'); <add> test.equal(moment([2011, 0, 29]).format('DDDo'), '29', '29'); <add> test.equal(moment([2011, 0, 30]).format('DDDo'), '30', '30'); <add> <add> test.equal(moment([2011, 0, 31]).format('DDDo'), '31', '31'); <ide> test.done(); <ide> }, <ide> <ide> exports["lang:ms-my"] = { <ide> <ide> var a = moment().hours(2).minutes(0).seconds(0); <ide> <del> test.equal(moment(a).calendar(), "Hari ini pada pukul 2:00 AM", "hari ini pada waktu yang sama"); <del> test.equal(moment(a).add({ m: 25 }).calendar(), "Hari ini pada pukul 2:25 AM", "Sekarang tambah 25 minit"); <del> test.equal(moment(a).add({ h: 1 }).calendar(), "Hari ini pada pukul 3:00 AM", "Sekarang tambah 1 jam"); <del> test.equal(moment(a).add({ d: 1 }).calendar(), "Esok pada pukul 2:00 AM", 
"esok pada waktu yang sama"); <del> test.equal(moment(a).subtract({ h: 1 }).calendar(), "Hari ini pada pukul 1:00 AM", "Sekarang tolak 1 jam"); <del> test.equal(moment(a).subtract({ d: 1 }).calendar(), "Semalam pada pukul 2:00 AM", "semalam pada waktu yang sama"); <add> test.equal(moment(a).calendar(), "Hari ini pada pukul 2:00", "hari ini pada waktu yang sama"); <add> test.equal(moment(a).add({ m: 25 }).calendar(), "Hari ini pada pukul 2:25", "Sekarang tambah 25 minit"); <add> test.equal(moment(a).add({ h: 1 }).calendar(), "Hari ini pada pukul 3:00", "Sekarang tambah 1 jam"); <add> test.equal(moment(a).add({ d: 1 }).calendar(), "Esok pada pukul 2:00", "esok pada waktu yang sama"); <add> test.equal(moment(a).subtract({ h: 1 }).calendar(), "Hari ini pada pukul 1:00", "Sekarang tolak 1 jam"); <add> test.equal(moment(a).subtract({ d: 1 }).calendar(), "Semalam pada pukul 2:00", "semalam pada waktu yang sama"); <ide> <ide> test.done(); <ide> }, <ide> exports["lang:ms-my"] = { <ide> <ide> for (i = 2; i < 7; i++) { <ide> m = moment().add({ d: i }); <del> test.equal(m.calendar(), m.format('dddd [at] LT'), "Hari ini + " + i + " hari waktu sekarang"); <add> test.equal(m.calendar(), m.format('dddd [pukul] LT'), "Hari ini + " + i + " hari waktu sekarang"); <ide> m.hours(0).minutes(0).seconds(0).milliseconds(0); <del> test.equal(m.calendar(), m.format('dddd [at] LT'), "Hari ini + " + i + " hari permulaan hari"); <add> test.equal(m.calendar(), m.format('dddd [pukul] LT'), "Hari ini + " + i + " hari permulaan hari"); <ide> m.hours(23).minutes(59).seconds(59).milliseconds(999); <del> test.equal(m.calendar(), m.format('dddd [at] LT'), "Hari ini + " + i + " hari tamat hari"); <add> test.equal(m.calendar(), m.format('dddd [pukul] LT'), "Hari ini + " + i + " hari tamat hari"); <ide> } <ide> test.done(); <ide> }, <ide> exports["lang:ms-my"] = { <ide> <ide> for (i = 2; i < 7; i++) { <ide> m = moment().subtract({ d: i }); <del> test.equal(m.calendar(), m.format('[last] dddd [at] LT'), "Hari ini - " + i + " hari waktu sekarang"); <add> test.equal(m.calendar(), m.format('dddd [lepas] [pukul] LT'), "Hari ini - " + i + " hari waktu sekarang"); <ide> m.hours(0).minutes(0).seconds(0).milliseconds(0); <del> test.equal(m.calendar(), m.format('[last] dddd [at] LT'), "Hari ini - " + i + " hari permulaan hari"); <add> test.equal(m.calendar(), m.format('dddd [lepas] [pukul] LT'), "Hari ini - " + i + " hari permulaan hari"); <ide> m.hours(23).minutes(59).seconds(59).milliseconds(999); <del> test.equal(m.calendar(), m.format('[last] dddd [at] LT'), "Hari ini - " + i + " hari tamat hari"); <add> test.equal(m.calendar(), m.format('dddd [lepas] [pukul] LT'), "Hari ini - " + i + " hari tamat hari"); <ide> } <ide> test.done(); <ide> }, <ide> exports["lang:ms-my"] = { <ide> var weeksAgo = moment().subtract({ w: 1 }), <ide> weeksFromNow = moment().add({ w: 1 }); <ide> <del> test.equal(weeksAgo.calendar(), weeksAgo.format('L'), "1 minggu lalu"); <add> test.equal(weeksAgo.calendar(), weeksAgo.format('L'), "1 minggu lepas"); <ide> test.equal(weeksFromNow.calendar(), weeksFromNow.format('L'), "dalam 1 minggu"); <ide> <ide> weeksAgo = moment().subtract({ w: 2 }); <ide> exports["lang:ms-my"] = { <ide> test.expect(5); <ide> <ide> test.equal(moment([2012, 0, 1]).week(), 1, "Jan 1 2012 sepatutnya minggu 1"); <del> test.equal(moment([2012, 0, 7]).week(), 1, "Jan 7 2012 sepatutnya minggu 1"); <add> test.equal(moment([2012, 0, 7]).week(), 2, "Jan 7 2012 sepatutnya minggu 2"); <ide> test.equal(moment([2012, 0, 8]).week(), 2, "Jan 8 2012 
sepatutnya minggu 2"); <del> test.equal(moment([2012, 0, 14]).week(), 2, "Jan 14 2012 sepatutnya minggu 2"); <add> test.equal(moment([2012, 0, 14]).week(), 3, "Jan 14 2012 sepatutnya minggu 3"); <ide> test.equal(moment([2012, 0, 15]).week(), 3, "Jan 15 2012 sepatutnya minggu 3"); <ide> <ide> test.done(); <ide> exports["lang:ms-my"] = { <ide> test.equal(moment([2006, 11, 31]).week(), 1, "Dec 31 2006 sepatutnya minggu 1"); <ide> test.equal(moment([2007, 0, 1]).week(), 1, "Jan 1 2007 sepatutnya minggu 1"); <ide> test.equal(moment([2007, 0, 6]).week(), 1, "Jan 6 2007 sepatutnya minggu 1"); <del> test.equal(moment([2007, 0, 7]).week(), 2, "Jan 7 2007 sepatutnya minggu 2"); <add> test.equal(moment([2007, 0, 7]).week(), 1, "Jan 7 2007 sepatutnya minggu 1"); <ide> test.equal(moment([2007, 0, 13]).week(), 2, "Jan 13 2007 sepatutnya minggu 2"); <del> test.equal(moment([2007, 0, 14]).week(), 3, "Jan 14 2007 sepatutnya minggu 3"); <add> test.equal(moment([2007, 0, 14]).week(), 2, "Jan 14 2007 sepatutnya minggu 2"); <ide> <ide> test.done(); <ide> }, <ide> <ide> "weeks year starting tuesday" : function(test) { <ide> test.expect(6); <ide> <del> test.equal(moment([2007, 11, 30]).week(), 1, "Dec 30 2007 sepatutnya minggu 1"); <add> test.equal(moment([2007, 11, 30]).week(), 52, "Dec 30 2007 sepatutnya minggu 52"); <ide> test.equal(moment([2008, 0, 1]).week(), 1, "Jan 1 2008 sepatutnya minggu 1"); <ide> test.equal(moment([2008, 0, 5]).week(), 1, "Jan 5 2008 sepatutnya minggu 1"); <del> test.equal(moment([2008, 0, 6]).week(), 2, "Jan 6 2008 sepatutnya minggu 2"); <add> test.equal(moment([2008, 0, 6]).week(), 1, "Jan 6 2008 sepatutnya minggu 1"); <ide> test.equal(moment([2008, 0, 12]).week(), 2, "Jan 12 2008 sepatutnya minggu 2"); <del> test.equal(moment([2008, 0, 13]).week(), 3, "Jan 13 2008 sepatutnya minggu 3"); <add> test.equal(moment([2008, 0, 13]).week(), 2, "Jan 13 2008 sepatutnya minggu 2"); <ide> <ide> test.done(); <ide> }, <ide> <ide> "weeks year starting wednesday" : function(test) { <ide> test.expect(6); <ide> <del> test.equal(moment([2002, 11, 29]).week(), 1, "Dec 29 2002 sepatutnya minggu 1"); <add> test.equal(moment([2002, 11, 29]).week(), 52, "Dec 29 2002 sepatutnya minggu 52"); <ide> test.equal(moment([2003, 0, 1]).week(), 1, "Jan 1 2003 sepatutnya minggu 1"); <ide> test.equal(moment([2003, 0, 4]).week(), 1, "Jan 4 2003 sepatutnya minggu 1"); <del> test.equal(moment([2003, 0, 5]).week(), 2, "Jan 5 2003 sepatutnya minggu 2"); <add> test.equal(moment([2003, 0, 5]).week(), 1, "Jan 5 2003 sepatutnya minggu 1"); <ide> test.equal(moment([2003, 0, 11]).week(), 2, "Jan 11 2003 sepatutnya minggu 2"); <del> test.equal(moment([2003, 0, 12]).week(), 3, "Jan 12 2003 sepatutnya minggu 3"); <add> test.equal(moment([2003, 0, 12]).week(), 2, "Jan 12 2003 sepatutnya minggu 2"); <ide> <ide> test.done(); <ide> }, <ide> <ide> "weeks year starting thursday" : function(test) { <ide> test.expect(6); <ide> <del> test.equal(moment([2008, 11, 28]).week(), 1, "Dec 28 2008 sepatutnya minggu 1"); <add> test.equal(moment([2008, 11, 28]).week(), 52, "Dec 28 2008 sepatutnya minggu 52"); <ide> test.equal(moment([2009, 0, 1]).week(), 1, "Jan 1 2009 sepatutnya minggu 1"); <ide> test.equal(moment([2009, 0, 3]).week(), 1, "Jan 3 2009 sepatutnya minggu 1"); <del> test.equal(moment([2009, 0, 4]).week(), 2, "Jan 4 2009 sepatutnya minggu 2"); <add> test.equal(moment([2009, 0, 4]).week(), 1, "Jan 4 2009 sepatutnya minggu 1"); <ide> test.equal(moment([2009, 0, 10]).week(), 2, "Jan 10 2009 sepatutnya minggu 2"); <del> 
test.equal(moment([2009, 0, 11]).week(), 3, "Jan 11 2009 sepatutnya minggu 3"); <add> test.equal(moment([2009, 0, 11]).week(), 2, "Jan 11 2009 sepatutnya minggu 2"); <ide> <ide> test.done(); <ide> }, <del> <add>petang <ide> "weeks year starting friday" : function(test) { <ide> test.expect(6); <ide> <del> test.equal(moment([2009, 11, 27]).week(), 1, "Dec 27 2009 sepatutnya minggu 1"); <add> test.equal(moment([2009, 11, 27]).week(), 52, "Dec 27 2009 sepatutnya minggu 52"); <ide> test.equal(moment([2010, 0, 1]).week(), 1, "Jan 1 2010 sepatutnya minggu 1"); <ide> test.equal(moment([2010, 0, 2]).week(), 1, "Jan 2 2010 sepatutnya minggu 1"); <del> test.equal(moment([2010, 0, 3]).week(), 2, "Jan 3 2010 sepatutnya minggu 2"); <add> test.equal(moment([2010, 0, 3]).week(), 1, "Jan 3 2010 sepatutnya minggu 1"); <ide> test.equal(moment([2010, 0, 9]).week(), 2, "Jan 9 2010 sepatutnya minggu 2"); <del> test.equal(moment([2010, 0, 10]).week(), 3, "Jan 10 2010 sepatutnya minggu 3"); <add> test.equal(moment([2010, 0, 10]).week(), 2, "Jan 10 2010 sepatutnya minggu 2"); <ide> <ide> test.done(); <ide> }, <ide> <ide> "weeks year starting saturday" : function(test) { <ide> test.expect(5); <ide> <del> test.equal(moment([2010, 11, 26]).week(), 1, "Dec 26 2010 sepatutnya minggu 1"); <add> test.equal(moment([2010, 11, 26]).week(), 52, "Dec 26 2010 sepatutnya minggu 52"); <ide> test.equal(moment([2011, 0, 1]).week(), 1, "Jan 1 2011 sepatutnya minggu 1"); <del> test.equal(moment([2011, 0, 2]).week(), 2, "Jan 2 2011 sepatutnya minggu 2"); <add> test.equal(moment([2011, 0, 2]).week(), 1, "Jan 2 2011 sepatutnya minggu 1"); <ide> test.equal(moment([2011, 0, 8]).week(), 2, "Jan 8 2011 sepatutnya minggu 2"); <del> test.equal(moment([2011, 0, 9]).week(), 3, "Jan 9 2011 sepatutnya minggu 3"); <add> test.equal(moment([2011, 0, 9]).week(), 2, "Jan 9 2011 sepatutnya minggu 2"); <ide> <ide> test.done(); <ide> }, <ide> <ide> "weeks year starting sunday format" : function(test) { <ide> test.expect(5); <ide> <del> test.equal(moment([2012, 0, 1]).format('w ww wo'), '1 01 1st', "Jan 1 2012 sepatutnya minggu 1"); <del> test.equal(moment([2012, 0, 7]).format('w ww wo'), '1 01 1st', "Jan 7 2012 sepatutnya minggu 1"); <del> test.equal(moment([2012, 0, 8]).format('w ww wo'), '2 02 2nd', "Jan 8 2012 sepatutnya minggu 2"); <del> test.equal(moment([2012, 0, 14]).format('w ww wo'), '2 02 2nd', "Jan 14 2012 sepatutnya minggu 2"); <del> test.equal(moment([2012, 0, 15]).format('w ww wo'), '3 03 3rd', "Jan 15 2012 sepatutnya minggu 3"); <add> test.equal(moment([2012, 0, 1]).format('w ww wo'), '1 01 1', "Jan 1 2012 sepatutnya minggu 1"); <add> test.equal(moment([2012, 0, 7]).format('w ww wo'), '1 01 1', "Jan 7 2012 sepatutnya minggu 1"); <add> test.equal(moment([2012, 0, 8]).format('w ww wo'), '2 02 2', "Jan 8 2012 sepatutnya minggu 2"); <add> test.equal(moment([2012, 0, 14]).format('w ww wo'), '2 02 2', "Jan 14 2012 sepatutnya minggu 2"); <add> test.equal(moment([2012, 0, 15]).format('w ww wo'), '3 03 3', "Jan 15 2012 sepatutnya minggu 3"); <ide> <ide> test.done(); <ide> }
2
Ruby
Ruby
fix state leaks in actionmailer/test/base_test.rb
1401637e3f9836d6a9d41f81dc722edcb1017bc4
<ide><path>actionmailer/test/base_test.rb <ide> require 'mailers/asset_mailer' <ide> <ide> class BaseTest < ActiveSupport::TestCase <del> def teardown <del> ActionMailer::Base.asset_host = nil <del> ActionMailer::Base.assets_dir = nil <del> ActionMailer::Base.preview_interceptors.clear <add> setup do <add> @original_asset_host = ActionMailer::Base.asset_host <add> @original_assets_dir = ActionMailer::Base.assets_dir <add> end <add> <add> teardown do <add> ActionMailer::Base.asset_host = @original_asset_host <add> ActionMailer::Base.assets_dir = @original_assets_dir <add> BaseMailer.deliveries.clear <ide> end <ide> <ide> test "method call to mail does not raise error" do <ide> def teardown <ide> test "attachment gets content type from filename" do <ide> email = BaseMailer.attachment_with_content <ide> assert_equal('invoice.pdf', email.attachments[0].filename) <add> assert_equal('application/pdf', email.attachments[0].mime_type) <ide> end <ide> <ide> test "attachment with hash" do <ide> def teardown <ide> end <ide> <ide> test "subject gets default from I18n" do <del> BaseMailer.default subject: nil <del> email = BaseMailer.welcome(subject: nil) <del> assert_equal "Welcome", email.subject <add> with_default BaseMailer, subject: nil do <add> email = BaseMailer.welcome(subject: nil) <add> assert_equal "Welcome", email.subject <ide> <del> I18n.backend.store_translations('en', base_mailer: {welcome: {subject: "New Subject!"}}) <del> email = BaseMailer.welcome(subject: nil) <del> assert_equal "New Subject!", email.subject <add> with_translation 'en', base_mailer: {welcome: {subject: "New Subject!"}} do <add> email = BaseMailer.welcome(subject: nil) <add> assert_equal "New Subject!", email.subject <add> end <add> end <ide> end <ide> <ide> test 'default subject can have interpolations' do <del> I18n.backend.store_translations('en', base_mailer: {with_subject_interpolations: {subject: 'Will the real %{rapper_or_impersonator} please stand up?'}}) <del> email = BaseMailer.with_subject_interpolations <del> assert_equal 'Will the real Slim Shady please stand up?', email.subject <add> with_translation 'en', base_mailer: {with_subject_interpolations: {subject: 'Will the real %{rapper_or_impersonator} please stand up?'}} do <add> email = BaseMailer.with_subject_interpolations <add> assert_equal 'Will the real Slim Shady please stand up?', email.subject <add> end <ide> end <ide> <ide> test "translations are scoped properly" do <del> I18n.backend.store_translations('en', base_mailer: {email_with_translations: {greet_user: "Hello %{name}!"}}) <del> email = BaseMailer.email_with_translations <del> assert_equal 'Hello lifo!', email.body.encoded <add> with_translation 'en', base_mailer: {email_with_translations: {greet_user: "Hello %{name}!"}} do <add> email = BaseMailer.email_with_translations <add> assert_equal 'Hello lifo!', email.body.encoded <add> end <ide> end <ide> <ide> # Implicit multipart <ide> def teardown <ide> end <ide> <ide> test "calling just the action should return the generated mail object" do <del> BaseMailer.deliveries.clear <ide> email = BaseMailer.welcome <ide> assert_equal(0, BaseMailer.deliveries.length) <ide> assert_equal('The first email on new API!', email.subject) <ide> end <ide> <ide> test "calling deliver on the action should deliver the mail object" do <del> BaseMailer.deliveries.clear <ide> BaseMailer.expects(:deliver_mail).once <ide> mail = BaseMailer.welcome.deliver <ide> assert_equal 'The first email on new API!', mail.subject <ide> end <ide> <ide> test "calling deliver on the 
action should increment the deliveries collection if using the test mailer" do <ide> BaseMailer.delivery_method = :test <del> BaseMailer.deliveries.clear <ide> BaseMailer.welcome.deliver <ide> assert_equal(1, BaseMailer.deliveries.length) <ide> end <ide> def teardown <ide> end <ide> <ide> test "should raise if missing template in implicit render" do <del> BaseMailer.deliveries.clear <ide> assert_raises ActionView::MissingTemplate do <ide> BaseMailer.implicit_different_template('missing_template').deliver <ide> end <ide> def teardown <ide> end <ide> <ide> test "assets tags should use a Mailer's asset_host settings when available" do <del> begin <del> ActionMailer::Base.config.asset_host = "http://global.com" <del> ActionMailer::Base.config.assets_dir = "global/" <add> ActionMailer::Base.config.asset_host = "http://global.com" <add> ActionMailer::Base.config.assets_dir = "global/" <ide> <del> AssetMailer.asset_host = "http://local.com" <add> TempAssetMailer = Class.new(AssetMailer) do <add> self.mailer_name = "asset_mailer" <add> self.asset_host = "http://local.com" <add> end <ide> <del> mail = AssetMailer.welcome <add> mail = TempAssetMailer.welcome <ide> <del> assert_equal(%{<img alt="Dummy" src="http://local.com/images/dummy.png" />}, mail.body.to_s.strip) <del> ensure <del> AssetMailer.asset_host = ActionMailer::Base.config.asset_host <del> end <add> assert_equal(%{<img alt="Dummy" src="http://local.com/images/dummy.png" />}, mail.body.to_s.strip) <ide> end <ide> <ide> test 'the view is not rendered when mail was never called' do <ide> def self.delivered_email(mail) <ide> end <ide> <ide> test "you can register an observer to the mail object that gets informed on email delivery" do <del> ActionMailer::Base.register_observer(MyObserver) <del> mail = BaseMailer.welcome <del> MyObserver.expects(:delivered_email).with(mail) <del> mail.deliver <add> mail_side_effects do <add> ActionMailer::Base.register_observer(MyObserver) <add> mail = BaseMailer.welcome <add> MyObserver.expects(:delivered_email).with(mail) <add> mail.deliver <add> end <ide> end <ide> <ide> test "you can register an observer using its stringified name to the mail object that gets informed on email delivery" do <del> ActionMailer::Base.register_observer("BaseTest::MyObserver") <del> mail = BaseMailer.welcome <del> MyObserver.expects(:delivered_email).with(mail) <del> mail.deliver <add> mail_side_effects do <add> ActionMailer::Base.register_observer("BaseTest::MyObserver") <add> mail = BaseMailer.welcome <add> MyObserver.expects(:delivered_email).with(mail) <add> mail.deliver <add> end <ide> end <ide> <ide> test "you can register an observer using its symbolized underscored name to the mail object that gets informed on email delivery" do <del> ActionMailer::Base.register_observer(:"base_test/my_observer") <del> mail = BaseMailer.welcome <del> MyObserver.expects(:delivered_email).with(mail) <del> mail.deliver <add> mail_side_effects do <add> ActionMailer::Base.register_observer(:"base_test/my_observer") <add> mail = BaseMailer.welcome <add> MyObserver.expects(:delivered_email).with(mail) <add> mail.deliver <add> end <ide> end <ide> <ide> test "you can register multiple observers to the mail object that both get informed on email delivery" do <del> ActionMailer::Base.register_observers("BaseTest::MyObserver", MySecondObserver) <del> mail = BaseMailer.welcome <del> MyObserver.expects(:delivered_email).with(mail) <del> MySecondObserver.expects(:delivered_email).with(mail) <del> mail.deliver <add> mail_side_effects do <add> 
ActionMailer::Base.register_observers("BaseTest::MyObserver", MySecondObserver) <add> mail = BaseMailer.welcome <add> MyObserver.expects(:delivered_email).with(mail) <add> MySecondObserver.expects(:delivered_email).with(mail) <add> mail.deliver <add> end <ide> end <ide> <ide> class MyInterceptor <ide> def self.delivering_email(mail); end <ide> def self.previewing_email(mail); end <ide> end <ide> <del> class BaseMailerPreview < ActionMailer::Preview <del> def welcome <del> BaseMailer.welcome <del> end <del> end <del> <ide> test "you can register an interceptor to the mail object that gets passed the mail object before delivery" do <del> ActionMailer::Base.register_interceptor(MyInterceptor) <del> mail = BaseMailer.welcome <del> MyInterceptor.expects(:delivering_email).with(mail) <del> mail.deliver <add> mail_side_effects do <add> ActionMailer::Base.register_interceptor(MyInterceptor) <add> mail = BaseMailer.welcome <add> MyInterceptor.expects(:delivering_email).with(mail) <add> mail.deliver <add> end <ide> end <ide> <ide> test "you can register an interceptor using its stringified name to the mail object that gets passed the mail object before delivery" do <del> ActionMailer::Base.register_interceptor("BaseTest::MyInterceptor") <del> mail = BaseMailer.welcome <del> MyInterceptor.expects(:delivering_email).with(mail) <del> mail.deliver <add> mail_side_effects do <add> ActionMailer::Base.register_interceptor("BaseTest::MyInterceptor") <add> mail = BaseMailer.welcome <add> MyInterceptor.expects(:delivering_email).with(mail) <add> mail.deliver <add> end <ide> end <ide> <ide> test "you can register an interceptor using its symbolized underscored name to the mail object that gets passed the mail object before delivery" do <del> ActionMailer::Base.register_interceptor(:"base_test/my_interceptor") <del> mail = BaseMailer.welcome <del> MyInterceptor.expects(:delivering_email).with(mail) <del> mail.deliver <add> mail_side_effects do <add> ActionMailer::Base.register_interceptor(:"base_test/my_interceptor") <add> mail = BaseMailer.welcome <add> MyInterceptor.expects(:delivering_email).with(mail) <add> mail.deliver <add> end <ide> end <ide> <ide> test "you can register multiple interceptors to the mail object that both get passed the mail object before delivery" do <del> ActionMailer::Base.register_interceptors("BaseTest::MyInterceptor", MySecondInterceptor) <del> mail = BaseMailer.welcome <del> MyInterceptor.expects(:delivering_email).with(mail) <del> MySecondInterceptor.expects(:delivering_email).with(mail) <del> mail.deliver <del> end <del> <del> test "you can register a preview interceptor to the mail object that gets passed the mail object before previewing" do <del> ActionMailer::Base.register_preview_interceptor(MyInterceptor) <del> mail = BaseMailer.welcome <del> BaseMailerPreview.any_instance.stubs(:welcome).returns(mail) <del> MyInterceptor.expects(:previewing_email).with(mail) <del> BaseMailerPreview.call(:welcome) <del> end <del> <del> test "you can register a preview interceptor using its stringified name to the mail object that gets passed the mail object before previewing" do <del> ActionMailer::Base.register_preview_interceptor("BaseTest::MyInterceptor") <del> mail = BaseMailer.welcome <del> BaseMailerPreview.any_instance.stubs(:welcome).returns(mail) <del> MyInterceptor.expects(:previewing_email).with(mail) <del> BaseMailerPreview.call(:welcome) <del> end <del> <del> test "you can register an interceptor using its symbolized underscored name to the mail object that gets passed the mail 
object before previewing" do <del> ActionMailer::Base.register_preview_interceptor(:"base_test/my_interceptor") <del> mail = BaseMailer.welcome <del> BaseMailerPreview.any_instance.stubs(:welcome).returns(mail) <del> MyInterceptor.expects(:previewing_email).with(mail) <del> BaseMailerPreview.call(:welcome) <del> end <del> <del> test "you can register multiple preview interceptors to the mail object that both get passed the mail object before previewing" do <del> ActionMailer::Base.register_preview_interceptors("BaseTest::MyInterceptor", MySecondInterceptor) <del> mail = BaseMailer.welcome <del> BaseMailerPreview.any_instance.stubs(:welcome).returns(mail) <del> MyInterceptor.expects(:previewing_email).with(mail) <del> MySecondInterceptor.expects(:previewing_email).with(mail) <del> BaseMailerPreview.call(:welcome) <add> mail_side_effects do <add> ActionMailer::Base.register_interceptors("BaseTest::MyInterceptor", MySecondInterceptor) <add> mail = BaseMailer.welcome <add> MyInterceptor.expects(:delivering_email).with(mail) <add> MySecondInterceptor.expects(:delivering_email).with(mail) <add> mail.deliver <add> end <ide> end <ide> <ide> test "being able to put proc's into the defaults hash and they get evaluated on mail sending" do <ide> def with_default(klass, new_values) <ide> ensure <ide> klass.default_params = old <ide> end <add> <add> # A simple hack to restore the observers and interceptors for Mail, as it <add> # does not have an unregister API yet. <add> def mail_side_effects <add> old_observers = Mail.class_variable_get(:@@delivery_notification_observers) <add> old_delivery_interceptors = Mail.class_variable_get(:@@delivery_interceptors) <add> yield <add> ensure <add> Mail.class_variable_set(:@@delivery_notification_observers, old_observers) <add> Mail.class_variable_set(:@@delivery_interceptors, old_delivery_interceptors) <add> end <add> <add> def with_translation(locale, data) <add> I18n.backend.store_translations(locale, data) <add> yield <add> ensure <add> I18n.backend.reload! 
<add> end <add>end <add> <add>class BasePreviewInterceptorsTest < ActiveSupport::TestCase <add> teardown do <add> ActionMailer::Base.preview_interceptors.clear <add> end <add> <add> class BaseMailerPreview < ActionMailer::Preview <add> def welcome <add> BaseMailer.welcome <add> end <add> end <add> <add> class MyInterceptor <add> def self.delivering_email(mail); end <add> def self.previewing_email(mail); end <add> end <add> <add> class MySecondInterceptor <add> def self.delivering_email(mail); end <add> def self.previewing_email(mail); end <add> end <add> <add> test "you can register a preview interceptor to the mail object that gets passed the mail object before previewing" do <add> ActionMailer::Base.register_preview_interceptor(MyInterceptor) <add> mail = BaseMailer.welcome <add> BaseMailerPreview.any_instance.stubs(:welcome).returns(mail) <add> MyInterceptor.expects(:previewing_email).with(mail) <add> BaseMailerPreview.call(:welcome) <add> end <add> <add> test "you can register a preview interceptor using its stringified name to the mail object that gets passed the mail object before previewing" do <add> ActionMailer::Base.register_preview_interceptor("BasePreviewInterceptorsTest::MyInterceptor") <add> mail = BaseMailer.welcome <add> BaseMailerPreview.any_instance.stubs(:welcome).returns(mail) <add> MyInterceptor.expects(:previewing_email).with(mail) <add> BaseMailerPreview.call(:welcome) <add> end <add> <add> test "you can register an interceptor using its symbolized underscored name to the mail object that gets passed the mail object before previewing" do <add> ActionMailer::Base.register_preview_interceptor(:"base_preview_interceptors_test/my_interceptor") <add> mail = BaseMailer.welcome <add> BaseMailerPreview.any_instance.stubs(:welcome).returns(mail) <add> MyInterceptor.expects(:previewing_email).with(mail) <add> BaseMailerPreview.call(:welcome) <add> end <add> <add> test "you can register multiple preview interceptors to the mail object that both get passed the mail object before previewing" do <add> ActionMailer::Base.register_preview_interceptors("BasePreviewInterceptorsTest::MyInterceptor", MySecondInterceptor) <add> mail = BaseMailer.welcome <add> BaseMailerPreview.any_instance.stubs(:welcome).returns(mail) <add> MyInterceptor.expects(:previewing_email).with(mail) <add> MySecondInterceptor.expects(:previewing_email).with(mail) <add> BaseMailerPreview.call(:welcome) <add> end <ide> end
1
Java
Java
fix failing test
e12fcca1e3e4d3e8c676dcf3cee94311a4161cbe
<ide><path>spring-test/src/test/java/org/springframework/test/web/reactive/server/samples/ResponseEntityTests.java <ide> public void entityStream() { <ide> .accept(TEXT_EVENT_STREAM) <ide> .exchange() <ide> .expectStatus().isOk() <del> .expectHeader().contentType(TEXT_EVENT_STREAM) <add> .expectHeader().contentTypeCompatibleWith(TEXT_EVENT_STREAM) <ide> .returnResult(Person.class); <ide> <ide> StepVerifier.create(result.getResponseBody())
1
Ruby
Ruby
fix path to livecheck watchlist
3b65ecaf4caa608e4cf31684edc767b2b320ae4b
<ide><path>Library/Homebrew/dev-cmd/livecheck.rb <ide> module Homebrew <ide> <ide> module_function <ide> <del> WATCHLIST_PATH = ( <del> Homebrew::EnvConfig.livecheck_watchlist || <del> "#{Dir.home}/.brew_livecheck_watchlist" <del> ).freeze <add> WATCHLIST_PATH = File.expand_path(Homebrew::EnvConfig.livecheck_watchlist).freeze <ide> <ide> sig { returns(CLI::Parser) } <ide> def livecheck_args <ide><path>Library/Homebrew/env_config.rb <ide> module EnvConfig <ide> boolean: true, <ide> }, <ide> HOMEBREW_LIVECHECK_WATCHLIST: { <del> description: "Consult this file for the list of formulae to check by default when no formula argument " \ <del> "is passed to `brew livecheck`.", <del> default: "$HOME/.brew_livecheck_watchlist", <add> description: "Consult this file for the list of formulae to check by default when no formula argument " \ <add> "is passed to `brew livecheck`.", <add> default_text: "$HOME/.brew_livecheck_watchlist", <add> default: "~/.brew_livecheck_watchlist", <ide> }, <ide> HOMEBREW_LOGS: { <ide> description: "Use this directory to store log files.",
2
Javascript
Javascript
add test for
c82828ec278c3792ada33d60414da76c3d20b6b1
<ide><path>test/simple/test-readdir.js <ide> process.addListener('exit', function() { <ide> assert.equal(false, got_error); <ide> console.log('exit'); <ide> }); <add> <add> <add>// readdir() on file should throw ENOTDIR <add>// https://github.com/joyent/node/issues/1869 <add>(function() { <add> var has_caught = false; <add> <add> try { <add> fs.readdirSync(__filename) <add> } <add> catch (e) { <add> has_caught = true; <add> assert.equal(e.code, 'ENOTDIR'); <add> } <add> <add> assert(has_caught); <add>})(); <add> <add> <add>(function() { <add> var readdir_cb_called = false; <add> <add> fs.readdir(__filename, function(e) { <add> readdir_cb_called = true; <add> assert.equal(e.code, 'ENOTDIR'); <add> }); <add> <add> process.on('exit', function() { <add> assert(readdir_cb_called); <add> }); <add>})(); <ide>\ No newline at end of file
1
Python
Python
use two different tokenizers for story and summary
47a06d88a00c59ea1fb54e92178b3f5d2e8e8973
<ide><path>examples/run_seq2seq_finetuning.py <ide> import torch <ide> from torch.utils.data import Dataset <ide> <del>from transformers import BertTokenizer <add>from transformers import AutoTokenizer, Model2Model <ide> <ide> logger = logging.getLogger(__name__) <ide> <ide> class TextDataset(Dataset): <ide> [2] https://github.com/abisee/cnn-dailymail/ <ide> """ <ide> <del> def __init_(self, tokenizer, data_dir="", block_size=512): <add> def __init_(self, tokenizer_src, tokenizer_tgt, data_dir="", block_size=512): <ide> assert os.path.isdir(data_dir) <ide> <ide> # Load features that have already been computed if present <ide> def __init_(self, tokenizer, data_dir="", block_size=512): <ide> except IndexError: # skip ill-formed stories <ide> continue <ide> <del> summary = tokenizer.convert_tokens_to_ids(tokenizer.tokenize(summary)) <del> summary_seq = _fit_to_block_size(summary, block_size) <del> <del> story = tokenizer.convert_tokens_to_ids(tokenizer.tokenize(story)) <add> story = tokenizer_src.convert_tokens_to_ids(tokenizer_src.tokenize(story)) <ide> story_seq = _fit_to_block_size(story, block_size) <ide> <del> self.examples.append( <del> tokenizer.add_special_token_sequence_pair(story_seq, summary_seq) <del> ) <add> summary = tokenizer_tgt.convert_tokens_to_ids(tokenizer_tgt.tokenize(summary)) <add> summary_seq = _fit_to_block_size(summary, block_size) <add> <add> self.examples.append((story_seq, summary_seq)) <ide> <ide> logger.info("Saving features into cache file %s", cached_features_file) <ide> with open(cached_features_file, "wb") as sink: <ide> def _fit_to_block_size(sequence, block_size): <ide> return sequence.extend([-1] * [block_size - len(sequence)]) <ide> <ide> <del>def load_and_cache_examples(args, tokenizer): <del> dataset = TextDataset(tokenizer, file_path=args.data_dir) <add>def load_and_cache_examples(args, tokenizer_src, tokenizer_tgt): <add> dataset = TextDataset(tokenizer_src, tokenizer_tgt, file_path=args.data_dir) <ide> return dataset <ide> <ide> <ide> def main(): <ide> <ide> # Optional parameters <ide> parser.add_argument( <del> "--model_name_or_path", <add> "--decoder_name_or_path", <ide> default="bert-base-cased", <ide> type=str, <del> help="The model checkpoint for weights initialization.", <add> help="The model checkpoint to initialize the decoder's weights with.", <add> ) <add> parser.add_argument( <add> "--decoder_type", <add> default="bert", <add> type=str, <add> help="The decoder architecture to be fine-tuned.", <add> ) <add> parser.add_argument( <add> "--encoder_name_or_path", <add> default="bert-base-cased", <add> type=str, <add> help="The model checkpoint to initialize the encoder's weights with.", <add> ) <add> parser.add_argument( <add> "--encoder_type", <add> default="bert", <add> type=str, <add> help="The encoder architecture to be fine-tuned.", <ide> ) <ide> parser.add_argument("--seed", default=42, type=int) <ide> args = parser.parse_args() <ide> <add> if args.encoder_type != 'bert' or args.decoder_type != 'bert': <add> raise ValueError("Only the BERT architecture is currently supported for seq2seq.") <add> <ide> # Set up training device <ide> # device = torch.device("cpu") <ide> <ide> # Set seed <ide> set_seed(args) <ide> <ide> # Load pretrained model and tokenizer <del> tokenizer_class = BertTokenizer <del> # config = config_class.from_pretrained(args.model_name_or_path) <del> tokenizer = tokenizer_class.from_pretrained(args.model_name_or_path) <del> # model = model_class.from_pretrained(args.model_name_or_path, config=config) <add> 
encoder_tokenizer_class = AutoTokenizer.from_pretrained(args.encoder_name_or_path) <add> decoder_tokenizer_class = AutoTokenizer.from_pretrained(args.decoder_name_or_path) <add> model = Model2Model.from_pretrained(args.encoder_name_or_path, args.decoder_name_or_path) <ide> # model.to(device) <ide> <ide> logger.info("Training/evaluation parameters %s", args) <ide> <ide> # Training <del> _ = load_and_cache_examples(args, tokenizer) <add> source, target = load_and_cache_examples(args, tokenizer) <ide> # global_step, tr_loss = train(args, train_dataset, model, tokenizer) <ide> # logger.info(" global_step = %s, average loss = %s", global_step, tr_loss) <ide>
1
Python
Python
fix sorting function
bd5c7770e20516fe1697f0aa2d71ee9d2763a9a5
<ide><path>contrib/scrape-ec2-prices.py <ide> def sort_key_by_numeric_other(key_value): <ide> """ <ide> Split key into numeric, alpha and other part and sort accordingly. <ide> """ <del> return tuple(( <del> int(numeric) if numeric else None, <del> INSTANCE_SIZES.index(alpha) if alpha in INSTANCE_SIZES else alpha, <del> other <del> ) for (numeric, alpha, other) in RE_NUMERIC_OTHER.findall(key_value[0])) <add> result = [] <add> <add> for (numeric, alpha, other) in RE_NUMERIC_OTHER.findall(key_value[0]): <add> numeric = int(numeric) if numeric else -1 <add> alpha = INSTANCE_SIZES.index(alpha) if alpha in INSTANCE_SIZES else alpha <add> alpha = str(alpha) <add> item = tuple([numeric, alpha, other]) <add> result.append(item) <add> <add> return tuple(result) <ide> <ide> <ide> def main(): <del> print('Scraping EC2 pricing data') <add> print('Scraping EC2 pricing data (this may take up to 2 minutes)....') <ide> <ide> pricing_data = scrape_ec2_pricing() <ide> update_pricing_file(pricing_file_path=PRICING_FILE_PATH,
1
PHP
PHP
add missing import
2a803ea412b36467eea68e8fe54cb7292aa48ee1
<ide><path>lib/Cake/TestSuite/CakeTestRunner.php <ide> */ <ide> require_once 'PHPUnit/TextUI/TestRunner.php'; <ide> <add>App::uses('CakeFixtureManager', 'TestSuite/Fixture'); <add> <ide> /** <ide> * A custom test runner for Cake's use of PHPUnit. <ide> *
1
Javascript
Javascript
internalize native module
74859c7f0ddc4f08a0229adf0d23d4347b3bd06b
<ide><path>Libraries/Components/DatePickerAndroid/NativeDatePickerAndroid.js <del>/** <del> * Copyright (c) Meta Platforms, Inc. and affiliates. <del> * <del> * This source code is licensed under the MIT license found in the <del> * LICENSE file in the root directory of this source tree. <del> * <del> * @flow <del> * @format <del> */ <del> <del>import type {TurboModule} from '../../TurboModule/RCTExport'; <del> <del>import * as TurboModuleRegistry from '../../TurboModule/TurboModuleRegistry'; <del> <del>/** <del> * This file backs native modules that are used internally at Meta <del> * and this JS spec was intentionally left here. In the meanwhile this <del> * file should not be deleted. <del> */ <del>export interface Spec extends TurboModule { <del> +open: (options: Object) => Promise<Object>; <del>} <del> <del>export default (TurboModuleRegistry.getEnforcing<Spec>( <del> 'DatePickerAndroid', <del>): Spec);
1
PHP
PHP
modify the cache.php docblocks
3adc2196f79fa4d8470d41d5a7584f2b0432a6fc
<ide><path>config/cache.php <ide> | using this caching library. This connection is used when another is <ide> | not explicitly specified when executing a given caching function. <ide> | <del> | Supported: "apc", "array", "database", "file", <del> | "memcached", "redis", "dynamodb" <del> | <ide> */ <ide> <ide> 'default' => env('CACHE_DRIVER', 'file'), <ide> | well as their drivers. You may even define multiple stores for the <ide> | same cache driver to group types of items stored in your caches. <ide> | <add> | Supported drivers: "apc", "array", "database", "file", <add> | "memcached", "redis", "dynamodb" <add> | <ide> */ <ide> <ide> 'stores' => [
1
Text
Text
fix broken link in fs.md
e0a3faeff7cde1fa183a6efc3a3b3167deabd420
<ide><path>doc/api/fs.md <ide> the file contents. <ide> [`Number.MAX_SAFE_INTEGER`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/MAX_SAFE_INTEGER <ide> [`ReadDirectoryChangesW`]: https://docs.microsoft.com/en-us/windows/desktop/api/winbase/nf-winbase-readdirectorychangesw <ide> [`ReadStream`]: #fs_class_fs_readstream <del>[Readable Stream]: #stream_class_stream_readable <add>[Readable Stream]: stream.html#stream_class_stream_readable <ide> [`URL`]: url.html#url_the_whatwg_url_api <ide> [`UV_THREADPOOL_SIZE`]: cli.html#cli_uv_threadpool_size_size <ide> [`WriteStream`]: #fs_class_fs_writestream
1
Text
Text
rename this tip to be less confusing
7640e5310219577ab4a9f8ea3b2683623632fc3b
<ide><path>docs/tips/10-props-in-getInitialState-as-anti-pattern.md <ide> --- <ide> id: props-in-getInitialState-as-anti-pattern <del>title: Props in getInitialState Is an Anti-Pattern <add>title: Using state to cache calculations is an antipattern <ide> layout: tips <ide> permalink: props-in-getInitialState-as-anti-pattern.html <ide> prev: componentWillReceiveProps-not-triggered-after-mounting.html <ide> next: dom-event-listeners.html <ide> > <ide> > This isn't really a React-specific tip, as such anti-patterns often occur in code in general; in this case, React simply points them out more clearly. <ide> <del>Using props, passed down from parent, to generate state in `getInitialState` often leads to duplication of "source of truth", i.e. where the real data is. Whenever possible, compute values on-the-fly to ensure that they don't get out of sync later on and cause maintenance trouble. <add>Using state to cache values calculated from props (for example in `getInitialState`) often leads to duplication of "source of truth", i.e. where the real data is. Whenever possible, compute values on-the-fly to ensure that they don't get out of sync later on and cause maintenance trouble. <ide> <ide> Bad example: <ide>
1
Text
Text
add link to chartjs-plugin-waterfall
95d7d8c20df4377c238224654037e2f4886823a8
<ide><path>docs/notes/extensions.md <ide> In addition, many charts can be found on the [npm registry](https://www.npmjs.co <ide> - <a href="https://github.com/chartjs/chartjs-plugin-deferred" target="_blank">chartjs-plugin-deferred</a> - Defers initial chart update until chart scrolls into viewport. <ide> - <a href="https://github.com/compwright/chartjs-plugin-draggable" target="_blank">chartjs-plugin-draggable</a> - Makes select chart elements draggable with the mouse. <ide> - <a href="https://github.com/y-takey/chartjs-plugin-stacked100" target="_blank">chartjs-plugin-stacked100</a> - Draws 100% stacked bar chart. <add> - <a href="https://github.com/everestate/chartjs-plugin-waterfall" target="_blank">chartjs-plugin-waterfall</a> - Enables easy use of waterfall charts. <ide> - <a href="https://github.com/chartjs/chartjs-plugin-zoom" target="_blank">chartjs-plugin-zoom</a> - Enables zooming and panning on charts. <ide> <ide> In addition, many plugins can be found on the [npm registry](https://www.npmjs.com/search?q=chartjs-plugin-).
1
Text
Text
fix the wrong name of assertionerror
6b57a51b5469e1c639bc067373c0a985a9462b9a
<ide><path>doc/api/assert.md <ide> try { <ide> } catch (err) { <ide> assert(err instanceof assert.AssertionError); <ide> assert.strictEqual(err.message, message); <del> assert.strictEqual(err.name, 'AssertionError [ERR_ASSERTION]'); <add> assert.strictEqual(err.name, 'AssertionError'); <ide> assert.strictEqual(err.actual, 1); <ide> assert.strictEqual(err.expected, 2); <ide> assert.strictEqual(err.code, 'ERR_ASSERTION');
1
Javascript
Javascript
pass segment hash from js to java
3ea899f3fb8e3c4153f54980855dfd68c50a6b35
<ide><path>Libraries/Core/setUpSegmentFetcher.js <ide> export type GetSegmentFunction = typeof __getSegment; <ide> <ide> function __fetchSegment( <ide> segmentId: number, <del> options: {| <del> +otaBuildNumber: ?string, <del> +requestedModuleName?: ?string, <del> |}, <add> options: $ReadOnly<{ <add> otaBuildNumber: ?string, <add> requestedModuleName: string, <add> segmentHash: string, <add> }>, <ide> callback: (?Error) => void, <ide> ) { <ide> const SegmentFetcher = require('./SegmentFetcher/NativeSegmentFetcher') <ide> global.__fetchSegment = __fetchSegment; <ide> <ide> function __getSegment( <ide> segmentId: number, <del> options: {| <del> +otaBuildNumber: ?string, <del> +requestedModuleName?: ?string, <del> |}, <add> options: $ReadOnly<{ <add> otaBuildNumber: ?string, <add> requestedModuleName: string, <add> segmentHash: string, <add> }>, <ide> callback: (?Error, ?string) => void, <ide> ) { <ide> const SegmentFetcher = require('./SegmentFetcher/NativeSegmentFetcher')
1