content_type (stringclasses 8 values) | main_lang (stringclasses 7 values) | message (stringlengths 1..50) | sha (stringlengths 40..40) | patch (stringlengths 52..962k) | file_count (int64 1..300)
---|---|---|---|---|---
Go | Go | add filesystemtype for containers | 80bd64245f14d4d8a6fc8349cff8b441d770da42 | <ide><path>container.go
<ide> type Container struct {
<ide> ResolvConfPath string
<ide> HostnamePath string
<ide> HostsPath string
<add> FilesystemType string
<ide>
<ide> cmd *exec.Cmd
<ide> stdout *utils.WriteBroadcaster
<ide><path>runtime.go
<ide> import (
<ide> "time"
<ide> )
<ide>
<add>const (
<add> DefaultFilesystemType = "devicemapper"
<add>)
<add>
<ide> var defaultDns = []string{"8.8.8.8", "8.8.4.4"}
<ide>
<ide> type Capabilities struct {
<ide> func (runtime *Runtime) restore() error {
<ide> return err
<ide> }
<ide>
<del> deviceSet := runtime.deviceSet
<del> containers := []*Container{}
<del> containersToMigrate := []*Container{}
<add> var (
<add> containers []*Container
<add> containersToMigrate []*Container
<add> )
<ide>
<ide> for i, v := range dir {
<ide> id := v.Name()
<ide> func (runtime *Runtime) restore() error {
<ide> utils.Debugf("Loaded container %v", container.ID)
<ide> containers = append(containers, container)
<ide>
<del> if !deviceSet.HasDevice(container.ID) {
<add> if container.FilesystemType != DefaultFilesystemType {
<ide> containersToMigrate = append(containersToMigrate, container)
<ide> }
<ide> }
<ide>
<del> // Migrate AUFS containers to device mapper
<add> // Migrate containers to the default filesystem type
<ide> if len(containersToMigrate) > 0 {
<ide> if err := migrateToDeviceMapper(runtime, containersToMigrate); err != nil {
<ide> return err
<ide> func migrateToDeviceMapper(runtime *Runtime, containers []*Container) error {
<ide> fmt.Printf("Failed to remove rw layer %s\n", err)
<ide> }
<ide>
<add> container.FilesystemType = DefaultFilesystemType
<add> if err := container.ToDisk(); err != nil {
<add> fmt.Printf("Failed to save filesystem type to disk %s\n", err)
<add> }
<add>
<ide> fmt.Printf("Successful migration for %s\n", container.ID)
<ide> }
<ide> fmt.Printf("Migration complete\n")
<ide> func (runtime *Runtime) Create(config *Config) (*Container, error) {
<ide> Image: img.ID, // Always use the resolved image id
<ide> NetworkSettings: &NetworkSettings{},
<ide> // FIXME: do we need to store this in the container?
<del> SysInitPath: sysInitPath,
<add> SysInitPath: sysInitPath,
<add> FilesystemType: DefaultFilesystemType,
<ide> }
<ide> container.root = runtime.containerRoot(container.ID)
<ide> // Step 1: create the container directory.
<ide><path>runtime_test.go
<ide> func init() {
<ide> os.Setenv("TEST", "1")
<ide> os.Setenv("DOCKER_LOOPBACK_DATA_SIZE", "209715200") // 200MB
<ide> os.Setenv("DOCKER_LOOPBACK_META_SIZE", "104857600") // 100MB
<del> os.Setenv("DOCKER_BASE_FS_SIZE", "157286400") // 150MB
<add> os.Setenv("DOCKER_BASE_FS_SIZE", "157286400") // 150MB
<ide>
<ide> // Hack to run sys init during unit testing
<ide> if selfPath := utils.SelfPath(); selfPath == "/sbin/init" || selfPath == "/.dockerinit" {
<ide> func TestRestore(t *testing.T) {
<ide> }
<ide> container2.State.Running = false
<ide> }
<add>
<add>func TestContainerCreatedWithDefaultFilesystemType(t *testing.T) {
<add> runtime := mkRuntime(t)
<add> defer nuke(runtime)
<add>
<add> container, _, _ := mkContainer(runtime, []string{"_", "ls", "-al"}, t)
<add> defer runtime.Destroy(container)
<add>
<add> if container.FilesystemType != DefaultFilesystemType {
<add> t.Fatalf("Container filesystem type should be %s but got %s", DefaultFilesystemType, container.FilesystemType)
<add> }
<add>} | 3 |
PHP | PHP | update the console shell for easier maintenance | 69be4bb1e8b58f3c73c8337634918e14f8935e76 | <ide><path>lib/Cake/Console/Command/ConsoleShell.php
<ide> class ConsoleShell extends AppShell {
<ide> */
<ide> public $models = array();
<ide>
<add>/**
<add> * _finished
<add> *
<add> * This shell is perpetual, setting this property to true exits the process
<add> *
<add> * @var mixed
<add> */
<add> protected $_finished = false;
<add>
<add>/**
<add> * _methodPatterns
<add> *
<add> * @var array
<add> */
<add> protected $_methodPatterns = array(
<add> 'help' => '/^(help|\?)/',
<add> '_exit' => '/^(quit|exit)/',
<add> '_models' => '/^models/i',
<add> '_bind' => '/^(\w+) bind (\w+) (\w+)/',
<add> '_unbind' => '/^(\w+) unbind (\w+) (\w+)/',
<add> '_find' => '/.+->find/',
<add> '_save' => '/.+->save/',
<add> '_columns' => '/^(\w+) columns/',
<add> '_routesReload' => '/^routes\s+reload/i',
<add> '_routesShow' => '/^routes\s+show/i',
<add> '_routeToString' => '/^route\s+(\(.*\))$/i',
<add> '_routeToArray' => '/^route\s+(.*)$/i',
<add> );
<add>
<ide> /**
<ide> * Override startup of the Shell
<ide> *
<ide> public function startup() {
<ide> }
<ide> }
<ide>
<add>/**
<add> * getOptionParser
<add> *
<add> * @return void
<add> */
<ide> public function getOptionParser() {
<ide> $description = array(
<ide> 'The interactive console is a tool for testing parts of your',
<ide> public function help() {
<ide> * @return void
<ide> */
<ide> public function main($command = null) {
<del> while (true) {
<add> $this->_finished = false;
<add> while (!$this->_finished) {
<ide> if (empty($command)) {
<ide> $command = trim($this->in(''));
<ide> }
<ide>
<del> switch ($command) {
<del> case 'help':
<del> $this->help();
<del> break;
<del> case 'quit':
<del> case 'exit':
<del> return true;
<del> case 'models':
<del> $this->out(__d('cake_console', 'Model classes:'));
<del> $this->hr();
<del> foreach ($this->models as $model) {
<del> $this->out(" - {$model}");
<del> }
<del> break;
<del> case preg_match("/^(\w+) bind (\w+) (\w+)/", $command, $tmp):
<del> foreach ($tmp as $data) {
<del> $data = strip_tags($data);
<del> $data = str_replace($this->badCommandChars, "", $data);
<del> }
<add> $method = $this->_method($command);
<ide>
<del> $modelA = $tmp[1];
<del> $association = $tmp[2];
<del> $modelB = $tmp[3];
<add> if ($method) {
<add> $this->$method($command);
<add> } else {
<add> $this->out(__d('cake_console', "Invalid command"));
<add> $this->out();
<add> }
<add> $command = '';
<add> }
<add> }
<ide>
<del> if ($this->_isValidModel($modelA) && $this->_isValidModel($modelB) && in_array($association, $this->associations)) {
<del> $this->{$modelA}->bindModel(array($association => array($modelB => array('className' => $modelB))), false);
<del> $this->out(__d('cake_console', "Created %s association between %s and %s",
<del> $association, $modelA, $modelB));
<del> } else {
<del> $this->out(__d('cake_console', "Please verify you are using valid models and association types"));
<del> }
<del> break;
<del> case preg_match("/^(\w+) unbind (\w+) (\w+)/", $command, $tmp):
<del> foreach ($tmp as $data) {
<del> $data = strip_tags($data);
<del> $data = str_replace($this->badCommandChars, "", $data);
<del> }
<add>/**
<add> * Determine the method to process the current command
<add> *
<add> * @param string $command
<add> * @return string or false
<add> */
<add> protected function _method($command) {
<add> foreach ($this->_methodPatterns as $method => $pattern) {
<add> if (preg_match($pattern, $command)) {
<add> return $method;
<add> }
<add> }
<ide>
<del> $modelA = $tmp[1];
<del> $association = $tmp[2];
<del> $modelB = $tmp[3];
<add> return false;
<add> }
<ide>
<del> // Verify that there is actually an association to unbind
<del> $currentAssociations = $this->{$modelA}->getAssociated();
<del> $validCurrentAssociation = false;
<add>/**
<add> * Set the finiished property so that the loop in main method ends
<add> *
<add> * @return void
<add> */
<add> protected function _exit() {
<add> $this->_finished = true;
<add> }
<ide>
<del> foreach ($currentAssociations as $model => $currentAssociation) {
<del> if ($model == $modelB && $association == $currentAssociation) {
<del> $validCurrentAssociation = true;
<del> }
<del> }
<add>/**
<add> * List all models
<add> *
<add> * @return void
<add> */
<add> protected function _models() {
<add> $this->out(__d('cake_console', 'Model classes:'));
<add> $this->hr();
<add> foreach ($this->models as $model) {
<add> $this->out(" - {$model}");
<add> }
<add> }
<ide>
<del> if ($this->_isValidModel($modelA) && $this->_isValidModel($modelB) && in_array($association, $this->associations) && $validCurrentAssociation) {
<del> $this->{$modelA}->unbindModel(array($association => array($modelB)));
<del> $this->out(__d('cake_console', "Removed %s association between %s and %s",
<del> $association, $modelA, $modelB));
<del> } else {
<del> $this->out(__d('cake_console', "Please verify you are using valid models, valid current association, and valid association types"));
<del> }
<del> break;
<del> case (strpos($command, "->find") > 0):
<del> // Remove any bad info
<del> $command = strip_tags($command);
<del> $command = str_replace($this->badCommandChars, "", $command);
<del>
<del> // Do we have a valid model?
<del> list($modelToCheck, $tmp) = explode('->', $command);
<del>
<del> if ($this->_isValidModel($modelToCheck)) {
<del> $findCommand = "\$data = \$this->$command;";
<del> //@codingStandardsIgnoreStart
<del> @eval($findCommand);
<del> //@codingStandardsIgnoreEnd
<del>
<del> if (is_array($data)) {
<del> foreach ($data as $idx => $results) {
<del> if (is_numeric($idx)) { // findAll() output
<del> foreach ($results as $modelName => $result) {
<del> $this->out("$modelName");
<del>
<del> foreach ($result as $field => $value) {
<del> if (is_array($value)) {
<del> foreach ($value as $field2 => $value2) {
<del> $this->out("\t$field2: $value2");
<del> }
<del>
<del> $this->out();
<del> } else {
<del> $this->out("\t$field: $value");
<del> }
<del> }
<del> }
<del> } else { // find() output
<del> $this->out($idx);
<del>
<del> foreach ($results as $field => $value) {
<del> if (is_array($value)) {
<del> foreach ($value as $field2 => $value2) {
<del> $this->out("\t$field2: $value2");
<del> }
<del>
<del> $this->out();
<del> } else {
<del> $this->out("\t$field: $value");
<del> }
<add>/**
<add> * Bind an association
<add> *
<add> * @param mixed $command
<add> * @return void
<add> */
<add> protected function _bind($command) {
<add> preg_match($this->_methodPatterns[__FUNCTION__], $command, $tmp);
<add>
<add> foreach ($tmp as $data) {
<add> $data = strip_tags($data);
<add> $data = str_replace($this->badCommandChars, "", $data);
<add> }
<add>
<add> $modelA = $tmp[1];
<add> $association = $tmp[2];
<add> $modelB = $tmp[3];
<add>
<add> if ($this->_isValidModel($modelA) && $this->_isValidModel($modelB) && in_array($association, $this->associations)) {
<add> $this->{$modelA}->bindModel(array($association => array($modelB => array('className' => $modelB))), false);
<add> $this->out(__d('cake_console', "Created %s association between %s and %s",
<add> $association, $modelA, $modelB));
<add> } else {
<add> $this->out(__d('cake_console', "Please verify you are using valid models and association types"));
<add> }
<add> }
<add>
<add>/**
<add> * Unbind an association
<add> *
<add> * @param mixed $command
<add> * @return void
<add> */
<add> protected function _unbind($command) {
<add> preg_match($this->_methodPatterns[__FUNCTION__], $command, $tmp);
<add>
<add> foreach ($tmp as $data) {
<add> $data = strip_tags($data);
<add> $data = str_replace($this->badCommandChars, "", $data);
<add> }
<add>
<add> $modelA = $tmp[1];
<add> $association = $tmp[2];
<add> $modelB = $tmp[3];
<add>
<add> // Verify that there is actually an association to unbind
<add> $currentAssociations = $this->{$modelA}->getAssociated();
<add> $validCurrentAssociation = false;
<add>
<add> foreach ($currentAssociations as $model => $currentAssociation) {
<add> if ($model == $modelB && $association == $currentAssociation) {
<add> $validCurrentAssociation = true;
<add> }
<add> }
<add>
<add> if ($this->_isValidModel($modelA) && $this->_isValidModel($modelB) && in_array($association, $this->associations) && $validCurrentAssociation) {
<add> $this->{$modelA}->unbindModel(array($association => array($modelB)));
<add> $this->out(__d('cake_console', "Removed %s association between %s and %s",
<add> $association, $modelA, $modelB));
<add> } else {
<add> $this->out(__d('cake_console', "Please verify you are using valid models, valid current association, and valid association types"));
<add> }
<add> }
<add>
<add>/**
<add> * Perform a find
<add> *
<add> * @param mixed $command
<add> * @return void
<add> */
<add> protected function _find($command) {
<add> $command = strip_tags($command);
<add> $command = str_replace($this->badCommandChars, "", $command);
<add>
<add> // Do we have a valid model?
<add> list($modelToCheck, $tmp) = explode('->', $command);
<add>
<add> if ($this->_isValidModel($modelToCheck)) {
<add> $findCommand = "\$data = \$this->$command;";
<add> //@codingStandardsIgnoreStart
<add> @eval($findCommand);
<add> //@codingStandardsIgnoreEnd
<add>
<add> if (is_array($data)) {
<add> foreach ($data as $idx => $results) {
<add> if (is_numeric($idx)) { // findAll() output
<add> foreach ($results as $modelName => $result) {
<add> $this->out("$modelName");
<add>
<add> foreach ($result as $field => $value) {
<add> if (is_array($value)) {
<add> foreach ($value as $field2 => $value2) {
<add> $this->out("\t$field2: $value2");
<ide> }
<add>
<add> $this->out();
<add> } else {
<add> $this->out("\t$field: $value");
<ide> }
<ide> }
<del> } else {
<del> $this->out();
<del> $this->out(__d('cake_console', "No result set found"));
<ide> }
<del> } else {
<del> $this->out(__d('cake_console', "%s is not a valid model", $modelToCheck));
<del> }
<add> } else { // find() output
<add> $this->out($idx);
<ide>
<del> break;
<del> case (strpos($command, '->save') > 0):
<del> // Validate the model we're trying to save here
<del> $command = strip_tags($command);
<del> $command = str_replace($this->badCommandChars, "", $command);
<del> list($modelToSave, $tmp) = explode("->", $command);
<del>
<del> if ($this->_isValidModel($modelToSave)) {
<del> // Extract the array of data we are trying to build
<del> list(, $data) = explode("->save", $command);
<del> $data = preg_replace('/^\(*(array)?\(*(.+?)\)*$/i', '\\2', $data);
<del> $saveCommand = "\$this->{$modelToSave}->save(array('{$modelToSave}' => array({$data})));";
<del> //@codingStandardsIgnoreStart
<del> @eval($saveCommand);
<del> //@codingStandardsIgnoreEnd
<del> $this->out(__d('cake_console', 'Saved record for %s', $modelToSave));
<del> }
<del> break;
<del> case preg_match("/^(\w+) columns/", $command, $tmp):
<del> $modelToCheck = strip_tags(str_replace($this->badCommandChars, "", $tmp[1]));
<del>
<del> if ($this->_isValidModel($modelToCheck)) {
<del> // Get the column info for this model
<del> $fieldsCommand = "\$data = \$this->{$modelToCheck}->getColumnTypes();";
<del> //@codingStandardsIgnoreStart
<del> @eval($fieldsCommand);
<del> //@codingStandardsIgnoreEnd
<del>
<del> if (is_array($data)) {
<del> foreach ($data as $field => $type) {
<del> $this->out("\t{$field}: {$type}");
<add> foreach ($results as $field => $value) {
<add> if (is_array($value)) {
<add> foreach ($value as $field2 => $value2) {
<add> $this->out("\t$field2: $value2");
<add> }
<add>
<add> $this->out();
<add> } else {
<add> $this->out("\t$field: $value");
<ide> }
<ide> }
<del> } else {
<del> $this->out(__d('cake_console', "Please verify that you selected a valid model"));
<del> }
<del> break;
<del> case preg_match("/^routes\s+reload/i", $command, $tmp):
<del> if (!$this->_loadRoutes()) {
<del> $this->err(__d('cake_console', "There was an error loading the routes config. Please check that the file exists and is free of parse errors."));
<del> break;
<ide> }
<del> $this->out(__d('cake_console', "Routes configuration reloaded, %d routes connected", count(Router::$routes)));
<del> break;
<del> case preg_match("/^routes\s+show/i", $command, $tmp):
<del> $this->out(print_r(Hash::combine(Router::$routes, '{n}.template', '{n}.defaults'), true));
<del> break;
<del> case (preg_match("/^route\s+(\(.*\))$/i", $command, $tmp) == true):
<del> //@codingStandardsIgnoreStart
<del> if ($url = eval('return array' . $tmp[1] . ';')) {
<del> //@codingStandardsIgnoreEnd
<del> $this->out(Router::url($url));
<del> }
<del> break;
<del> case preg_match("/^route\s+(.*)/i", $command, $tmp):
<del> $this->out(var_export(Router::parse($tmp[1]), true));
<del> break;
<del> default:
<del> $this->out(__d('cake_console', "Invalid command"));
<del> $this->out();
<add> }
<add> } else {
<add> $this->out();
<add> $this->out(__d('cake_console', "No result set found"));
<ide> }
<del> $command = '';
<add> } else {
<add> $this->out(__d('cake_console', "%s is not a valid model", $modelToCheck));
<add> }
<add> }
<add>
<add>/**
<add> * Save a record
<add> *
<add> * @param mixed $command
<add> * @return void
<add> */
<add> protected function _save($command) {
<add> // Validate the model we're trying to save here
<add> $command = strip_tags($command);
<add> $command = str_replace($this->badCommandChars, "", $command);
<add> list($modelToSave, $tmp) = explode("->", $command);
<add>
<add> if ($this->_isValidModel($modelToSave)) {
<add> // Extract the array of data we are trying to build
<add> list(, $data) = explode("->save", $command);
<add> $data = preg_replace('/^\(*(array)?\(*(.+?)\)*$/i', '\\2', $data);
<add> $saveCommand = "\$this->{$modelToSave}->save(array('{$modelToSave}' => array({$data})));";
<add> //@codingStandardsIgnoreStart
<add> @eval($saveCommand);
<add> //@codingStandardsIgnoreEnd
<add> $this->out(__d('cake_console', 'Saved record for %s', $modelToSave));
<ide> }
<ide> }
<ide>
<add>/**
<add> * Show the columns for a model
<add> *
<add> * @param mixed $command
<add> * @return void
<add> */
<add> protected function _columns($command) {
<add> preg_match($this->_methodPatterns[__FUNCTION__], $command, $tmp);
<add>
<add> $modelToCheck = strip_tags(str_replace($this->badCommandChars, "", $tmp[1]));
<add>
<add> if ($this->_isValidModel($modelToCheck)) {
<add> // Get the column info for this model
<add> $fieldsCommand = "\$data = \$this->{$modelToCheck}->getColumnTypes();";
<add> //@codingStandardsIgnoreStart
<add> @eval($fieldsCommand);
<add> //@codingStandardsIgnoreEnd
<add>
<add> if (is_array($data)) {
<add> foreach ($data as $field => $type) {
<add> $this->out("\t{$field}: {$type}");
<add> }
<add> }
<add> } else {
<add> $this->out(__d('cake_console', "Please verify that you selected a valid model"));
<add> }
<add> }
<add>
<add>/**
<add> * Reload route definitions
<add> *
<add> * @return void
<add> */
<add> protected function _routesReload() {
<add> if (!$this->_loadRoutes()) {
<add> $this->err(__d('cake_console', "There was an error loading the routes config. Please check that the file exists and is free of parse errors."));
<add> break;
<add> }
<add> $this->out(__d('cake_console', "Routes configuration reloaded, %d routes connected", count(Router::$routes)));
<add> }
<add>
<add>/**
<add> * Show all routes
<add> *
<add> * @return void
<add> */
<add> protected function _routesShow() {
<add> $this->out(print_r(Hash::combine(Router::$routes, '{n}.template', '{n}.defaults'), true));
<add> }
<add>
<add>/**
<add> * Parse an array url and show the equivalent url as a string
<add> *
<add> * @param mixed $command
<add> * @return void
<add> */
<add> protected function _routeToString($command) {
<add> preg_match($this->_methodPatterns[__FUNCTION__], $command, $tmp);
<add>
<add> //@codingStandardsIgnoreStart
<add> if ($url = eval('return array' . $tmp[1] . ';')) {
<add> //@codingStandardsIgnoreEnd
<add> $this->out(Router::url($url));
<add> }
<add> }
<add>
<add>/**
<add> * Parse a string url and show as an array
<add> *
<add> * @param mixed $command
<add> * @return void
<add> */
<add> protected function _routeToArray($command) {
<add> preg_match($this->_methodPatterns[__FUNCTION__], $command, $tmp);
<add>
<add> $this->out(var_export(Router::parse($tmp[1]), true));
<add> }
<add>
<ide> /**
<ide> * Tells if the specified model is included in the list of available models
<ide> * | 1 |
Javascript | Javascript | add unit test | d7bc95f7cdda6b2b887f9fe89ccecbedf0d16193 | <ide><path>test/ModuleFilenameHelpers.unittest.js
<add>"use strict";
<add>
<add>const { createFilename } = require("../lib/ModuleFilenameHelpers");
<add>const { createFsFromVolume, Volume } = require("memfs");
<add>const path = require("path");
<add>
<add>let webpack;
<add>
<add>const createSimpleCompiler = progressOptions => {
<add> const compiler = webpack({
<add> context: path.join(__dirname, "fixtures"),
<add> entry: "./a.js",
<add> infrastructureLogging: {
<add> debug: /Progress/
<add> }
<add> });
<add>
<add> compiler.outputFileSystem = createFsFromVolume(new Volume());
<add>
<add> new webpack.ProgressPlugin({
<add> activeModules: true,
<add> ...progressOptions
<add> }).apply(compiler);
<add>
<add> return compiler;
<add>};
<add>
<add>describe("ModuelFilenameHelpers", () => {
<add> beforeEach(() => {
<add> webpack = require("..");
<add> });
<add> describe("createFilename", () => {
<add> // next.js uses filesnames like [id] to indicate route parameters
<add> // Webpack should preserve these patterns when generating source maps
<add> it("Should not truncate [id] characters from a filename", () => {
<add> const compiler = createSimpleCompiler();
<add> const compilation = compiler.createCompilation();
<add>
<add> const options = {
<add> moduleFilenameTemplate: "webpack://[namespace]/[resourcePath]",
<add> namespace: "_N_E"
<add> };
<add>
<add> const module1 = "webpackUser/nextjs/pages/items/[id].tsx";
<add> const result1 = createFilename(module1, options, {
<add> requestShortener: compilation.runtimeTemplate.requestShortener,
<add> chunkGraph: compiler.chunkGraph
<add> });
<add>
<add> expect(result1).toBe(
<add> "webpack://_N_E/webpackUser/nextjs/pages/items/[id].tsx"
<add> );
<add>
<add> // As there's special rules for [id] we should make sure other names work too
<add> const module2 = "webpackUser/nextjs/pages/items/[test].tsx";
<add> const result2 = createFilename(module2, options, {
<add> requestShortener: compilation.runtimeTemplate.requestShortener,
<add> chunkGraph: compiler.chunkGraph
<add> });
<add>
<add> expect(result2).toBe(
<add> "webpack://_N_E/webpackUser/nextjs/pages/items/[test].tsx"
<add> );
<add> });
<add> });
<add>}); | 1 |
PHP | PHP | add test to check model scopes are accessible | 3276dc36f7540fae5197173c35937f9e0046bcef | <ide><path>tests/Integration/Database/EloquentWhereHasMorphTest.php
<ide> public function testOrWhereDoesntHaveMorph()
<ide>
<ide> $this->assertEquals([1, 2, 3], $comments->pluck('id')->all());
<ide> }
<add>
<add> public function testModelScopesAreAccessible()
<add> {
<add> $comments = Comment::whereHasMorph('commentable', [Post::class, Video::class], function (Builder $query) {
<add> $query->someSharedModelScope();
<add> })->get();
<add>
<add> $this->assertEquals([1, 4], $comments->pluck('id')->all());
<add> }
<ide> }
<ide>
<ide> class Comment extends Model
<ide> class Post extends Model
<ide> public $timestamps = false;
<ide>
<ide> protected $guarded = ['id'];
<add>
<add> public function scopeSomeSharedModelScope($query)
<add> {
<add> $query->where('title', '=', 'foo');
<add> }
<ide> }
<ide>
<ide> class Video extends Model
<ide> {
<ide> public $timestamps = false;
<ide>
<ide> protected $guarded = ['id'];
<add>
<add> public function scopeSomeSharedModelScope($query)
<add> {
<add> $query->where('title', '=', 'foo');
<add> }
<ide> } | 1 |
Text | Text | reword sentences in contributing docs for clarity | 0fcef173d408022e59cd8825da79106c01f6c65c | <ide><path>guides/source/contributing_to_ruby_on_rails.md
<ide> As a next step beyond reporting issues, you can help the core team resolve exist
<ide>
<ide> For starters, it helps just to verify bug reports. Can you reproduce the reported issue on your own computer? If so, you can add a comment to the issue saying that you're seeing the same thing.
<ide>
<del>If something is very vague, can you help squash it down into something specific? Maybe you can provide additional information to help reproduce a bug, or help by eliminating needless steps that aren't required to demonstrate the problem.
<add>If an issue is very vague, can you help narrow it down to something more specific? Maybe you can provide additional information to help reproduce a bug, or help by eliminating needless steps that aren't required to demonstrate the problem.
<ide>
<ide> If you find a bug report without a test, it's very useful to contribute a failing test. This is also a great way to get started exploring the source code: looking at the existing test files will teach you how to write more tests. New tests are best contributed in the form of a patch, as explained later on in the "Contributing to the Rails Code" section.
<ide>
<del>Anything you can do to make bug reports more succinct or easier to reproduce is a help to folks trying to write code to fix those bugs - whether you end up writing the code yourself or not.
<add>Anything you can do to make bug reports more succinct or easier to reproduce helps folks trying to write code to fix those bugs - whether you end up writing the code yourself or not.
<ide>
<ide> ### Testing Patches
<ide>
<ide> Once you're happy that the pull request contains a good change, comment on the G
<ide>
<ide> >I like the way you've restructured that code in generate_finder_sql - much nicer. The tests look good too.
<ide>
<del>If your comment simply says "+1", then odds are that other reviewers aren't going to take it too seriously. Show that you took the time to review the pull request.
<add>If your comment simply reads "+1", then odds are that other reviewers aren't going to take it too seriously. Show that you took the time to review the pull request.
<ide>
<ide> Contributing to the Rails Documentation
<ide> --------------------------------------- | 1 |
Text | Text | correct a small typo in exceptions documentation | 0c02bbbfa728267909ea73aeac57b2e99aba5857 | <ide><path>docs/api-guide/exceptions.md
<ide> Any example validation error might look like this:
<ide>
<ide> You can implement custom exception handling by creating a handler function that converts exceptions raised in your API views into response objects. This allows you to control the style of error responses used by your API.
<ide>
<del>The function must take a pair of arguments, this first is the exception to be handled, and the second is a dictionary containing any extra context such as the view currently being handled. The exception handler function should either return a `Response` object, or return `None` if the exception cannot be handled. If the handler returns `None` then the exception will be re-raised and Django will return a standard HTTP 500 'server error' response.
<add>The function must take a pair of arguments, the first is the exception to be handled, and the second is a dictionary containing any extra context such as the view currently being handled. The exception handler function should either return a `Response` object, or return `None` if the exception cannot be handled. If the handler returns `None` then the exception will be re-raised and Django will return a standard HTTP 500 'server error' response.
<ide>
<ide> For example, you might want to ensure that all error responses include the HTTP status code in the body of the response, like so:
<ide> | 1 |
Mixed | Javascript | support util.promisify for fs.read/fs.write | fbcb4f50b81cc57774998b2c3172626b6e4288be | <ide><path>doc/api/fs.md
<ide> If `position` is `null`, data will be read from the current file position.
<ide>
<ide> The callback is given the three arguments, `(err, bytesRead, buffer)`.
<ide>
<add>If this method is invoked as its [`util.promisify()`][]ed version, it returns
<add>a Promise for an object with `bytesRead` and `buffer` properties.
<add>
<ide> ## fs.readdir(path[, options], callback)
<ide> <!-- YAML
<ide> added: v0.1.8
<ide> an integer specifying the number of bytes to write.
<ide> should be written. If `typeof position !== 'number'`, the data will be written
<ide> at the current position. See pwrite(2).
<ide>
<del>The callback will be given three arguments `(err, written, buffer)` where
<del>`written` specifies how many _bytes_ were written from `buffer`.
<add>The callback will be given three arguments `(err, bytesWritten, buffer)` where
<add>`bytesWritten` specifies how many _bytes_ were written from `buffer`.
<add>
<add>If this method is invoked as its [`util.promisify()`][]ed version, it returns
<add>a Promise for an object with `bytesWritten` and `buffer` properties.
<ide>
<ide> Note that it is unsafe to use `fs.write` multiple times on the same file
<ide> without waiting for the callback. For this scenario,
<ide> The following constants are meant for use with the [`fs.Stats`][] object's
<ide> [`net.Socket`]: net.html#net_class_net_socket
<ide> [`stat()`]: fs.html#fs_fs_stat_path_callback
<ide> [`util.inspect(stats)`]: util.html#util_util_inspect_object_options
<add>[`util.promisify()`]: util.html#util_util_promisify_original
<ide> [Caveats]: #fs_caveats
<ide> [Common System Errors]: errors.html#errors_common_system_errors
<ide> [FS Constants]: #fs_fs_constants_1
<ide><path>lib/fs.js
<ide> fs.read = function(fd, buffer, offset, length, position, callback) {
<ide> binding.read(fd, buffer, offset, length, position, req);
<ide> };
<ide>
<add>Object.defineProperty(fs.read, internalUtil.customPromisifyArgs,
<add> { value: ['bytesRead', 'buffer'], enumerable: false });
<add>
<ide> fs.readSync = function(fd, buffer, offset, length, position) {
<ide> if (length === 0) {
<ide> return 0;
<ide> fs.write = function(fd, buffer, offset, length, position, callback) {
<ide> return binding.writeString(fd, buffer, offset, length, req);
<ide> };
<ide>
<add>Object.defineProperty(fs.write, internalUtil.customPromisifyArgs,
<add> { value: ['bytesWritten', 'buffer'], enumerable: false });
<add>
<ide> // usage:
<ide> // fs.writeSync(fd, buffer[, offset[, length[, position]]]);
<ide> // OR
<ide><path>test/parallel/test-fs-promisified.js
<add>'use strict';
<add>const common = require('../common');
<add>const assert = require('assert');
<add>const fs = require('fs');
<add>const path = require('path');
<add>const { promisify } = require('util');
<add>
<add>common.crashOnUnhandledRejection();
<add>
<add>const read = promisify(fs.read);
<add>const write = promisify(fs.write);
<add>
<add>{
<add> const fd = fs.openSync(__filename, 'r');
<add> read(fd, Buffer.alloc(1024), 0, 1024, null).then(common.mustCall((obj) => {
<add> assert.strictEqual(typeof obj.bytesRead, 'number');
<add> assert(obj.buffer instanceof Buffer);
<add> fs.closeSync(fd);
<add> }));
<add>}
<add>
<add>common.refreshTmpDir();
<add>{
<add> const filename = path.join(common.tmpDir, 'write-promise.txt');
<add> const fd = fs.openSync(filename, 'w');
<add> write(fd, Buffer.from('foobar')).then(common.mustCall((obj) => {
<add> assert.strictEqual(typeof obj.bytesWritten, 'number');
<add> assert.strictEqual(obj.buffer.toString(), 'foobar');
<add> fs.closeSync(fd);
<add> }));
<add>} | 3 |
Text | Text | add note for next-connect-redux | bdd5b9ef3d25fb35fde33966520474c5fc4ec894 | <ide><path>examples/with-redux/README.md
<ide> Our page is located at `pages/index.js` so it will map the route `/`. To get the
<ide>
<ide> For safety it is recommended to wrap all pages, no matter if they use Redux or not, so that you should not care about it anymore in all child components.
<ide>
<del>`withRedux` function accepts `makeStore` as first argument, all other arguments are internally passed to `react-redux connect()` function. `makeStore` function will receive initialState as one argument and should return a new instance of redux store each time when called, no memoization needed here. See the [full example](https://github.com/kirill-konshin/next-redux-wrapper#usage) in the Next Redux Wrapper repository.
<add>`withRedux` function accepts `makeStore` as first argument, all other arguments are internally passed to `react-redux connect()` function. `makeStore` function will receive initialState as one argument and should return a new instance of redux store each time when called, no memoization needed here. See the [full example](https://github.com/kirill-konshin/next-redux-wrapper#usage) in the Next Redux Wrapper repository. And there's another package [next-connect-redux](https://github.com/huzidaha/next-connect-redux) available with similar features.
<ide>
<ide> To pass the initial state from the server to the client we pass it as a prop called `initialState` so then it's available when the client takes over.
<ide> | 1 |
Go | Go | make client.notfound error match errdefs.notfound | 5d8ece522b4e0509fe5e8267977c229ee62ccaf4 | <ide><path>client/errors.go
<ide> func ErrorConnectionFailed(host string) error {
<ide>
<ide> type notFound interface {
<ide> error
<del> NotFound() bool // Is the error a NotFound error
<add> NotFound()
<ide> }
<ide>
<ide> // IsErrNotFound returns true if the error is a NotFound error, which is returned
<ide> type objectNotFoundError struct {
<ide> id string
<ide> }
<ide>
<del>func (e objectNotFoundError) NotFound() bool {
<del> return true
<del>}
<add>func (e objectNotFoundError) NotFound() {}
<ide>
<ide> func (e objectNotFoundError) Error() string {
<ide> return fmt.Sprintf("Error: No such %s: %s", e.object, e.id) | 1 |
Text | Text | add v3.16.1 to changelog.md | e3b272e01914c8149a66dd06f83e8122fe82daa4 | <ide><path>CHANGELOG.md
<ide>
<ide> - [#18688](https://github.com/emberjs/ember.js/pull/18688) / [#18621](https://github.com/emberjs/ember.js/pull/18621) Updates Glimmer-VM to v0.46
<ide>
<add>### v3.16.1 (January 31, 2020)
<add>
<add>- [#18691](https://github.com/emberjs/ember.js/pull/18691) [BUGFIX] Updated `component` and `helper` blueprints to use `import { hbs } from 'ember-cli-htmlbars'`.
<add>- [#18698](https://github.com/emberjs/ember.js/pull/18698) [BUGFIX] Ensure tag updates are buffered
<add>- [#18713](https://github.com/emberjs/ember.js/pull/18713) [BUGFIX] Update `@glimmer/syntax` to ensure that using `<Title />`, `<Script />`, and `<Style />` does not error.
<add>- [#18717](https://github.com/emberjs/ember.js/pull/18717) [BUGFIX] Ensure instantiation cannot happen after destruction.
<add>- [#18720](https://github.com/emberjs/ember.js/pull/18720) [BUGFIX] Ensure correct `@ember/edition-utils` is used (1.2.0).
<add>
<ide> ### v3.16.0 (January 20, 2020)
<ide>
<ide> - [#18436](https://github.com/emberjs/ember.js/pull/18436) [DEPRECATION] Deprecate globals resolver per [RFC #331](https://github.com/emberjs/rfcs/blob/master/text/0331-deprecate-globals-resolver.md). | 1 |
Text | Text | use code markup/markdown in headers | 7034a3e68d3281548e9410a5a9b2bb73923a96fd | <ide><path>doc/api/stream.md
<ide> myStream.write('some more data');
<ide> myStream.end('done writing data');
<ide> ```
<ide>
<del>#### Class: stream.Writable
<add>#### Class: `stream.Writable`
<ide> <!-- YAML
<ide> added: v0.9.4
<ide> -->
<ide>
<ide> <!--type=class-->
<ide>
<del>##### Event: 'close'
<add>##### Event: `'close'`
<ide> <!-- YAML
<ide> added: v0.9.4
<ide> changes:
<ide> that no more events will be emitted, and no further computation will occur.
<ide> A [`Writable`][] stream will always emit the `'close'` event if it is
<ide> created with the `emitClose` option.
<ide>
<del>##### Event: 'drain'
<add>##### Event: `'drain'`
<ide> <!-- YAML
<ide> added: v0.9.4
<ide> -->
<ide> function writeOneMillionTimes(writer, data, encoding, callback) {
<ide> }
<ide> ```
<ide>
<del>##### Event: 'error'
<add>##### Event: `'error'`
<ide> <!-- YAML
<ide> added: v0.9.4
<ide> -->
<ide> stream.
<ide> After `'error'`, no further events other than `'close'` *should* be emitted
<ide> (including `'error'` events).
<ide>
<del>##### Event: 'finish'
<add>##### Event: `'finish'`
<ide> <!-- YAML
<ide> added: v0.9.4
<ide> -->
<ide> writer.on('finish', () => {
<ide> writer.end('This is the end\n');
<ide> ```
<ide>
<del>##### Event: 'pipe'
<add>##### Event: `'pipe'`
<ide> <!-- YAML
<ide> added: v0.9.4
<ide> -->
<ide> writer.on('pipe', (src) => {
<ide> reader.pipe(writer);
<ide> ```
<ide>
<del>##### Event: 'unpipe'
<add>##### Event: `'unpipe'`
<ide> <!-- YAML
<ide> added: v0.9.4
<ide> -->
<ide> reader.pipe(writer);
<ide> reader.unpipe(writer);
<ide> ```
<ide>
<del>##### writable.cork()
<add>##### `writable.cork()`
<ide> <!-- YAML
<ide> added: v0.11.2
<ide> -->
<ide> buffered writes in a more optimized manner.
<ide>
<ide> See also: [`writable.uncork()`][].
<ide>
<del>##### writable.destroy(\[error\])
<add>##### `writable.destroy([error])`
<ide> <!-- YAML
<ide> added: v8.0.0
<ide> -->
<ide> the `'drain'` event before destroying the stream.
<ide> Implementors should not override this method,
<ide> but instead implement [`writable._destroy()`][writable-_destroy].
<ide>
<del>##### writable.destroyed
<add>##### `writable.destroyed`
<ide> <!-- YAML
<ide> added: v8.0.0
<ide> -->
<ide> added: v8.0.0
<ide>
<ide> Is `true` after [`writable.destroy()`][writable-destroy] has been called.
<ide>
<del>##### writable.end(\[chunk\[, encoding\]\]\[, callback\])
<add>##### `writable.end([chunk[, encoding]][, callback])`
<ide> <!-- YAML
<ide> added: v0.9.4
<ide> changes:
<ide> file.end('world!');
<ide> // Writing more now is not allowed!
<ide> ```
<ide>
<del>##### writable.setDefaultEncoding(encoding)
<add>##### `writable.setDefaultEncoding(encoding)`
<ide> <!-- YAML
<ide> added: v0.11.15
<ide> changes:
<ide> changes:
<ide> The `writable.setDefaultEncoding()` method sets the default `encoding` for a
<ide> [`Writable`][] stream.
<ide>
<del>##### writable.uncork()
<add>##### `writable.uncork()`
<ide> <!-- YAML
<ide> added: v0.11.2
<ide> -->
<ide> process.nextTick(() => {
<ide>
<ide> See also: [`writable.cork()`][].
<ide>
<del>##### writable.writable
<add>##### `writable.writable`
<ide> <!-- YAML
<ide> added: v11.4.0
<ide> -->
<ide> added: v11.4.0
<ide>
<ide> Is `true` if it is safe to call [`writable.write()`][stream-write].
<ide>
<del>##### writable.writableEnded
<add>##### `writable.writableEnded`
<ide> <!-- YAML
<ide> added: v12.9.0
<ide> -->
<ide> Is `true` after [`writable.end()`][] has been called. This property
<ide> does not indicate whether the data has been flushed, for this use
<ide> [`writable.writableFinished`][] instead.
<ide>
<del>##### writable.writableCorked
<add>##### `writable.writableCorked`
<ide> <!-- YAML
<ide> added: v13.2.0
<ide> -->
<ide> added: v13.2.0
<ide> Number of times [`writable.uncork()`][stream-uncork] needs to be
<ide> called in order to fully uncork the stream.
<ide>
<del>##### writable.writableFinished
<add>##### `writable.writableFinished`
<ide> <!-- YAML
<ide> added: v12.6.0
<ide> -->
<ide> added: v12.6.0
<ide>
<ide> Is set to `true` immediately before the [`'finish'`][] event is emitted.
<ide>
<del>##### writable.writableHighWaterMark
<add>##### `writable.writableHighWaterMark`
<ide> <!-- YAML
<ide> added: v9.3.0
<ide> -->
<ide> added: v9.3.0
<ide> Return the value of `highWaterMark` passed when constructing this
<ide> `Writable`.
<ide>
<del>##### writable.writableLength
<add>##### `writable.writableLength`
<ide> <!-- YAML
<ide> added: v9.4.0
<ide> -->
<ide> This property contains the number of bytes (or objects) in the queue
<ide> ready to be written. The value provides introspection data regarding
<ide> the status of the `highWaterMark`.
<ide>
<del>##### writable.writableObjectMode
<add>##### `writable.writableObjectMode`
<ide> <!-- YAML
<ide> added: v12.3.0
<ide> -->
<ide> added: v12.3.0
<ide>
<ide> Getter for the property `objectMode` of a given `Writable` stream.
<ide>
<del>##### writable.write(chunk\[, encoding\]\[, callback\])
<add>##### `writable.write(chunk[, encoding][, callback])`
<ide> <!-- YAML
<ide> added: v0.9.4
<ide> changes:
<ide> require more fine-grained control over the transfer and generation of data can
<ide> use the [`EventEmitter`][] and `readable.on('readable')`/`readable.read()`
<ide> or the `readable.pause()`/`readable.resume()` APIs.
<ide>
<del>#### Class: stream.Readable
<add>#### Class: `stream.Readable`
<ide> <!-- YAML
<ide> added: v0.9.4
<ide> -->
<ide>
<ide> <!--type=class-->
<ide>
<del>##### Event: 'close'
<add>##### Event: `'close'`
<ide> <!-- YAML
<ide> added: v0.9.4
<ide> changes:
<ide> that no more events will be emitted, and no further computation will occur.
<ide> A [`Readable`][] stream will always emit the `'close'` event if it is
<ide> created with the `emitClose` option.
<ide>
<del>##### Event: 'data'
<add>##### Event: `'data'`
<ide> <!-- YAML
<ide> added: v0.9.4
<ide> -->
<ide> readable.on('data', (chunk) => {
<ide> });
<ide> ```
<ide>
<del>##### Event: 'end'
<add>##### Event: `'end'`
<ide> <!-- YAML
<ide> added: v0.9.4
<ide> -->
<ide> readable.on('end', () => {
<ide> });
<ide> ```
<ide>
<del>##### Event: 'error'
<add>##### Event: `'error'`
<ide> <!-- YAML
<ide> added: v0.9.4
<ide> -->
<ide> to push an invalid chunk of data.
<ide>
<ide> The listener callback will be passed a single `Error` object.
<ide>
<del>##### Event: 'pause'
<add>##### Event: `'pause'`
<ide> <!-- YAML
<ide> added: v0.9.4
<ide> -->
<ide>
<ide> The `'pause'` event is emitted when [`stream.pause()`][stream-pause] is called
<ide> and `readableFlowing` is not `false`.
<ide>
<del>##### Event: 'readable'
<add>##### Event: `'readable'`
<ide> <!-- YAML
<ide> added: v0.9.4
<ide> changes:
<ide> If there are `'data'` listeners when `'readable'` is removed, the stream
<ide> will start flowing, i.e. `'data'` events will be emitted without calling
<ide> `.resume()`.
<ide>
<del>##### Event: 'resume'
<add>##### Event: `'resume'`
<ide> <!-- YAML
<ide> added: v0.9.4
<ide> -->
<ide>
<ide> The `'resume'` event is emitted when [`stream.resume()`][stream-resume] is
<ide> called and `readableFlowing` is not `true`.
<ide>
<del>##### readable.destroy(\[error\])
<add>##### `readable.destroy([error])`
<ide> <!-- YAML
<ide> added: v8.0.0
<ide> -->
<ide> will be ignored.
<ide> Implementors should not override this method, but instead implement
<ide> [`readable._destroy()`][readable-_destroy].
<ide>
<del>##### readable.destroyed
<add>##### `readable.destroyed`
<ide> <!-- YAML
<ide> added: v8.0.0
<ide> -->
<ide> added: v8.0.0
<ide>
<ide> Is `true` after [`readable.destroy()`][readable-destroy] has been called.
<ide>
<del>##### readable.isPaused()
<add>##### `readable.isPaused()`
<ide> <!-- YAML
<ide> added: v0.11.14
<ide> -->
<ide> readable.resume();
<ide> readable.isPaused(); // === false
<ide> ```
<ide>
<del>##### readable.pause()
<add>##### `readable.pause()`
<ide> <!-- YAML
<ide> added: v0.9.4
<ide> -->
<ide> readable.on('data', (chunk) => {
<ide> The `readable.pause()` method has no effect if there is a `'readable'`
<ide> event listener.
<ide>
<del>##### readable.pipe(destination\[, options\])
<add>##### `readable.pipe(destination[, options])`
<ide> <!-- YAML
<ide> added: v0.9.4
<ide> -->
<ide> to prevent memory leaks.
<ide> The [`process.stderr`][] and [`process.stdout`][] `Writable` streams are never
<ide> closed until the Node.js process exits, regardless of the specified options.
<ide>
<del>##### readable.read(\[size\])
<add>##### `readable.read([size])`
<ide> <!-- YAML
<ide> added: v0.9.4
<ide> -->
<ide> also be emitted.
<ide> Calling [`stream.read([size])`][stream-read] after the [`'end'`][] event has
<ide> been emitted will return `null`. No runtime error will be raised.
<ide>
<del>##### readable.readable
<add>##### `readable.readable`
<ide> <!-- YAML
<ide> added: v11.4.0
<ide> -->
<ide> added: v11.4.0
<ide>
<ide> Is `true` if it is safe to call [`readable.read()`][stream-read].
<ide>
<del>##### readable.readableEncoding
<add>##### `readable.readableEncoding`
<ide> <!-- YAML
<ide> added: v12.7.0
<ide> -->
<ide> added: v12.7.0
<ide> Getter for the property `encoding` of a given `Readable` stream. The `encoding`
<ide> property can be set using the [`readable.setEncoding()`][] method.
<ide>
<del>##### readable.readableEnded
<add>##### `readable.readableEnded`
<ide> <!-- YAML
<ide> added: v12.9.0
<ide> -->
<ide> added: v12.9.0
<ide>
<ide> Becomes `true` when [`'end'`][] event is emitted.
<ide>
<del>##### readable.readableFlowing
<add>##### `readable.readableFlowing`
<ide> <!-- YAML
<ide> added: v9.4.0
<ide> -->
<ide> added: v9.4.0
<ide> This property reflects the current state of a `Readable` stream as described
<ide> in the [Stream Three States][] section.
<ide>
<del>##### readable.readableHighWaterMark
<add>##### `readable.readableHighWaterMark`
<ide> <!-- YAML
<ide> added: v9.3.0
<ide> -->
<ide> added: v9.3.0
<ide> Returns the value of `highWaterMark` passed when constructing this
<ide> `Readable`.
<ide>
<del>##### readable.readableLength
<add>##### `readable.readableLength`
<ide> <!-- YAML
<ide> added: v9.4.0
<ide> -->
<ide> This property contains the number of bytes (or objects) in the queue
<ide> ready to be read. The value provides introspection data regarding
<ide> the status of the `highWaterMark`.
<ide>
<del>##### readable.readableObjectMode
<add>##### `readable.readableObjectMode`
<ide> <!-- YAML
<ide> added: v12.3.0
<ide> -->
<ide> added: v12.3.0
<ide>
<ide> Getter for the property `objectMode` of a given `Readable` stream.
<ide>
<del>##### readable.resume()
<add>##### `readable.resume()`
<ide> <!-- YAML
<ide> added: v0.9.4
<ide> changes:
<ide> getReadableStreamSomehow()
<ide> The `readable.resume()` method has no effect if there is a `'readable'`
<ide> event listener.
<ide>
<del>##### readable.setEncoding(encoding)
<add>##### `readable.setEncoding(encoding)`
<ide> <!-- YAML
<ide> added: v0.9.4
<ide> -->
<ide> readable.on('data', (chunk) => {
<ide> });
<ide> ```
<ide>
<del>##### readable.unpipe(\[destination\])
<add>##### `readable.unpipe([destination])`
<ide> <!-- YAML
<ide> added: v0.9.4
<ide> -->
<ide> setTimeout(() => {
<ide> }, 1000);
<ide> ```
<ide>
<del>##### readable.unshift(chunk\[, encoding\])
<add>##### `readable.unshift(chunk[, encoding])`
<ide> <!-- YAML
<ide> added: v0.9.11
<ide> changes:
<ide> custom stream). Following the call to `readable.unshift()` with an immediate
<ide> however it is best to simply avoid calling `readable.unshift()` while in the
<ide> process of performing a read.
<ide>
<del>##### readable.wrap(stream)
<add>##### `readable.wrap(stream)`
<ide> <!-- YAML
<ide> added: v0.9.4
<ide> -->
<ide> myReader.on('readable', () => {
<ide> });
<ide> ```
<ide>
<del>##### readable\[Symbol.asyncIterator\]()
<add>##### `readable[Symbol.asyncIterator]()`
<ide> <!-- YAML
<ide> added: v10.0.0
<ide> changes:
<ide> has less then 64kb of data because no `highWaterMark` option is provided to
<ide>
<ide> ### Duplex and Transform Streams
<ide>
<del>#### Class: stream.Duplex
<add>#### Class: `stream.Duplex`
<ide> <!-- YAML
<ide> added: v0.9.4
<ide> changes:
<ide> Examples of `Duplex` streams include:
<ide> * [zlib streams][zlib]
<ide> * [crypto streams][crypto]
<ide>
<del>#### Class: stream.Transform
<add>#### Class: `stream.Transform`
<ide> <!-- YAML
<ide> added: v0.9.4
<ide> -->
<ide> Examples of `Transform` streams include:
<ide> * [zlib streams][zlib]
<ide> * [crypto streams][crypto]
<ide>
<del>##### transform.destroy(\[error\])
<add>##### `transform.destroy([error])`
<ide> <!-- YAML
<ide> added: v8.0.0
<ide> -->
<ide> Implementors should not override this method, but instead implement
<ide> The default implementation of `_destroy()` for `Transform` also emit `'close'`
<ide> unless `emitClose` is set in false.
<ide>
<del>### stream.finished(stream\[, options\], callback)
<add>### `stream.finished(stream[, options], callback)`
<ide> <!-- YAML
<ide> added: v10.0.0
<ide> -->
<ide> const cleanup = finished(rs, (err) => {
<ide> });
<ide> ```
<ide>
<del>### stream.pipeline(...streams, callback)
<add>### `stream.pipeline(...streams, callback)`
<ide> <!-- YAML
<ide> added: v10.0.0
<ide> -->
<ide> run().catch(console.error);
<ide> after the `callback` has been invoked. In the case of reuse of streams after
<ide> failure, this can cause event listener leaks and swallowed errors.
<ide>
<del>### stream.Readable.from(iterable, \[options\])
<add>### `stream.Readable.from(iterable, [options])`
<ide> <!-- YAML
<ide> added: v12.3.0
<ide> -->
<ide> Custom `Writable` streams *must* call the `new stream.Writable([options])`
<ide> constructor and implement the `writable._write()` and/or `writable._writev()`
<ide> method.
<ide>
<del>#### Constructor: new stream.Writable(\[options\])
<add>#### Constructor: `new stream.Writable([options])`
<ide> <!-- YAML
<ide> changes:
<ide> - version: v10.0.0
<ide> const myWritable = new Writable({
<ide> });
<ide> ```
<ide>
<del>#### writable.\_write(chunk, encoding, callback)
<add>#### `writable._write(chunk, encoding, callback)`
<ide> <!-- YAML
<ide> changes:
<ide> - version: v12.11.0
<ide> The `writable._write()` method is prefixed with an underscore because it is
<ide> internal to the class that defines it, and should never be called directly by
<ide> user programs.
<ide>
<del>#### writable.\_writev(chunks, callback)
<add>#### `writable._writev(chunks, callback)`
<ide>
<ide> * `chunks` {Object[]} The chunks to be written. Each chunk has following
<ide> format: `{ chunk: ..., encoding: ... }`.
<ide> The `writable._writev()` method is prefixed with an underscore because it is
<ide> internal to the class that defines it, and should never be called directly by
<ide> user programs.
<ide>
<del>#### writable.\_destroy(err, callback)
<add>#### `writable._destroy(err, callback)`
<ide> <!-- YAML
<ide> added: v8.0.0
<ide> -->
<ide> added: v8.0.0
<ide> The `_destroy()` method is called by [`writable.destroy()`][writable-destroy].
<ide> It can be overridden by child classes but it **must not** be called directly.
<ide>
<del>#### writable.\_final(callback)
<add>#### `writable._final(callback)`
<ide> <!-- YAML
<ide> added: v8.0.0
<ide> -->
<ide> The `stream.Readable` class is extended to implement a [`Readable`][] stream.
<ide> Custom `Readable` streams *must* call the `new stream.Readable([options])`
<ide> constructor and implement the `readable._read()` method.
<ide>
<del>#### new stream.Readable(\[options\])
<add>#### `new stream.Readable([options])`
<ide> <!-- YAML
<ide> changes:
<ide> - version: v11.2.0
<ide> const myReadable = new Readable({
<ide> });
<ide> ```
<ide>
<del>#### readable.\_read(size)
<add>#### `readable._read(size)`
<ide> <!-- YAML
<ide> added: v0.9.4
<ide> -->
<ide> The `readable._read()` method is prefixed with an underscore because it is
<ide> internal to the class that defines it, and should never be called directly by
<ide> user programs.
<ide>
<del>#### readable.\_destroy(err, callback)
<add>#### `readable._destroy(err, callback)`
<ide> <!-- YAML
<ide> added: v8.0.0
<ide> -->
<ide> added: v8.0.0
<ide> The `_destroy()` method is called by [`readable.destroy()`][readable-destroy].
<ide> It can be overridden by child classes but it **must not** be called directly.
<ide>
<del>#### readable.push(chunk\[, encoding\])
<add>#### `readable.push(chunk[, encoding])`
<ide> <!-- YAML
<ide> changes:
<ide> - version: v8.0.0
<ide> Custom `Duplex` streams *must* call the `new stream.Duplex([options])`
<ide> constructor and implement *both* the `readable._read()` and
<ide> `writable._write()` methods.
<ide>
<del>#### new stream.Duplex(options)
<add>#### `new stream.Duplex(options)`
<ide> <!-- YAML
<ide> changes:
<ide> - version: v8.4.0
<ide> Care must be taken when using `Transform` streams in that data written to the
<ide> stream can cause the `Writable` side of the stream to become paused if the
<ide> output on the `Readable` side is not consumed.
<ide>
<del>#### new stream.Transform(\[options\])
<add>#### `new stream.Transform([options])`
<ide>
<ide> * `options` {Object} Passed to both `Writable` and `Readable`
<ide> constructors. Also has the following fields:
<ide> const myTransform = new Transform({
<ide> });
<ide> ```
<ide>
<del>#### Events: 'finish' and 'end'
<add>#### Events: `'finish'` and `'end'`
<ide>
<ide> The [`'finish'`][] and [`'end'`][] events are from the `stream.Writable`
<ide> and `stream.Readable` classes, respectively. The `'finish'` event is emitted
<ide> after all data has been output, which occurs after the callback in
<ide> [`transform._flush()`][stream-_flush] has been called. In the case of an error,
<ide> neither `'finish'` nor `'end'` should be emitted.
<ide>
<del>#### transform.\_flush(callback)
<add>#### `transform._flush(callback)`
<ide>
<ide> * `callback` {Function} A callback function (optionally with an error
<ide> argument and data) to be called when remaining data has been flushed.
<ide> The `transform._flush()` method is prefixed with an underscore because it is
<ide> internal to the class that defines it, and should never be called directly by
<ide> user programs.
<ide>
<del>#### transform.\_transform(chunk, encoding, callback)
<add>#### `transform._transform(chunk, encoding, callback)`
<ide>
<ide> * `chunk` {Buffer|string|any} The `Buffer` to be transformed, converted from
<ide> the `string` passed to [`stream.write()`][stream-write]. If the stream's
<ide> user programs.
<ide> queue mechanism, and to receive the next chunk, `callback` must be
<ide> called, either synchronously or asynchronously.
<ide>
<del>#### Class: stream.PassThrough
<add>#### Class: `stream.PassThrough`
<ide>
<ide> The `stream.PassThrough` class is a trivial implementation of a [`Transform`][]
<ide> stream that simply passes the input bytes across to the output. Its purpose is | 1 |
Javascript | Javascript | fix travis ci issues with resize-manager tests | 3744df168009f064f4056f766edd08d29bae7bb9 | <ide><path>src/js/resize-manager.js
<ide> class ResizeManager extends Component {
<ide> this.resizeObserver_ = null;
<ide> this.debouncedHandler_ = debounce(() => {
<ide> this.resizeHandler();
<del> }, 100, false, player);
<add> }, 100, false, this);
<ide>
<ide> if (RESIZE_OBSERVER_AVAILABLE) {
<ide> this.resizeObserver_ = new this.ResizeObserver(this.debouncedHandler_);
<ide> this.resizeObserver_.observe(player.el());
<ide>
<ide> } else {
<ide> this.loadListener_ = () => {
<del> if (this.el_.contentWindow) {
<del> Events.on(this.el_.contentWindow, 'resize', this.debouncedHandler_);
<add> if (!this.el_ || this.el_.contentWindow) {
<add> return;
<ide> }
<del> this.off('load', this.loadListener_);
<add>
<add> Events.on(this.el_.contentWindow, 'resize', this.debouncedHandler_);
<ide> };
<ide>
<del> this.on('load', this.loadListener_);
<add> this.one('load', this.loadListener_);
<ide> }
<ide> }
<ide>
<ide> class ResizeManager extends Component {
<ide> * @event Player#playerresize
<ide> * @type {EventTarget~Event}
<ide> */
<add> // make sure player is still around to trigger
<add> // prevents this from causing an error after dispose
<add> if (!this.player_ || !this.player_.trigger) {
<add> return;
<add> }
<add>
<ide> this.player_.trigger('playerresize');
<ide> }
<ide>
<ide> dispose() {
<add> if (this.debouncedHandler_) {
<add> this.debouncedHandler_.cancel();
<add> }
<add>
<ide> if (this.resizeObserver_) {
<ide> if (this.player_.el()) {
<ide> this.resizeObserver_.unobserve(this.player_.el());
<ide> class ResizeManager extends Component {
<ide> this.off('load', this.loadListener_);
<ide> }
<ide>
<del> if (this.debouncedHandler_) {
<del> this.debouncedHandler_.cancel();
<del> }
<del>
<ide> this.ResizeObserver = null;
<ide> this.resizeObserver = null;
<ide> this.debouncedHandler_ = null;
<ide> this.loadListener_ = null;
<del> super.dispose();
<ide> }
<ide>
<ide> } | 1 |
Python | Python | add service_account to google ml engine operator | 2d854c3505ccad66e9a7d94267e51bed800433c2 | <ide><path>airflow/providers/google/cloud/operators/mlengine.py
<ide> class MLEngineStartTrainingJobOperator(BaseOperator):
<ide> :param job_dir: A Google Cloud Storage path in which to store training
<ide> outputs and other data needed for training. (templated)
<ide> :type job_dir: str
<add> :param service_account: Optional service account to use when running the training application.
<add> (templated)
<add> The specified service account must have the `iam.serviceAccounts.actAs` role. The
<add> Google-managed Cloud ML Engine service account must have the `iam.serviceAccountAdmin` role
<add> for the specified service account.
<add> If set to None or missing, the Google-managed Cloud ML Engine service account will be used.
<add> :type service_account: str
<ide> :param project_id: The Google Cloud project name within which MLEngine training job should run.
<ide> If set to None or missing, the default project_id from the Google Cloud connection is used.
<ide> (templated)
<ide> class MLEngineStartTrainingJobOperator(BaseOperator):
<ide> '_runtime_version',
<ide> '_python_version',
<ide> '_job_dir',
<add> '_service_account',
<ide> '_impersonation_chain',
<ide> ]
<ide>
<ide> def __init__(
<ide> runtime_version: Optional[str] = None,
<ide> python_version: Optional[str] = None,
<ide> job_dir: Optional[str] = None,
<add> service_account: Optional[str] = None,
<ide> project_id: Optional[str] = None,
<ide> gcp_conn_id: str = 'google_cloud_default',
<ide> delegate_to: Optional[str] = None,
<ide> def __init__(
<ide> self._runtime_version = runtime_version
<ide> self._python_version = python_version
<ide> self._job_dir = job_dir
<add> self._service_account = service_account
<ide> self._gcp_conn_id = gcp_conn_id
<ide> self._delegate_to = delegate_to
<ide> self._mode = mode
<ide> def execute(self, context):
<ide> if self._job_dir:
<ide> training_request['trainingInput']['jobDir'] = self._job_dir
<ide>
<add> if self._service_account:
<add> training_request['trainingInput']['serviceAccount'] = self._service_account
<add>
<ide> if self._scale_tier is not None and self._scale_tier.upper() == "CUSTOM":
<ide> training_request['trainingInput']['masterType'] = self._master_type
<ide>
<ide><path>tests/providers/google/cloud/operators/test_mlengine.py
<ide> def test_success_create_training_job_with_optional_args(self, mock_hook):
<ide> training_input['trainingInput']['runtimeVersion'] = '1.6'
<ide> training_input['trainingInput']['pythonVersion'] = '3.5'
<ide> training_input['trainingInput']['jobDir'] = 'gs://some-bucket/jobs/test_training'
<add> training_input['trainingInput']['serviceAccount'] = '[email protected]'
<ide>
<ide> success_response = self.TRAINING_INPUT.copy()
<ide> success_response['state'] = 'SUCCEEDED'
<ide> def test_success_create_training_job_with_optional_args(self, mock_hook):
<ide> runtime_version='1.6',
<ide> python_version='3.5',
<ide> job_dir='gs://some-bucket/jobs/test_training',
<add> service_account='[email protected]',
<ide> **self.TRAINING_DEFAULT_ARGS,
<ide> )
<ide> training_op.execute(MagicMock()) | 2 |
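A minimal sketch of how the new `service_account` value ends up in the training request body that `execute()` builds above; the job id, bucket path, and account name are placeholders, and the `jobId` key is assumed from the surrounding operator code rather than shown in this patch.

```python
# Mirrors the execute() logic above: serviceAccount is only added when set.
def build_training_request(job_id, job_dir=None, service_account=None):
    training_request = {"jobId": job_id, "trainingInput": {}}
    if job_dir:
        training_request["trainingInput"]["jobDir"] = job_dir
    if service_account:
        # The account must be usable via iam.serviceAccounts.actAs (see docstring).
        training_request["trainingInput"]["serviceAccount"] = service_account
    return training_request


print(build_training_request(
    "test_training",
    job_dir="gs://some-bucket/jobs/test_training",
    service_account="[email protected]",
))
```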
Python | Python | fix typo in docs for error_handler_spec | c021e58775eaf8e2dc7a91c903dc1c95dc18e829 | <ide><path>flask/app.py
<ide> def __init__(self, import_name, static_path=None, static_url_path=None,
<ide> #: A dictionary of all registered error handlers. The key is `None`
<ide> #: for error handlers active on the application, otherwise the key is
<ide> #: the name of the blueprint. Each key points to another dictionary
<del> #: where they key is the status code of the http exception. The
<add> #: where the key is the status code of the http exception. The
<ide> #: special key `None` points to a list of tuples where the first item
<ide> #: is the class for the instance check and the second the error handler
<ide> #: function. | 1 |
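For reference, a small sketch of the nested structure the corrected comment describes; the handler functions and blueprint name here are made up.

```python
# Illustrative shape of error_handler_spec as described in the docstring above.
def page_not_found(e): return "Not Found", 404
def handle_timeout(e): return "Timed out", 504
def admin_forbidden(e): return "Forbidden", 403

error_handler_spec = {
    None: {                                      # handlers active on the application
        404: page_not_found,                     # key is the HTTP status code
        None: [(TimeoutError, handle_timeout)],  # (exception class, handler) tuples
    },
    "admin": {403: admin_forbidden},             # handlers for the "admin" blueprint
}
print(error_handler_spec[None][404])
```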
PHP | PHP | fix minor spelling error | 8c7d2dc9842f72344cd44dd75e45e3a921e50046 | <ide><path>src/Illuminate/Database/Eloquent/Relations/BelongsToMany.php
<ide> public function getResults()
<ide> public function get($columns = ['*'])
<ide> {
<ide> // First we'll add the proper select columns onto the query so it is run with
<del> // the proper columns. Then, we will get the results and hydrate out pivot
<add> // the proper columns. Then, we will get the results and hydrate our pivot
<ide> // models with the result of those columns as a separate model relation.
<ide> $builder = $this->query->applyScopes();
<ide> | 1 |
Go | Go | remove bash dependency from testrunsetmacaddress | c1a54048e241bd565feabf274b79dfac61c396d4 | <ide><path>integration-cli/docker_cli_run_test.go
<ide> func TestRunNetworkNotInitializedNoneMode(t *testing.T) {
<ide>
<ide> func TestRunSetMacAddress(t *testing.T) {
<ide> mac := "12:34:56:78:9a:bc"
<del> cmd := exec.Command("/bin/bash", "-c", dockerBinary+` run -i --rm --mac-address=`+mac+` busybox /bin/sh -c "ip link show eth0 | tail -1 | awk '{ print \$2 }'"`)
<del> out, _, err := runCommandWithOutput(cmd)
<add>
<add> defer deleteAllContainers()
<add> cmd := exec.Command(dockerBinary, "run", "-i", "--rm", fmt.Sprintf("--mac-address=%s", mac), "busybox", "/bin/sh", "-c", "ip link show eth0 | tail -1 | awk '{print $2}'")
<add> out, ec, err := runCommandWithOutput(cmd)
<ide> if err != nil {
<del> t.Fatal(err)
<add> t.Fatalf("exec failed:\nexit code=%v\noutput=%s", ec, out)
<ide> }
<ide> actualMac := strings.TrimSpace(out)
<ide> if actualMac != mac {
<ide> t.Fatalf("Set MAC address with --mac-address failed. The container has an incorrect MAC address: %q, expected: %q", actualMac, mac)
<ide> }
<ide>
<del> deleteAllContainers()
<ide> logDone("run - setting MAC address with --mac-address")
<ide> }
<ide> | 1 |
Text | Text | add docs for casks that use git urls | 237a4181c08de37d87966a68f3f7aeaf9ce9c414 | <ide><path>docs/Cask-Cookbook.md
<ide> In rare cases, a distribution may not be available over ordinary HTTP/S. Subvers
<ide> | `revision:` | a string identifying the subversion revision to download
<ide> | `trust_cert:` | set to `true` to automatically trust the certificate presented by the server (avoiding an interactive prompt)
<ide>
<add>#### Git URLs
<add>
<add>Artifacts also may be distributed via git repositories. URLs that end in `.git` are automatically assumed to be git repositories, and the following key/value pairs may be appended to `url`:
<add>
<add>| key | value |
<add>| ------------------ | ----------- |
<add>| `using:` | the symbol `:git` is the only legal value
<add>| `tag:` | a string identifying the git tag to download
<add>| `revision:` | a string identifying the git revision to download
<add>| `branch:` | a string identifying the git branch to download
<add>| `only_path:` | a path within the repository to limit the checkout to. If only a single directory of a large repository is required, using this option can significantly speed up downloads. If provided, artifact paths are relative to this path.
<add>
<ide> #### SourceForge/OSDN URLs
<ide>
<ide> SourceForge and OSDN (formerly `SourceForge.JP`) projects are common ways to distribute binaries, but they provide many different styles of URLs to get to the goods. | 1 |
Javascript | Javascript | expose manifest.js.map during development | 4757836a67c58e2dfe2d1dce645e69399dc92b5b | <ide><path>server/index.js
<ide> export default class Server {
<ide> await this.serveStatic(req, res, p)
<ide> },
<ide>
<add> '/_next/:buildId/manifest.js.map': async (req, res, params) => {
<add> if (!this.dev) return this.send404(res)
<add>
<add> this.handleBuildId(params.buildId, res)
<add> const p = join(this.dir, this.dist, 'manifest.js.map')
<add> await this.serveStatic(req, res, p)
<add> },
<add>
<ide> '/_next/:buildId/main.js': async (req, res, params) => {
<ide> if (this.dev) {
<ide> this.handleBuildId(params.buildId, res) | 1 |
PHP | PHP | fix more strict errors | a7fcb0a61c84a08567beded1e1c329859d97385f | <ide><path>lib/Cake/Core/App.php
<ide> protected static function _loadClass($name, $plugin, $type, $originalType, $pare
<ide> * @param boolean $return whether this function should return the contents of the file after being parsed by php or just a success notice
<ide> * @return mixed if $return contents of the file after php parses it, boolean indicating success otherwise
<ide> */
<del> protected function _loadFile($name, $plugin, $search, $file, $return) {
<add> protected static function _loadFile($name, $plugin, $search, $file, $return) {
<ide> $mapped = self::_mapped($name, $plugin);
<ide> if ($mapped) {
<ide> $file = $mapped;
<ide><path>lib/Cake/Test/Case/Core/ObjectTest.php
<ide> public function testRequestActionPlugins() {
<ide> App::build(array(
<ide> 'plugins' => array(CAKE . 'Test' . DS . 'test_app' . DS . 'Plugin' . DS),
<ide> ), true);
<del> CakePlugin::loadAll();
<add> CakePlugin::load('TestPlugin');
<ide> Router::reload();
<ide>
<ide> $result = $this->object->requestAction('/test_plugin/tests/index', array('return'));
<ide><path>lib/Cake/Test/Case/Network/CakeResponseTest.php
<ide> public function testCache() {
<ide> }
<ide>
<ide> /**
<del>* Tests the compress method
<del>*
<del>*/
<add> * Tests the compress method
<add> *
<add> * @return void
<add> */
<ide> public function testCompress() {
<del> $this->skipIf(php_sapi_name() !== 'cli', 'The response compression can only be tested in cli.');
<add> if (php_sapi_name() !== 'cli') {
<add> $this->markTestSkipped('The response compression can only be tested in cli.');
<add> }
<ide>
<ide> $response = new CakeResponse();
<ide> if (ini_get("zlib.output_compression") === '1' || !extension_loaded("zlib")) {
<ide><path>lib/Cake/Test/test_app/Plugin/TestPlugin/Model/Datasource/TestSource.php
<ide> public function describe($model) {
<ide> return compact('model');
<ide> }
<ide>
<del> public function listSources() {
<add> public function listSources($data = null) {
<ide> return array('test_source');
<ide> }
<ide>
<del> public function create($model, $fields = array(), $values = array()) {
<add> public function create(Model $model, $fields = array(), $values = array()) {
<ide> return compact('model', 'fields', 'values');
<ide> }
<ide>
<del> public function read($model, $queryData = array()) {
<add> public function read(Model $model, $queryData = array()) {
<ide> return compact('model', 'queryData');
<ide> }
<ide>
<del> public function update($model, $fields = array(), $values = array()) {
<add> public function update(Model $model, $fields = array(), $values = array()) {
<ide> return compact('model', 'fields', 'values');
<ide> }
<ide>
<del> public function delete($model, $id) {
<add> public function delete(Model $model, $id = null) {
<ide> return compact('model', 'id');
<ide> }
<ide> } | 4 |
PHP | PHP | apply fixes from styleci | c7278f172ecd0447f6f6ce21516bd48273d7c9b7 | <ide><path>src/Illuminate/View/Concerns/ManagesLoops.php
<ide> trait ManagesLoops
<ide> public function addLoop($data)
<ide> {
<ide> $length = is_countable($data) && ! $data instanceof LazyCollection
<del> ? count($data)
<add> ? count($data)
<ide> : null;
<ide>
<ide> $parent = Arr::last($this->loopsStack); | 1 |
Text | Text | remove redundant items from 1.4.3 changes | 528cedaa0cff894c6123526ef511b06f52b2f2f9 | <ide><path>CHANGELOG.md
<ide> - **$animateCss:** ensure animations execute if only a keyframeStyle is provided
<ide> ([97d79eec](https://github.com/angular/angular.js/commit/97d79eec80092f5fae3336c23aa881a72436de55),
<ide> [#12124](https://github.com/angular/angular.js/issues/12124), [#12340](https://github.com/angular/angular.js/issues/12340))
<del>- **$browser:** prevent infinite digest if changing hash when there is no hashPrefix
<del> ([f81ff3be](https://github.com/angular/angular.js/commit/f81ff3beb0c9d19d494c5878086fb57476442b8b),
<del> [#10423](https://github.com/angular/angular.js/issues/10423), [#12145](https://github.com/angular/angular.js/issues/12145))
<del>- **$compile:**
<del> - throw error when requestng new and isolate scopes (async)
<del> ([6333d65b](https://github.com/angular/angular.js/commit/6333d65b76e0796cfbab8a2953af0c8014dba2e1),
<del> [#12215](https://github.com/angular/angular.js/issues/12215), [#12217](https://github.com/angular/angular.js/issues/12217))
<del> - do not write @-bound properties if attribute is not present
<del> ([8a1eb162](https://github.com/angular/angular.js/commit/8a1eb1625c080445ce1e519762e1f2d4fd842b72),
<del> [#12151](https://github.com/angular/angular.js/issues/12151), [#12144](https://github.com/angular/angular.js/issues/12144))
<del> - workaround for IE11 MutationObserver
<del> ([f3b1d0b7](https://github.com/angular/angular.js/commit/f3b1d0b723298a5f8ea21d0704405649cce1b5fc),
<del> [#11781](https://github.com/angular/angular.js/issues/11781))
<del> - exception when using "watch" as isolated scope binding variable in Firefox
<del> ([a6339d30](https://github.com/angular/angular.js/commit/a6339d30d1379689da5eec9647a953f64821f8b0),
<del> [#11627](https://github.com/angular/angular.js/issues/11627))
<del>- **$location:**
<del> - allow navigating outside the original base URL
<del> ([6903b5ec](https://github.com/angular/angular.js/commit/6903b5ec4c04ed6b7c80ef7d638c48639ccdc4bb),
<del> [#11302](https://github.com/angular/angular.js/issues/11302), [#4776](https://github.com/angular/angular.js/issues/4776))
<del> - do not get caught in infinite digest in IE9
<del> ([91b60226](https://github.com/angular/angular.js/commit/91b602263b96b6fce1331208462e18eb647f4d60),
<del> [#11439](https://github.com/angular/angular.js/issues/11439), [#11675](https://github.com/angular/angular.js/issues/11675), [#11935](https://github.com/angular/angular.js/issues/11935), [#12083](https://github.com/angular/angular.js/issues/12083))
<del>- **$parse:**
<del> - set null reference properties to `undefined`
<del> ([71fc3f4f](https://github.com/angular/angular.js/commit/71fc3f4fa0cd12eff335d57efed7c033554749f4),
<del> [#12099](https://github.com/angular/angular.js/issues/12099))
<del> - set null reference properties to `undefined`
<del> ([d19504a1](https://github.com/angular/angular.js/commit/d19504a179355d7801d59a8db0285a1322e04601),
<del> [#11959](https://github.com/angular/angular.js/issues/11959))
<del>- **$sanitize:** dont not remove tab index property
<del> ([799353c7](https://github.com/angular/angular.js/commit/799353c75de28e6fbf52dac6e0721e85b578575a),
<del> [#8371](https://github.com/angular/angular.js/issues/8371), [#5853](https://github.com/angular/angular.js/issues/5853))
<del>- **compile:** assign ctrl return values correctly for multiple directives
<del> ([8caf1802](https://github.com/angular/angular.js/commit/8caf1802e0e93389dec626ef35e04a302aa6c39d),
<del> [#12029](https://github.com/angular/angular.js/issues/12029), [#12036](https://github.com/angular/angular.js/issues/12036))
<del>- **copy:** do not copy the same object twice
<del> ([0e622f7b](https://github.com/angular/angular.js/commit/0e622f7b5bc3d5d0ab0fbc1a1bc69404bd7216d5))
<del>- **forms:** parse exponential notation in numberInputType parser
<del> ([ebd0fbba](https://github.com/angular/angular.js/commit/ebd0fbba8ff90bee0cd016d574643d56a7f81ed0),
<del> [#12121](https://github.com/angular/angular.js/issues/12121), [#12122](https://github.com/angular/angular.js/issues/12122))
<del>- **linky:** allow case insensitive scheme detection
<del> ([8dc09e6d](https://github.com/angular/angular.js/commit/8dc09e6dabb84c2c611cdc9e40adfac989648200),
<del> [#12073](https://github.com/angular/angular.js/issues/12073), [#12073](https://github.com/angular/angular.js/issues/12073))
<ide> - **loader:** define isFunction
<ide> ([9ea52d81](https://github.com/angular/angular.js/commit/9ea52d818bcd2fb3ea8ccc85bf47f9fd5af68843))
<del>- **merge:** treat dates as atomic values instead of objects.
<del> ([6cbbd966](https://github.com/angular/angular.js/commit/6cbbd966479448591f819cbf904e0a3b757613dc),
<del> [#11720](https://github.com/angular/angular.js/issues/11720), [#11720](https://github.com/angular/angular.js/issues/11720))
<ide> - **ngAnimate:** ensure that orphaned elements do not throw errors when animated
<ide> ([e4aeae0c](https://github.com/angular/angular.js/commit/e4aeae0c7303b94135e6df20e6c5e25f2aa0f586),
<ide> [#11975](https://github.com/angular/angular.js/issues/11975), [#12338](https://github.com/angular/angular.js/issues/12338))
<del>- **ngAria:**
<del> - update `aria-valuemin/max` when `min/max` change
<del> ([ebaa0f59](https://github.com/angular/angular.js/commit/ebaa0f598501702ae64d59ada0ae492eaf0e2db6),
<del> [#11770](https://github.com/angular/angular.js/issues/11770), [#11774](https://github.com/angular/angular.js/issues/11774))
<del> - ensure boolean values for aria-hidden and aria-disabled
<del> ([59273354](https://github.com/angular/angular.js/commit/59273354b57dd8d1ad2cd2f4740ffa8923e480f9),
<del> [#11365](https://github.com/angular/angular.js/issues/11365))
<del>- **ngModel:** form validation when there is an Object.prototype enumerable value
<del> ([0934b76b](https://github.com/angular/angular.js/commit/0934b76b72cec86093414834ac4cb7f0946b651d),
<del> [#12066](https://github.com/angular/angular.js/issues/12066))
<del>- **ngOptions:**
<del> - only watch numeric properties of an array
<del> ([14638f4a](https://github.com/angular/angular.js/commit/14638f4a60053b085565e597fc74bd31cf0d372b))
<del> - do not watch properties starting with $
<del> ([34a6da24](https://github.com/angular/angular.js/commit/34a6da24c17356d4ffc70aec3f621a140a9a61ab),
<del> [#11930](https://github.com/angular/angular.js/issues/11930), [#12010](https://github.com/angular/angular.js/issues/12010))
<del> - use reference check only when not using trackBy
<del> ([d7dc14dc](https://github.com/angular/angular.js/commit/d7dc14dc0cdeb9c187d227e19acc8aca7df9d740),
<del> [#11936](https://github.com/angular/angular.js/issues/11936), [#11996](https://github.com/angular/angular.js/issues/11996))
<del>- **orderBy:** ensure correct ordering with arrays of objects and no predicate
<del> ([48e1f560](https://github.com/angular/angular.js/commit/48e1f5605edd32a63318fd78f5165c7d1f1a20f9),
<del> [#11866](https://github.com/angular/angular.js/issues/11866), [#11312](https://github.com/angular/angular.js/issues/11312), [#4282](https://github.com/angular/angular.js/issues/4282))
<del>
<del>
<del>## Features
<del>
<del>- **$compile:** show module name during multidir error
<del> ([351fe4b7](https://github.com/angular/angular.js/commit/351fe4b79c50a45a11af2fcd2aa7b6fd3b70058d),
<del> [#11775](https://github.com/angular/angular.js/issues/11775))
<del>- **$q:** $q.resolve as an alias for $q.when
<del> ([3ef52980](https://github.com/angular/angular.js/commit/3ef529806fef28b41ca4af86a330f39a95699cf6),
<del> [#11944](https://github.com/angular/angular.js/issues/11944), [#11987](https://github.com/angular/angular.js/issues/11987))
<del>- **ngAria:** add option to disable role=button
<del> ([1f5e42e8](https://github.com/angular/angular.js/commit/1f5e42e8821217026ef36a46d36f84d7cd32830a),
<del> [#11580](https://github.com/angular/angular.js/issues/11580), [#12234](https://github.com/angular/angular.js/issues/12234))
<del>
<del>
<del>## Performance Improvements
<del>
<del>- **$compile:** avoid jquery data calls when there is no data
<del> ([9efb0d5e](https://github.com/angular/angular.js/commit/9efb0d5ee961b57c8fc144a3138a15955e4010e2))
<ide>
<ide>
<ide> | 1 |
Python | Python | add the oauth2authentication class | da9d7fb8ec19f289d9d2777738a45007c41a1289 | <ide><path>rest_framework/authentication.py
<ide> from django.utils.encoding import DjangoUnicodeDecodeError
<ide> from rest_framework import exceptions, HTTP_HEADER_ENCODING
<ide> from rest_framework.compat import CsrfViewMiddleware
<add>from rest_framework.compat import oauth2_provider, oauth2
<ide> from rest_framework.authtoken.models import Token
<ide> import base64
<ide>
<ide> def authenticate_header(self, request):
<ide> return 'Token'
<ide>
<ide>
<del># TODO: OAuthAuthentication
<add>class OAuth2Authentication(BaseAuthentication):
<add> """
<add> OAuth 2 authentication backend using `django-oauth2-provider`
<add> """
<add> require_active = True
<add>
<add> def __init__(self, **kwargs):
<add> super(OAuth2Authentication, self).__init__(**kwargs)
<add> if oauth2_provider is None:
<add> raise ImproperlyConfigured("The 'django-oauth2-provider' package could not be imported. It is required for use with the 'OAuth2Authentication' class.")
<add>
<add> def authenticate(self, request):
<add> """
<add> The Bearer type is the only finalized type
<add>
<add> Read the spec for more details
<add> http://tools.ietf.org/html/rfc6749#section-7.1
<add> """
<add> auth = request.META.get('HTTP_AUTHORIZATION', '').split()
<add> print auth
<add> if not auth or auth[0].lower() != "bearer":
<add> return None
<add>
<add> if len(auth) != 2:
<add> raise exceptions.AuthenticationFailed('Invalid token header')
<add>
<add> return self.authenticate_credentials(request, auth[1])
<add>
<add> def authenticate_credentials(self, request, access_token):
<add> """
<add> :returns: two-tuple of (user, auth) if authentication succeeds, or None otherwise.
<add> """
<add>
<add> # authenticate the client
<add> oauth2_client_form = oauth2.forms.ClientAuthForm(request.REQUEST)
<add> if not oauth2_client_form.is_valid():
<add> raise exceptions.AuthenticationFailed("Client could not be validated")
<add> client = oauth2_client_form.cleaned_data.get('client')
<add>
<add> # retrieve the `oauth2.models.OAuth2AccessToken` instance from the access_token
<add> auth_backend = oauth2.backends.AccessTokenBackend()
<add> token = auth_backend.authenticate(access_token, client)
<add> if token is None:
<add> raise exceptions.AuthenticationFailed("Invalid token") # does not exist or is expired
<add>
<add> # TODO check scope
<add> # try:
<add> # self.validate_token(request, consumer, token)
<add> # except oauth2.Error, e:
<add> # print "got e"
<add> # raise exceptions.AuthenticationFailed(e.message)
<add>
<add> if not self.check_active(token.user):
<add> raise exceptions.AuthenticationFailed('User not active: %s' % token.user.username)
<add>
<add> if client and token:
<add> request.user = token.user
<add> return (request.user, None)
<add>
<add> raise exceptions.AuthenticationFailed(
<add> 'You are not allowed to access this resource.')
<add>
<add> return None
<add>
<add> def authenticate_header(self, request):
<add> """
<add> Bearer is the only finalized type currently
<add>
<add> Check details on the `OAuth2Authentication.authenticate` method
<add> """
<add> return 'Bearer'
<add>
<add> def check_active(self, user):
<add> """
<add> Ensures the user has an active account.
<add>
<add> Optimized for the ``django.contrib.auth.models.User`` case.
<add> """
<add> if not self.require_active:
<add> # Ignore & move on.
<add> return True
<add>
<add> return user.is_active
<ide><path>rest_framework/compat.py
<ide> def apply_markdown(text):
<ide> import defusedxml.ElementTree as etree
<ide> except ImportError:
<ide> etree = None
<add>
<add>
<add># OAuth 2 support is optional
<add>try:
<add> import provider as oauth2_provider
<add>except ImportError:
<add> oauth2_provider = None
<add>try:
<add> import provider.oauth2 as oauth2
<add>except ImportError:
<add> oauth2 = None | 2 |
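A hypothetical client-side request against a view protected by this class, just to show where the pieces land: the bearer token goes in the `Authorization` header, and because `authenticate_credentials()` validates the client with `ClientAuthForm(request.REQUEST)`, the client credentials travel as ordinary request parameters in this sketch. The URL, token, and credentials are placeholders.

```python
import requests

resp = requests.get(
    "https://api.example.com/protected/",
    headers={"Authorization": "Bearer 3b5f9c1d..."},  # parsed by authenticate()
    params={
        "client_id": "my-client-id",                  # read by ClientAuthForm
        "client_secret": "my-client-secret",
    },
)
print(resp.status_code)
```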
Ruby | Ruby | fix coverage on travis ci | 5768e32ad20208e8c5c1029ef2c9b47abd2caa2d | <ide><path>Library/Homebrew/dev-cmd/test-bot.rb
<ide> def homebrew
<ide>
<ide> if @tap.nil?
<ide> tests_args = []
<add> tests_args_coverage = []
<ide> if RUBY_TWO
<ide> tests_args << "--official-cmd-taps"
<del> tests_args << "--coverage" if ENV["TRAVIS"]
<add> tests_args_coverage << "--coverage" if ENV["TRAVIS"]
<ide> end
<ide> test "brew", "tests", *tests_args
<ide> test "brew", "tests", "--generic", "--only=integration_cmds",
<ide> *tests_args
<del> test "brew", "tests", "--no-compat"
<add> test "brew", "tests", "--no-compat", *tests_args_coverage
<ide> test "brew", "readall", "--syntax"
<ide> # test update from origin/master to current commit.
<ide> test "brew", "update-test" | 1 |
Javascript | Javascript | add test coverage for readcontext() on the server | fe2ecd276e9b9e57a49ddf2d86ff01677b69b493 | <ide><path>packages/react-dom/src/__tests__/ReactDOMServerIntegrationNewContext-test.js
<ide> describe('ReactDOMServerIntegration', () => {
<ide> });
<ide>
<ide> describe('context', function() {
<del> let PurpleContext, RedContext, Consumer;
<add> let Context, PurpleContextProvider, RedContextProvider, Consumer;
<ide> beforeEach(() => {
<del> let Context = React.createContext('none');
<add> Context = React.createContext('none');
<ide>
<ide> class Parent extends React.Component {
<ide> render() {
<ide> describe('ReactDOMServerIntegration', () => {
<ide> }
<ide> }
<ide> Consumer = Context.Consumer;
<del> PurpleContext = props => <Parent text="purple">{props.children}</Parent>;
<del> RedContext = props => <Parent text="red">{props.children}</Parent>;
<add> PurpleContextProvider = props => (
<add> <Parent text="purple">{props.children}</Parent>
<add> );
<add> RedContextProvider = props => (
<add> <Parent text="red">{props.children}</Parent>
<add> );
<ide> });
<ide>
<ide> itRenders('class child with context', async render => {
<ide> describe('ReactDOMServerIntegration', () => {
<ide> }
<ide>
<ide> const e = await render(
<del> <PurpleContext>
<add> <PurpleContextProvider>
<ide> <ClassChildWithContext />
<del> </PurpleContext>,
<add> </PurpleContextProvider>,
<ide> );
<ide> expect(e.textContent).toBe('purple');
<ide> });
<ide> describe('ReactDOMServerIntegration', () => {
<ide> }
<ide>
<ide> const e = await render(
<del> <PurpleContext>
<add> <PurpleContextProvider>
<ide> <FunctionChildWithContext />
<del> </PurpleContext>,
<add> </PurpleContextProvider>,
<ide> );
<ide> expect(e.textContent).toBe('purple');
<ide> });
<ide> describe('ReactDOMServerIntegration', () => {
<ide> const Child = props => <Grandchild />;
<ide>
<ide> const e = await render(
<del> <PurpleContext>
<add> <PurpleContextProvider>
<ide> <Child />
<del> </PurpleContext>,
<add> </PurpleContextProvider>,
<ide> );
<ide> expect(e.textContent).toBe('purple');
<ide> });
<ide> describe('ReactDOMServerIntegration', () => {
<ide> };
<ide>
<ide> const e = await render(
<del> <PurpleContext>
<del> <RedContext>
<add> <PurpleContextProvider>
<add> <RedContextProvider>
<ide> <Grandchild />
<del> </RedContext>
<del> </PurpleContext>,
<add> </RedContextProvider>
<add> </PurpleContextProvider>,
<ide> );
<ide> expect(e.textContent).toBe('red');
<ide> });
<ide>
<add> itRenders('readContext() in different components', async render => {
<add> function readContext(Ctx, observedBits) {
<add> const dispatcher =
<add> React.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED
<add> .ReactCurrentDispatcher.current;
<add> return dispatcher.readContext(Ctx, observedBits);
<add> }
<add>
<add> class Cls extends React.Component {
<add> render() {
<add> return readContext(Context);
<add> }
<add> }
<add> function Fn() {
<add> return readContext(Context);
<add> }
<add> const Memo = React.memo(() => {
<add> return readContext(Context);
<add> });
<add> const FwdRef = React.forwardRef((props, ref) => {
<add> return readContext(Context);
<add> });
<add>
<add> const e = await render(
<add> <PurpleContextProvider>
<add> <RedContextProvider>
<add> <span>
<add> <Fn />
<add> <Cls />
<add> <Memo />
<add> <FwdRef />
<add> <Consumer>{() => readContext(Context)}</Consumer>
<add> </span>
<add> </RedContextProvider>
<add> </PurpleContextProvider>,
<add> );
<add> expect(e.textContent).toBe('redredredredred');
<add> });
<add>
<ide> itRenders('multiple contexts', async render => {
<ide> const Theme = React.createContext('dark');
<ide> const Language = React.createContext('french'); | 1 |
Text | Text | use kbd element in readline doc prose | dd43715caaffcfe2dafddd3e792cc0fb2b7b9d20 | <ide><path>doc/api/readline.md
<ide> added: v0.1.98
<ide>
<ide> The `'line'` event is emitted whenever the `input` stream receives an
<ide> end-of-line input (`\n`, `\r`, or `\r\n`). This usually occurs when the user
<del>presses the `<Enter>`, or `<Return>` keys.
<add>presses <kbd>Enter</kbd> or <kbd>Return</kbd>.
<ide>
<ide> The listener function is called with a string containing the single line of
<ide> received input. | 1 |
Javascript | Javascript | add sigint kill script | 020a25bb4f4bd6fda898fa7a6dcb819b0893cf51 | <ide><path>server/server.js
<ide> if (process.env.OPBEAT_ID) {
<ide> });
<ide> }
<ide>
<del>var _ = require('lodash'),
<del> Rx = require('rx'),
<del> loopback = require('loopback'),
<del> boot = require('loopback-boot'),
<del> expressState = require('express-state'),
<del> path = require('path'),
<del> setupPassport = require('./component-passport');
<add>const _ = require('lodash');
<add>const Rx = require('rx');
<add>const loopback = require('loopback');
<add>const boot = require('loopback-boot');
<add>const expressState = require('express-state');
<add>const path = require('path');
<add>const setupPassport = require('./component-passport');
<ide>
<ide> Rx.config.longStackSupport = process.env.NODE_DEBUG !== 'production';
<del>var app = loopback();
<del>var isBeta = !!process.env.BETA;
<add>const app = loopback();
<add>const isBeta = !!process.env.BETA;
<ide>
<ide> expressState.extend(app);
<ide> app.set('state namespace', '__fcc__');
<ide> boot(app, {
<ide>
<ide> setupPassport(app);
<ide>
<add>const { db } = app.datasources;
<add>db.on('connected', _.once(() => console.log('db connected')));
<ide> app.start = _.once(function() {
<del> app.listen(app.get('port'), function() {
<add> const server = app.listen(app.get('port'), function() {
<ide> app.emit('started');
<ide> console.log(
<ide> 'freeCodeCamp server listening on port %d in %s',
<ide> app.start = _.once(function() {
<ide> if (isBeta) {
<ide> console.log('freeCodeCamp is in beta mode');
<ide> }
<add> console.log(`connecting to db at ${db.settings.url}`);
<add> });
<add>
<add> process.on('SIGINT', () => {
<add> console.log('Shutting down server');
<add> server.close(() => {
<add> console.log('Server is closed');
<add> });
<add> console.log('closing db connection');
<add> db.disconnect()
<add> .then(() => {
<add> console.log('DB connection closed');
<add> // exit process
<add> // this may close kept alive sockets
<add> // eslint-disable-next-line no-process-exit
<add> process.exit(0);
<add> });
<ide> });
<ide> });
<ide> | 1 |
Go | Go | add lxc version to docker info in debug mode | 921c6994b1ad41c940bdb08732225b8db74b68f2 | <ide><path>api_params.go
<ide> type APIInfo struct {
<ide> Debug bool
<ide> Containers int
<ide> Images int
<del> NFd int `json:",omitempty"`
<del> NGoroutines int `json:",omitempty"`
<del> MemoryLimit bool `json:",omitempty"`
<del> SwapLimit bool `json:",omitempty"`
<add> NFd int `json:",omitempty"`
<add> NGoroutines int `json:",omitempty"`
<add> MemoryLimit bool `json:",omitempty"`
<add> SwapLimit bool `json:",omitempty"`
<add> LXCVersion string `json:",omitempty"`
<ide> }
<ide>
<ide> type APITop struct {
<ide><path>commands.go
<ide> func (cli *DockerCli) CmdInfo(args ...string) error {
<ide> fmt.Fprintf(cli.out, "Debug mode (client): %v\n", os.Getenv("DEBUG") != "")
<ide> fmt.Fprintf(cli.out, "Fds: %d\n", out.NFd)
<ide> fmt.Fprintf(cli.out, "Goroutines: %d\n", out.NGoroutines)
<add> fmt.Fprintf(cli.out, "LXC Version: %s\n", out.LXCVersion)
<ide> }
<ide> if !out.MemoryLimit {
<ide> fmt.Fprintf(cli.err, "WARNING: No memory limit support\n")
<ide><path>server.go
<ide> func (srv *Server) DockerInfo() *APIInfo {
<ide> } else {
<ide> imgcount = len(images)
<ide> }
<add> lxcVersion := ""
<add> if output, err := exec.Command("lxc-version").CombinedOutput(); err == nil {
<add> outputStr := string(output)
<add> if len(strings.SplitN(outputStr, ":", 2)) == 2 {
<add> lxcVersion = strings.TrimSpace(strings.SplitN(string(output), ":", 2)[1])
<add> }
<add> }
<add>
<ide> return &APIInfo{
<ide> Containers: len(srv.runtime.List()),
<ide> Images: imgcount,
<ide> func (srv *Server) DockerInfo() *APIInfo {
<ide> Debug: os.Getenv("DEBUG") != "",
<ide> NFd: utils.GetTotalUsedFds(),
<ide> NGoroutines: runtime.NumGoroutine(),
<add> LXCVersion: lxcVersion,
<ide> }
<ide> }
<ide> | 3 |
Ruby | Ruby | add missing file for revision | aaccd182ea3935b8fa2fd9965a0c6d57ec380a98 | <ide><path>activerecord/lib/active_record/serializers/xml_serializer.rb
<ide> module Serialization
<ide> # <id type="integer">1</id>
<ide> # <rating type="integer">1</rating>
<ide> # <name>37signals</name>
<del> # <clients>
<add> # <clients type="array">
<ide> # <client>
<ide> # <rating type="integer">1</rating>
<ide> # <name>Summit</name> | 1 |
Python | Python | expand test to locate flakiness | d3adb985d1fb97f68acc2e4b8ae54f2bfca05fda | <ide><path>tests/test_trainer.py
<ide> def test_training_arguments_are_left_untouched(self):
<ide> trainer = get_regression_trainer()
<ide> trainer.train()
<ide> args = TrainingArguments("./regression")
<del> self.assertEqual(args.to_dict(), trainer.args.to_dict())
<add> dict1, dict2 = args.to_dict(), trainer.args.to_dict()
<add> for key in dict1.keys():
<add> self.assertEqual(dict1[key], dict2[key])
<ide>
<ide> def test_reproducible_training(self):
<ide> # Checks that training worked, model trained and seed made a reproducible training. | 1 |
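The same diagnostic idea in isolation: asserting per key instead of on the whole dict makes the eventual failure message name the exact flaky field. A minimal sketch with made-up values:

```python
def assert_dicts_match(expected, actual):
    # Failing key shows up in the assertion message instead of a giant dict diff.
    for key in expected:
        assert expected[key] == actual[key], f"mismatch for key {key!r}"

assert_dicts_match({"seed": 42, "lr": 5e-05}, {"seed": 42, "lr": 5e-05})
```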
Javascript | Javascript | set clientopts.port property | 7a33c2cc7cb7710973d57f9f95c216fb55ef5915 | <ide><path>test/parallel/test-tls-cnnic-whitelist.js
<ide> const testCases = [
<ide> rejectUnauthorized: true,
<ide> ca: [loadPEM('fake-cnnic-root-cert')]
<ide> },
<del> errorCode: 'UNABLE_TO_VERIFY_LEAF_SIGNATURE'
<add> errorCode: 'CERT_HAS_EXPIRED'
<ide> },
<ide> // Test 1: for the fix of node#2061
<ide> // agent6-cert.pem is signed by intermediate cert of ca3.
<ide> function runTest(tindex) {
<ide> const server = tls.createServer(tcase.serverOpts, (s) => {
<ide> s.resume();
<ide> }).listen(0, common.mustCall(function() {
<del> tcase.clientOpts = this.address().port;
<add> tcase.clientOpts.port = this.address().port;
<ide> const client = tls.connect(tcase.clientOpts);
<ide> client.on('error', common.mustCall((e) => {
<ide> assert.strictEqual(e.code, tcase.errorCode); | 1 |
Java | Java | use metadata slice | e2baf7277bdfa8b4a4da34fb0f0853413bec1a8f | <ide><path>spring-messaging/src/main/java/org/springframework/messaging/rsocket/DefaultMetadataExtractor.java
<ide> public Map<String, Object> extract(Payload payload, MimeType metadataMimeType) {
<ide> }
<ide> }
<ide> else {
<del> extractEntry(payload.metadata(), metadataMimeType.toString(), result);
<add> extractEntry(payload.metadata().slice(), metadataMimeType.toString(), result);
<ide> }
<ide> return result;
<ide> }
<ide><path>spring-messaging/src/test/java/org/springframework/messaging/rsocket/DefaultMetadataExtractorTests.java
<ide> */
<ide> package org.springframework.messaging.rsocket;
<ide>
<add>import java.nio.charset.StandardCharsets;
<ide> import java.time.Duration;
<ide> import java.util.Collections;
<ide> import java.util.Map;
<ide> import org.junit.jupiter.api.BeforeEach;
<ide> import org.junit.jupiter.api.Test;
<ide>
<add>import org.springframework.core.ResolvableType;
<add>import org.springframework.core.codec.AbstractDataBufferDecoder;
<ide> import org.springframework.core.codec.ByteArrayDecoder;
<ide> import org.springframework.core.codec.StringDecoder;
<ide> import org.springframework.core.io.buffer.DataBuffer;
<ide> import org.springframework.core.io.buffer.DataBufferFactory;
<add>import org.springframework.core.io.buffer.DataBufferUtils;
<add>import org.springframework.lang.Nullable;
<ide> import org.springframework.util.Assert;
<ide> import org.springframework.util.MimeType;
<ide> import org.springframework.util.MimeTypeUtils;
<ide> public void routeWithCustomFormatting() {
<ide> .containsEntry("entry1", "text data");
<ide> }
<ide>
<add> @Test
<add> public void nonCompositeMetadataCanBeReadTwice() {
<add> DefaultMetadataExtractor extractor = new DefaultMetadataExtractor(new TestDecoder());
<add> extractor.metadataToExtract(TEXT_PLAIN, String.class, "name");
<add>
<add> MetadataEncoder encoder = new MetadataEncoder(TEXT_PLAIN, this.strategies).metadata("value", null);
<add> DataBuffer metadata = encoder.encode();
<add> Payload payload = createPayload(metadata);
<add>
<add> Map<String, Object> result = extractor.extract(payload, TEXT_PLAIN);
<add> assertThat(result).hasSize(1).containsEntry("name", "value");
<add>
<add> result = extractor.extract(payload, TEXT_PLAIN);
<add> assertThat(result).hasSize(1).containsEntry("name", "value");
<add>
<add> payload.release();
<add> }
<add>
<ide> @Test
<ide> public void noDecoder() {
<ide> DefaultMetadataExtractor extractor =
<ide> private Payload createPayload(DataBuffer metadata) {
<ide> return PayloadUtils.createPayload(this.strategies.dataBufferFactory().allocateBuffer(), metadata);
<ide> }
<ide>
<add>
<add> /**
<add> * Like StringDecoder but consumes the reader index in order to prove that
<add> * extraction uses a slice and can be read twice.
<add> */
<add> private static class TestDecoder extends AbstractDataBufferDecoder<String> {
<add>
<add> public TestDecoder() {
<add> super(TEXT_PLAIN);
<add> }
<add>
<add> @Override
<add> public String decode(DataBuffer dataBuffer, ResolvableType elementType,
<add> @Nullable MimeType mimeType, @Nullable Map<String, Object> hints) {
<add>
<add> byte[] bytes = new byte[dataBuffer.readableByteCount()];
<add> dataBuffer.read(bytes);
<add> DataBufferUtils.release(dataBuffer);
<add> return new String(bytes, StandardCharsets.UTF_8);
<add> }
<add> }
<ide> } | 2 |
Python | Python | fix convolutional tests | efe5916109e220a429a2cff110edb952d747466f | <ide><path>keras/layers/convolutional.py
<ide> def __init__(self, nb_filter, filter_length,
<ide>
<ide> self.W_constraint = constraints.get(W_constraint)
<ide> self.b_constraint = constraints.get(b_constraint)
<del> self.constraints = [self.W_constraint, self.b_constraint]
<ide>
<ide> self.input_spec = [InputSpec(ndim=3)]
<ide> self.initial_weights = weights
<ide> def build(self, input_shape):
<ide> self.activity_regularizer.set_layer(self)
<ide> self.regularizers.append(self.activity_regularizer)
<ide>
<add> self.constraints = {}
<add> if self.W_constraint:
<add> self.constraints[self.W] = self.W_constraint
<add> if self.b_constraint:
<add> self.constraints[self.b] = self.b_constraint
<add>
<ide> if self.initial_weights is not None:
<ide> self.set_weights(self.initial_weights)
<ide> del self.initial_weights
<ide> def __init__(self, nb_filter, nb_row, nb_col,
<ide>
<ide> self.W_constraint = constraints.get(W_constraint)
<ide> self.b_constraint = constraints.get(b_constraint)
<del> self.constraints = [self.W_constraint, self.b_constraint]
<ide>
<ide> self.input_spec = [InputSpec(ndim=4)]
<ide> self.initial_weights = weights
<ide> def build(self, input_shape):
<ide> self.activity_regularizer.set_layer(self)
<ide> self.regularizers.append(self.activity_regularizer)
<ide>
<add> self.constraints = {}
<add> if self.W_constraint:
<add> self.constraints[self.W] = self.W_constraint
<add> if self.b_constraint:
<add> self.constraints[self.b] = self.b_constraint
<add>
<ide> if self.initial_weights is not None:
<ide> self.set_weights(self.initial_weights)
<ide> del self.initial_weights
<ide> def __init__(self, nb_filter, kernel_dim1, kernel_dim2, kernel_dim3,
<ide>
<ide> self.W_constraint = constraints.get(W_constraint)
<ide> self.b_constraint = constraints.get(b_constraint)
<del> self.constraints = [self.W_constraint, self.b_constraint]
<ide>
<ide> self.input_spec = [InputSpec(ndim=5)]
<ide> self.initial_weights = weights
<ide> def build(self, input_shape):
<ide> self.activity_regularizer.set_layer(self)
<ide> self.regularizers.append(self.activity_regularizer)
<ide>
<add> self.constraints = {}
<add> if self.W_constraint:
<add> self.constraints[self.W] = self.W_constraint
<add> if self.b_constraint:
<add> self.constraints[self.b] = self.b_constraint
<add>
<ide> if self.initial_weights is not None:
<ide> self.set_weights(self.initial_weights)
<ide> del self.initial_weights
<ide> class UpSampling1D(Layer):
<ide> '''
<ide>
<ide> def __init__(self, length=2, **kwargs):
<del> super(UpSampling1D, self).__init__(**kwargs)
<ide> self.length = length
<ide> self.input_spec = [InputSpec(ndim=3)]
<add> super(UpSampling1D, self).__init__(**kwargs)
<ide>
<ide> def get_output_shape_for(self, input_shape):
<ide> return (input_shape[0], self.length * input_shape[1], input_shape[2])
<ide> class UpSampling2D(Layer):
<ide> '''
<ide>
<ide> def __init__(self, size=(2, 2), dim_ordering='th', **kwargs):
<del> super(UpSampling2D, self).__init__(**kwargs)
<ide> self.size = tuple(size)
<ide> assert dim_ordering in {'tf', 'th'}, 'dim_ordering must be in {tf, th}'
<ide> self.dim_ordering = dim_ordering
<ide> self.input_spec = [InputSpec(ndim=4)]
<add> super(UpSampling2D, self).__init__(**kwargs)
<ide>
<ide> def get_output_shape_for(self, input_shape):
<ide> if self.dim_ordering == 'th':
<ide> class UpSampling3D(Layer):
<ide> def __init__(self, size=(2, 2, 2), dim_ordering='th', **kwargs):
<ide> if K._BACKEND != 'theano':
<ide> raise Exception(self.__class__.__name__ +
<del> ' is currently only working with Theano backend.')
<del> super(UpSampling3D, self).__init__(**kwargs)
<add> ' is currently only working with Theano backend.')
<ide> self.size = tuple(size)
<ide> assert dim_ordering in {'tf', 'th'}, 'dim_ordering must be in {tf, th}'
<ide> self.dim_ordering = dim_ordering
<ide> self.input_spec = [InputSpec(ndim=5)]
<add> super(UpSampling3D, self).__init__(**kwargs)
<ide>
<ide> def get_output_shape_for(self, input_shape):
<ide> if self.dim_ordering == 'th':
<ide><path>tests/keras/layers/test_convolutional.py
<ide> import numpy as np
<ide> from numpy.testing import assert_allclose
<ide>
<add>from keras.utils.test_utils import layer_test
<ide> from keras import backend as K
<ide> from keras.layers import convolutional
<ide>
<ide>
<ide> def test_convolution_1d():
<del> nb_samples = 9
<del> nb_steps = 7
<del> input_dim = 10
<del> filter_length = 6
<del> nb_filter = 5
<add> nb_samples = 2
<add> nb_steps = 8
<add> input_dim = 5
<add> filter_length = 3
<add> nb_filter = 4
<ide>
<del> weights_in = [np.ones((nb_filter, input_dim, filter_length, 1)),
<del> np.ones(nb_filter)]
<del>
<del> input = np.ones((nb_samples, nb_steps, input_dim))
<del> for weight in [None, weights_in]:
<del> for border_mode in ['valid', 'same']:
<del> for subsample_length in [1]:
<del> if border_mode == 'same' and subsample_length != 1:
<del> continue
<del> for W_regularizer in [None, 'l2']:
<del> for b_regularizer in [None, 'l2']:
<del> for act_regularizer in [None, 'l2']:
<del> layer = convolutional.Convolution1D(
<del> nb_filter, filter_length,
<del> weights=weight,
<del> border_mode=border_mode,
<del> W_regularizer=W_regularizer,
<del> b_regularizer=b_regularizer,
<del> activity_regularizer=act_regularizer,
<del> subsample_length=subsample_length,
<del> input_shape=(None, input_dim))
<del>
<del> layer.input = K.variable(input)
<del> for train in [True, False]:
<del> out = K.eval(layer.get_output(train))
<del> assert input.shape[0] == out.shape[0]
<del> if border_mode == 'same' and subsample_length == 1:
<del> assert input.shape[1] == out.shape[1]
<del> layer.get_config()
<add> for border_mode in ['valid', 'same']:
<add> for subsample_length in [1]:
<add> if border_mode == 'same' and subsample_length != 1:
<add> continue
<add> layer_test(convolutional.Convolution1D,
<add> kwargs={'nb_filter': nb_filter,
<add> 'filter_length': filter_length,
<add> 'border_mode': border_mode,
<add> 'subsample_length': subsample_length},
<add> input_shape=(nb_samples, nb_steps, input_dim))
<add>
<add> layer_test(convolutional.Convolution1D,
<add> kwargs={'nb_filter': nb_filter,
<add> 'filter_length': filter_length,
<add> 'border_mode': border_mode,
<add> 'W_regularizer': 'l2',
<add> 'b_regularizer': 'l2',
<add> 'activity_regularizer': 'activity_l2',
<add> 'subsample_length': subsample_length},
<add> input_shape=(nb_samples, nb_steps, input_dim))
<ide>
<ide>
<ide> def test_maxpooling_1d():
<del> nb_samples = 9
<del> nb_steps = 7
<del> input_dim = 10
<del>
<del> input = np.ones((nb_samples, nb_steps, input_dim))
<ide> for stride in [1, 2]:
<del> layer = convolutional.MaxPooling1D(stride=stride,
<del> border_mode='valid')
<del> layer.input = K.variable(input)
<del> for train in [True, False]:
<del> K.eval(layer.get_output(train))
<del> layer.get_config()
<add> layer_test(convolutional.MaxPooling1D,
<add> kwargs={'stride': stride,
<add> 'border_mode': 'valid'},
<add> input_shape=(3, 5, 4))
<ide>
<ide>
<ide> def test_averagepooling_1d():
<del> nb_samples = 9
<del> nb_steps = 7
<del> input_dim = 10
<del>
<del> input = np.ones((nb_samples, nb_steps, input_dim))
<ide> for stride in [1, 2]:
<del> layer = convolutional.AveragePooling1D(stride=stride,
<del> border_mode='valid')
<del> layer.input = K.variable(input)
<del> for train in [True, False]:
<del> K.eval(layer.get_output(train))
<del> layer.get_config()
<add> layer_test(convolutional.AveragePooling1D,
<add> kwargs={'stride': stride,
<add> 'border_mode': 'valid'},
<add> input_shape=(3, 5, 4))
<ide>
<ide>
<ide> def test_convolution_2d():
<ide> nb_samples = 8
<del> nb_filter = 9
<del> stack_size = 7
<add> nb_filter = 3
<add> stack_size = 4
<ide> nb_row = 10
<ide> nb_col = 6
<ide>
<del> input_nb_row = 11
<del> input_nb_col = 12
<del>
<del> weights_in = [np.ones((nb_filter, stack_size, nb_row, nb_col)), np.ones(nb_filter)]
<del>
<del> input = np.ones((nb_samples, stack_size, input_nb_row, input_nb_col))
<del> for weight in [None, weights_in]:
<del> for border_mode in ['valid', 'same']:
<del> for subsample in [(1, 1), (2, 2)]:
<del> if border_mode == 'same' and subsample != (1, 1):
<del> continue
<del> for W_regularizer in [None, 'l2']:
<del> for b_regularizer in [None, 'l2']:
<del> for act_regularizer in [None, 'l2']:
<del> layer = convolutional.Convolution2D(
<del> nb_filter, nb_row, nb_col,
<del> weights=weight,
<del> border_mode=border_mode,
<del> W_regularizer=W_regularizer,
<del> b_regularizer=b_regularizer,
<del> activity_regularizer=act_regularizer,
<del> subsample=subsample,
<del> input_shape=(stack_size, None, None))
<del>
<del> layer.input = K.variable(input)
<del> for train in [True, False]:
<del> out = K.eval(layer.get_output(train))
<del> if border_mode == 'same' and subsample == (1, 1):
<del> assert out.shape[2:] == input.shape[2:]
<del> layer.get_config()
<del>
<del>
<del>def test_convolution_2d_dim_ordering():
<del> nb_filter = 4
<del> nb_row = 3
<del> nb_col = 2
<del> stack_size = 3
<del>
<del> np.random.seed(1337)
<del> weights = [np.random.random((nb_filter, stack_size, nb_row, nb_col)),
<del> np.random.random(nb_filter)]
<del> input = np.random.random((1, stack_size, 10, 10))
<del>
<del> layer = convolutional.Convolution2D(
<del> nb_filter, nb_row, nb_col,
<del> weights=weights,
<del> input_shape=input.shape[1:],
<del> dim_ordering='th')
<del> layer.input = K.variable(input)
<del> out_th = K.eval(layer.get_output(False))
<del>
<del> input = np.transpose(input, (0, 2, 3, 1))
<del> weights[0] = np.transpose(weights[0], (2, 3, 1, 0))
<del> layer = convolutional.Convolution2D(
<del> nb_filter, nb_row, nb_col,
<del> weights=weights,
<del> input_shape=input.shape[1:],
<del> dim_ordering='tf')
<del> layer.input = K.variable(input)
<del> out_tf = K.eval(layer.get_output(False))
<del>
<del> assert_allclose(out_tf, np.transpose(out_th, (0, 2, 3, 1)), atol=1e-05)
<add> for border_mode in ['valid', 'same']:
<add> for subsample in [(1, 1), (2, 2)]:
<add> if border_mode == 'same' and subsample != (1, 1):
<add> continue
<add>
<add> layer_test(convolutional.Convolution2D,
<add> kwargs={'nb_filter': nb_filter,
<add> 'nb_row': 3,
<add> 'nb_col': 3,
<add> 'border_mode': border_mode,
<add> 'subsample': subsample},
<add> input_shape=(nb_samples, stack_size, nb_row, nb_col))
<add>
<add> layer_test(convolutional.Convolution2D,
<add> kwargs={'nb_filter': nb_filter,
<add> 'nb_row': 3,
<add> 'nb_col': 3,
<add> 'border_mode': border_mode,
<add> 'W_regularizer': 'l2',
<add> 'b_regularizer': 'l2',
<add> 'activity_regularizer': 'activity_l2',
<add> 'subsample': subsample},
<add> input_shape=(nb_samples, stack_size, nb_row, nb_col))
<ide>
<ide>
<ide> def test_maxpooling_2d():
<del> nb_samples = 9
<del> stack_size = 7
<del> input_nb_row = 11
<del> input_nb_col = 12
<ide> pool_size = (3, 3)
<ide>
<del> input = np.ones((nb_samples, stack_size, input_nb_row, input_nb_col))
<ide> for strides in [(1, 1), (2, 2)]:
<del> layer = convolutional.MaxPooling2D(strides=strides,
<del> border_mode='valid',
<del> pool_size=pool_size)
<del> layer.input = K.variable(input)
<del> for train in [True, False]:
<del> K.eval(layer.get_output(train))
<del> layer.get_config()
<del>
<del>
<del>def test_maxpooling_2d_dim_ordering():
<del> stack_size = 3
<del>
<del> input = np.random.random((1, stack_size, 10, 10))
<del>
<del> layer = convolutional.MaxPooling2D(
<del> (2, 2),
<del> input_shape=input.shape[1:],
<del> dim_ordering='th')
<del> layer.input = K.variable(input)
<del> out_th = K.eval(layer.get_output(False))
<del>
<del> input = np.transpose(input, (0, 2, 3, 1))
<del> layer = convolutional.MaxPooling2D(
<del> (2, 2),
<del> input_shape=input.shape[1:],
<del> dim_ordering='tf')
<del> layer.input = K.variable(input)
<del> out_tf = K.eval(layer.get_output(False))
<del>
<del> assert_allclose(out_tf, np.transpose(out_th, (0, 2, 3, 1)), atol=1e-05)
<add> layer_test(convolutional.MaxPooling2D,
<add> kwargs={'strides': strides,
<add> 'border_mode': 'valid',
<add> 'pool_size': pool_size},
<add> input_shape=(3, 4, 11, 12))
<ide>
<ide>
<ide> def test_averagepooling_2d():
<del> nb_samples = 9
<del> stack_size = 7
<del> input_nb_row = 11
<del> input_nb_col = 12
<add> pool_size = (3, 3)
<ide>
<del> input = np.ones((nb_samples, stack_size, input_nb_row, input_nb_col))
<ide> for border_mode in ['valid', 'same']:
<ide> for pool_size in [(2, 2), (3, 3), (4, 4), (5, 5)]:
<ide> for strides in [(1, 1), (2, 2)]:
<del> layer = convolutional.AveragePooling2D(strides=strides,
<del> border_mode=border_mode,
<del> pool_size=pool_size)
<del> layer.input = K.variable(input)
<del> for train in [True, False]:
<del> out = K.eval(layer.get_output(train))
<del> if border_mode == 'same' and strides == (1, 1):
<del> assert input.shape == out.shape
<del> layer.get_config()
<add> layer_test(convolutional.AveragePooling2D,
<add> kwargs={'strides': strides,
<add> 'border_mode': border_mode,
<add> 'pool_size': pool_size},
<add> input_shape=(3, 4, 11, 12))
<ide>
<ide>
<ide> @pytest.mark.skipif(K._BACKEND != 'theano', reason="Requires Theano backend")
<ide> def test_convolution_3d():
<del> nb_samples = 8
<del> nb_filter = 9
<del> stack_size = 7
<del> len_conv_dim1 = 2
<del> len_conv_dim2 = 10
<del> len_conv_dim3 = 6
<add> nb_samples = 2
<add> nb_filter = 5
<add> stack_size = 4
<add> kernel_dim1 = 2
<add> kernel_dim2 = 3
<add> kernel_dim3 = 1
<ide>
<ide> input_len_dim1 = 10
<ide> input_len_dim2 = 11
<ide> input_len_dim3 = 12
<ide>
<del> weights_in = [np.ones((nb_filter, stack_size, len_conv_dim1, len_conv_dim2, len_conv_dim3)),
<del> np.ones(nb_filter)]
<del>
<del> input = np.ones((nb_samples, stack_size, input_len_dim1,
<del> input_len_dim2, input_len_dim3))
<del> for weight in [None, weights_in]:
<del> for border_mode in ['same', 'valid']:
<del> for subsample in [(1, 1, 1), (2, 2, 2)]:
<del> if border_mode == 'same' and subsample != (1, 1, 1):
<del> continue
<del> for W_regularizer in [None, 'l2']:
<del> for b_regularizer in [None, 'l2']:
<del> for act_regularizer in [None, 'l2']:
<del> layer = convolutional.Convolution3D(
<del> nb_filter, len_conv_dim1, len_conv_dim2, len_conv_dim3,
<del> weights=weight,
<del> border_mode=border_mode,
<del> W_regularizer=W_regularizer,
<del> b_regularizer=b_regularizer,
<del> activity_regularizer=act_regularizer,
<del> subsample=subsample,
<del> input_shape=(stack_size, None, None, None))
<del>
<del> layer.input = K.variable(input)
<del> for train in [True, False]:
<del> out = K.eval(layer.get_output(train))
<del> if border_mode == 'same' and subsample == (1, 1, 1):
<del> assert out.shape[2:] == input.shape[2:]
<del> layer.get_config()
<add> for border_mode in ['same', 'valid']:
<add> for subsample in [(1, 1, 1), (2, 2, 2)]:
<add> if border_mode == 'same' and subsample != (1, 1, 1):
<add> continue
<add>
<add> layer_test(convolutional.Convolution3D,
<add> kwargs={'nb_filter': nb_filter,
<add> 'kernel_dim1': kernel_dim1,
<add> 'kernel_dim2': kernel_dim2,
<add> 'kernel_dim3': kernel_dim3,
<add> 'border_mode': border_mode,
<add> 'subsample': subsample},
<add> input_shape=(nb_samples, stack_size, input_len_dim1, input_len_dim2, input_len_dim3))
<add>
<add> layer_test(convolutional.Convolution3D,
<add> kwargs={'nb_filter': nb_filter,
<add> 'kernel_dim1': kernel_dim1,
<add> 'kernel_dim2': kernel_dim2,
<add> 'kernel_dim3': kernel_dim3,
<add> 'border_mode': border_mode,
<add> 'W_regularizer': 'l2',
<add> 'b_regularizer': 'l2',
<add> 'activity_regularizer': 'activity_l2',
<add> 'subsample': subsample},
<add> input_shape=(nb_samples, stack_size,
<add> input_len_dim1, input_len_dim2, input_len_dim3))
<ide>
<ide>
<ide> @pytest.mark.skipif(K._BACKEND != 'theano', reason="Requires Theano backend")
<ide> def test_maxpooling_3d():
<del> nb_samples = 9
<del> stack_size = 7
<del> input_len_dim1 = 10
<del> input_len_dim2 = 11
<del> input_len_dim3 = 12
<ide> pool_size = (3, 3, 3)
<ide>
<del> input = np.ones((nb_samples, stack_size, input_len_dim1,
<del> input_len_dim2, input_len_dim3))
<ide> for strides in [(1, 1, 1), (2, 2, 2)]:
<del> layer = convolutional.MaxPooling3D(strides=strides,
<del> border_mode='valid',
<del> pool_size=pool_size)
<del> layer.input = K.variable(input)
<del> for train in [True, False]:
<del> K.eval(layer.get_output(train))
<del> layer.get_config()
<add> layer_test(convolutional.MaxPooling3D,
<add> kwargs={'strides': strides,
<add> 'border_mode': 'valid',
<add> 'pool_size': pool_size},
<add> input_shape=(3, 4, 11, 12, 10))
<ide>
<ide>
<ide> @pytest.mark.skipif(K._BACKEND != 'theano', reason="Requires Theano backend")
<ide> def test_averagepooling_3d():
<del> nb_samples = 9
<del> stack_size = 7
<del> input_len_dim1 = 10
<del> input_len_dim2 = 11
<del> input_len_dim3 = 12
<ide> pool_size = (3, 3, 3)
<ide>
<del> input = np.ones((nb_samples, stack_size, input_len_dim1,
<del> input_len_dim2, input_len_dim3))
<ide> for strides in [(1, 1, 1), (2, 2, 2)]:
<del> layer = convolutional.AveragePooling3D(strides=strides,
<del> border_mode='valid',
<del> pool_size=pool_size)
<del> layer.input = K.variable(input)
<del> for train in [True, False]:
<del> K.eval(layer.get_output(train))
<del> layer.get_config()
<add> layer_test(convolutional.AveragePooling3D,
<add> kwargs={'strides': strides,
<add> 'border_mode': 'valid',
<add> 'pool_size': pool_size},
<add> input_shape=(3, 4, 11, 12, 10))
<ide>
<ide>
<ide> def test_zero_padding_2d():
<ide> def test_zero_padding_2d():
<ide> input_nb_col = 12
<ide>
<ide> input = np.ones((nb_samples, stack_size, input_nb_row, input_nb_col))
<add>
<add> # basic test
<add> layer_test(convolutional.ZeroPadding2D,
<add> kwargs={'padding': (2, 2)},
<add> input_shape=input.shape)
<add>
<add> # correctness test
<ide> layer = convolutional.ZeroPadding2D(padding=(2, 2))
<del> layer.input = K.variable(input)
<del> for train in [True, False]:
<del> out = K.eval(layer.get_output(train))
<del> for offset in [0, 1, -1, -2]:
<del> assert_allclose(out[:, :, offset, :], 0.)
<del> assert_allclose(out[:, :, :, offset], 0.)
<del> assert_allclose(out[:, :, 2:-2, 2:-2], 1.)
<add> layer.set_input(K.variable(input), shape=input.shape)
<add>
<add> out = K.eval(layer.output)
<add> for offset in [0, 1, -1, -2]:
<add> assert_allclose(out[:, :, offset, :], 0.)
<add> assert_allclose(out[:, :, :, offset], 0.)
<add> assert_allclose(out[:, :, 2:-2, 2:-2], 1.)
<ide> layer.get_config()
<ide>
<ide>
<ide> def test_zero_padding_3d():
<ide>
<ide> input = np.ones((nb_samples, stack_size, input_len_dim1,
<ide> input_len_dim2, input_len_dim3))
<add>
<add> # basic test
<add> layer_test(convolutional.ZeroPadding3D,
<add> kwargs={'padding': (2, 2, 2)},
<add> input_shape=input.shape)
<add>
<add> # correctness test
<ide> layer = convolutional.ZeroPadding3D(padding=(2, 2, 2))
<del> layer.input = K.variable(input)
<del> for train in [True, False]:
<del> out = K.eval(layer.get_output(train))
<del> for offset in [0, 1, -1, -2]:
<del> assert_allclose(out[:, :, offset, :, :], 0.)
<del> assert_allclose(out[:, :, :, offset, :], 0.)
<del> assert_allclose(out[:, :, :, :, offset], 0.)
<del> assert_allclose(out[:, :, 2:-2, 2:-2, 2:-2], 1.)
<add> layer.set_input(K.variable(input), shape=input.shape)
<add> out = K.eval(layer.output)
<add> for offset in [0, 1, -1, -2]:
<add> assert_allclose(out[:, :, offset, :, :], 0.)
<add> assert_allclose(out[:, :, :, offset, :], 0.)
<add> assert_allclose(out[:, :, :, :, offset], 0.)
<add> assert_allclose(out[:, :, 2:-2, 2:-2, 2:-2], 1.)
<ide> layer.get_config()
<ide>
<ide>
<ide> def test_upsampling_1d():
<del> nb_samples = 9
<del> nb_steps = 7
<del> input_dim = 10
<del>
<del> input = np.ones((nb_samples, nb_steps, input_dim))
<del> for length in [2, 3, 9]:
<del> layer = convolutional.UpSampling1D(length=length)
<del> layer.input = K.variable(input)
<del> for train in [True, False]:
<del> out = K.eval(layer.get_output(train))
<del> assert out.shape[1] == length * nb_steps
<del> layer.get_config()
<add> layer_test(convolutional.UpSampling1D,
<add> kwargs={'length': 2},
<add> input_shape=(3, 5, 4))
<ide>
<ide>
<ide> def test_upsampling_2d():
<ide> def test_upsampling_2d():
<ide> for length_col in [2, 3, 9]:
<ide> layer = convolutional.UpSampling2D(
<ide> size=(length_row, length_col),
<del> input_shape=input.shape[1:],
<ide> dim_ordering=dim_ordering)
<del> layer.input = K.variable(input)
<del> for train in [True, False]:
<del> out = K.eval(layer.get_output(train))
<del> if dim_ordering == 'th':
<del> assert out.shape[2] == length_row * input_nb_row
<del> assert out.shape[3] == length_col * input_nb_col
<del> else: # tf
<del> assert out.shape[1] == length_row * input_nb_row
<del> assert out.shape[2] == length_col * input_nb_col
<add> layer.set_input(K.variable(input), shape=input.shape)
<ide>
<del> # compare with numpy
<del> if dim_ordering == 'th':
<del> expected_out = np.repeat(input, length_row, axis=2)
<del> expected_out = np.repeat(expected_out, length_col, axis=3)
<del> else: # tf
<del> expected_out = np.repeat(input, length_row, axis=1)
<del> expected_out = np.repeat(expected_out, length_col, axis=2)
<add> out = K.eval(layer.output)
<add> if dim_ordering == 'th':
<add> assert out.shape[2] == length_row * input_nb_row
<add> assert out.shape[3] == length_col * input_nb_col
<add> else: # tf
<add> assert out.shape[1] == length_row * input_nb_row
<add> assert out.shape[2] == length_col * input_nb_col
<ide>
<del> assert_allclose(out, expected_out)
<add> # compare with numpy
<add> if dim_ordering == 'th':
<add> expected_out = np.repeat(input, length_row, axis=2)
<add> expected_out = np.repeat(expected_out, length_col, axis=3)
<add> else: # tf
<add> expected_out = np.repeat(input, length_row, axis=1)
<add> expected_out = np.repeat(expected_out, length_col, axis=2)
<ide>
<del> layer.get_config()
<add> assert_allclose(out, expected_out)
<ide>
<ide>
<ide> @pytest.mark.skipif(K._BACKEND != 'theano', reason="Requires Theano backend")
<ide> def test_upsampling_3d():
<ide> for length_dim3 in [2, 3, 9]:
<ide> layer = convolutional.UpSampling3D(
<ide> size=(length_dim1, length_dim2, length_dim3),
<del> input_shape=input.shape[1:],
<ide> dim_ordering=dim_ordering)
<del> layer.input = K.variable(input)
<del> for train in [True, False]:
<del> out = K.eval(layer.get_output(train))
<del> if dim_ordering == 'th':
<del> assert out.shape[2] == length_dim1 * input_len_dim1
<del> assert out.shape[3] == length_dim2 * input_len_dim2
<del> assert out.shape[4] == length_dim3 * input_len_dim3
<del> else: # tf
<del> assert out.shape[1] == length_dim1 * input_len_dim1
<del> assert out.shape[2] == length_dim2 * input_len_dim2
<del> assert out.shape[3] == length_dim3 * input_len_dim3
<del>
<del> # compare with numpy
<del> if dim_ordering == 'th':
<del> expected_out = np.repeat(input, length_dim1, axis=2)
<del> expected_out = np.repeat(expected_out, length_dim2, axis=3)
<del> expected_out = np.repeat(expected_out, length_dim3, axis=4)
<del> else: # tf
<del> expected_out = np.repeat(input, length_dim1, axis=1)
<del> expected_out = np.repeat(expected_out, length_dim2, axis=2)
<del> expected_out = np.repeat(expected_out, length_dim3, axis=3)
<del>
<del> assert_allclose(out, expected_out)
<del>
<del> layer.get_config()
<add> layer.set_input(K.variable(input), shape=input.shape)
<add>
<add> out = K.eval(layer.output)
<add> if dim_ordering == 'th':
<add> assert out.shape[2] == length_dim1 * input_len_dim1
<add> assert out.shape[3] == length_dim2 * input_len_dim2
<add> assert out.shape[4] == length_dim3 * input_len_dim3
<add> else: # tf
<add> assert out.shape[1] == length_dim1 * input_len_dim1
<add> assert out.shape[2] == length_dim2 * input_len_dim2
<add> assert out.shape[3] == length_dim3 * input_len_dim3
<add>
<add> # compare with numpy
<add> if dim_ordering == 'th':
<add> expected_out = np.repeat(input, length_dim1, axis=2)
<add> expected_out = np.repeat(expected_out, length_dim2, axis=3)
<add> expected_out = np.repeat(expected_out, length_dim3, axis=4)
<add> else: # tf
<add> expected_out = np.repeat(input, length_dim1, axis=1)
<add> expected_out = np.repeat(expected_out, length_dim2, axis=2)
<add> expected_out = np.repeat(expected_out, length_dim3, axis=3)
<add>
<add> assert_allclose(out, expected_out)
<ide>
<ide>
<ide> if __name__ == '__main__':
<del> pytest.main([__file__])
<add> # pytest.main([__file__])
<add> test_convolution_1d() | 2 |
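The padding invariant these rewritten tests assert — zeros at the border offsets, untouched ones in the interior — can be reproduced with plain NumPy, independent of the Keras/backend API being migrated in this patch. The snippet below is only an illustrative sketch of that invariant (shapes are made up), not part of the commit:

```python
import numpy as np

x = np.ones((1, 1, 4, 4))                                  # (samples, channels, rows, cols)
out = np.pad(x, ((0, 0), (0, 0), (2, 2), (2, 2)), mode='constant')  # like ZeroPadding2D((2, 2))

assert out.shape == (1, 1, 8, 8)
for offset in [0, 1, -1, -2]:
    assert (out[:, :, offset, :] == 0).all()               # padded rows are zero
    assert (out[:, :, :, offset] == 0).all()               # padded cols are zero
assert (out[:, :, 2:-2, 2:-2] == 1).all()                  # interior keeps the input values
```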
Java | Java | add marble diagrams to the single.delay method | fd76594dc7e19d4da889613f2d9afdf6043879b9 | <ide><path>src/main/java/io/reactivex/Single.java
<ide> public final Flowable<T> concatWith(SingleSource<? extends T> other) {
<ide> /**
<ide> * Delays the emission of the success signal from the current Single by the specified amount.
<ide> * An error signal will not be delayed.
<add> * <p>
<add> * <img width="640" height="457" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.delay.png" alt="">
<ide> * <dl>
<ide> * <dt><b>Scheduler:</b></dt>
<ide> * <dd>{@code delay} operates by default on the {@code computation} {@link Scheduler}.</dd>
<ide> public final Single<T> delay(long time, TimeUnit unit) {
<ide>
<ide> /**
<ide> * Delays the emission of the success or error signal from the current Single by the specified amount.
<add> * <p>
<add> * <img width="640" height="457" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.delay.e.png" alt="">
<ide> * <dl>
<ide> * <dt><b>Scheduler:</b></dt>
<ide> * <dd>{@code delay} operates by default on the {@code computation} {@link Scheduler}.</dd>
<ide> public final Single<T> delay(long time, TimeUnit unit, boolean delayError) {
<ide> /**
<ide> * Delays the emission of the success signal from the current Single by the specified amount.
<ide> * An error signal will not be delayed.
<add> * <p>
<add> * <img width="640" height="457" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.delay.s.png" alt="">
<ide> * <dl>
<ide> * <dt><b>Scheduler:</b></dt>
<ide> * <dd>you specify the {@link Scheduler} where the non-blocking wait and emission happens</dd>
<ide> public final Single<T> delay(final long time, final TimeUnit unit, final Schedul
<ide>
<ide> /**
<ide> * Delays the emission of the success or error signal from the current Single by the specified amount.
<add> * <p>
<add> * <img width="640" height="457" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.delay.se.png" alt="">
<ide> * <dl>
<ide> * <dt><b>Scheduler:</b></dt>
<ide> * <dd>you specify the {@link Scheduler} where the non-blocking wait and emission happens</dd> | 1 |
Python | Python | fix mistaken word in nanprod docstring | fa54e448d759fb0265c8ef95caae4fd97233ce9f | <ide><path>numpy/lib/nanfunctions.py
<ide> def nanprod(a, axis=None, dtype=None, out=None, keepdims=np._NoValue):
<ide> Parameters
<ide> ----------
<ide> a : array_like
<del> Array containing numbers whose sum is desired. If `a` is not an
<add> Array containing numbers whose product is desired. If `a` is not an
<ide> array, a conversion is attempted.
<ide> axis : int, optional
<ide> Axis along which the product is computed. The default is to compute | 1 |
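For reference, the behaviour the corrected docstring describes — NaN entries contributing a factor of one to the product — looks like this; a small illustrative check, not part of the patch:

```python
import numpy as np

assert np.nanprod([1.0, 2.0, np.nan]) == 2.0               # NaN is treated as 1
assert np.allclose(np.nanprod([[1.0, np.nan],
                               [3.0, 4.0]], axis=1), [1.0, 12.0])
```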
Ruby | Ruby | fix button tag without options | 4a7c8ef793dd5b13026efcc4a5a004bf96b0d1ea | <ide><path>actionview/lib/action_view/helpers/form_tag_helper.rb
<ide> def submit_tag(value = "Save changes", options = {})
<ide> # # => <button data-disable-with="Please wait..." name="button" type="submit">Checkout</button>
<ide> #
<ide> def button_tag(content_or_options = nil, options = nil, &block)
<del> if block_given? && content_or_options.is_a?(Hash)
<add> if block_given?
<ide> options = button_tag_options_with_defaults(content_or_options)
<ide> content_tag :button, options, &block
<ide> else
<ide> def sanitize_to_id(name)
<ide>
<ide> def button_tag_options_with_defaults(options)
<ide> options = options || {}
<del> options.stringify_keys!
<add> options = options.stringify_keys
<ide>
<ide> default_options = { 'name' => 'button', 'type' => 'submit' }
<ide> options.reverse_merge default_options | 1 |
Javascript | Javascript | fix webcrypto generatekey() with empty usages | 677bd668b7a0b2141fa01e9b4760c8f5fa98e1d9 | <ide><path>lib/internal/crypto/webcrypto.js
<ide> async function generateKey(
<ide> algorithm = normalizeAlgorithm(algorithm);
<ide> validateBoolean(extractable, 'extractable');
<ide> validateArray(keyUsages, 'keyUsages');
<add> if (keyUsages.length === 0) {
<add> throw lazyDOMException(
<add> 'Usages cannot be empty when creating a key',
<add> 'SyntaxError');
<add> }
<ide> switch (algorithm.name) {
<ide> case 'RSASSA-PKCS1-v1_5':
<ide> // Fall through
<ide><path>test/parallel/test-webcrypto-keygen.js
<ide> const vectors = {
<ide> // Test bad usages
<ide> {
<ide> async function test(name) {
<add> await assert.rejects(
<add> subtle.generateKey(
<add> {
<add> name, ...vectors[name].algorithm
<add> },
<add> true,
<add> []),
<add> { message: /Usages cannot be empty/ });
<add>
<ide> const invalidUsages = [];
<ide> allUsages.forEach((usage) => {
<ide> if (!vectors[name].usages.includes(usage))
<ide><path>test/parallel/test-webcrypto-sign-verify-hmac.js
<ide> async function testSign({ hash,
<ide> }
<ide>
<ide> await assert.rejects(
<del> subtle.generateKey({ name }, false, []), {
<add> subtle.generateKey({ name }, false, ['sign', 'verify']), {
<ide> name: 'TypeError',
<ide> code: 'ERR_MISSING_OPTION',
<ide> message: 'algorithm.hash is required' | 3 |
Python | Python | pass historical masks using the taps of scan | 7278db105dc3ae1d4c1e38b497e54c863a7836e3 | <ide><path>keras/layers/recurrent.py
<ide> from ..layers.core import Layer, default_mask_val
<ide> from six.moves import range
<ide>
<add>def get_mask(X, mask_val, steps_back=0):
<add> '''
<add> Given X, a (timesteps, nb_samples, n_dimensions) tensor, returns a mask
<add> tensor with dimension (timesteps + steps_back, nb_samples, 1). This
<add> matrix is left-padded with `steps_back` zeros in the time dimension, and
<add> elsewhere has a 1 for every entry except for those corresponding to a
<add> vector in X that has every entry equal to mask_val.
<add> '''
<add> mask = T.neq(X, mask_val).sum(axis=2) > 0 # (time, nb_samples) matrix with a 1 for every unmasked entry
<add> mask = T.addbroadcast(mask[:, :, np.newaxis], 2) # (time, nb_samples, 1) matrix.
<add> if steps_back > 0:
<add> # left-pad in time with 0
<add> pad = alloc_zeros_matrix(steps_back, mask.shape[1], 1).astype('uint8')
<add> mask = T.concatenate([pad, mask], axis=0)
<add> return mask
<add>
<add>
<add>
<ide> class SimpleRNN(Layer):
<ide> '''
<ide> Fully connected RNN where output is to fed back to input.
<ide> def get_output(self, train):
<ide> # new shape: (time, nb_samples, input_dim) -> because theano.scan iterates over main dimension
<ide> X = X.dimshuffle((1,0,2))
<ide>
<del> mask = T.neq(X, self.mask_val).sum(axis=2) > 0 # (time, nb_samples) matrix with a 1 for every unmasked entry
<del> mask = T.addbroadcast(mask[:, :, np.newaxis], 2)
<del>
<del> mask_tm1 = alloc_zeros_matrix(*mask.shape).astype('int8')
<del> mask_tm1 = T.addbroadcast(T.set_subtensor(mask_tm1[1:, :, :], mask[:-1, :, :]), 2)
<add> mask = get_mask(X, self.mask_val, steps_back=1)
<ide>
<ide> x = T.dot(X, self.W) + self.b
<ide>
<ide> def get_output(self, train):
<ide> # Iterate over the first dimension of the x array (=time).
<ide> outputs, updates = theano.scan(
<ide> self._step, # this will be called with arguments (sequences[i], outputs[i-1], non_sequences[i])
<del> sequences=[x, mask, mask_tm1], # tensors to iterate over, inputs to _step
<add> sequences=[x, dict(input=mask,taps=[0, -1])], # tensors to iterate over, inputs to _step
<ide> # initialization of the output. Input to _step with default tap=-1.
<ide> outputs_info=T.unbroadcast(alloc_zeros_matrix(X.shape[1], self.output_dim), 1),
<ide> non_sequences=self.U, # static inputs to _step | 1 |
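The mask that get_mask builds symbolically can be mirrored in plain NumPy to see what the scan taps receive; this is only a sketch of the same computation outside Theano, with made-up shapes:

```python
import numpy as np

def get_mask_np(X, mask_val, steps_back=0):
    # (time, nb_samples): 1 where at least one feature differs from mask_val
    mask = ((X != mask_val).sum(axis=2) > 0).astype('uint8')
    mask = mask[:, :, np.newaxis]                          # (time, nb_samples, 1)
    if steps_back > 0:                                     # left-pad in time with zeros
        pad = np.zeros((steps_back, mask.shape[1], 1), dtype='uint8')
        mask = np.concatenate([pad, mask], axis=0)
    return mask

X = np.ones((5, 2, 3))
X[1, 0, :] = -1.0                                          # fully masked timestep for sample 0
m = get_mask_np(X, mask_val=-1.0, steps_back=1)
assert m.shape == (6, 2, 1)                                # one extra leading step of zeros
assert m[0].sum() == 0 and m[2, 0, 0] == 0 and m[2, 1, 0] == 1
```

With `taps=[0, -1]`, step t of the scan then sees this left-padded mask at t and t-1 — the current and previous masks that the removed mask_tm1 tensor used to provide explicitly.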
Ruby | Ruby | add test run by line, and report error by line | ff0a7611cf912ce7ae32966cc42da5dbdff3a0bc | <ide><path>railties/lib/rails/commands/test.rb
<ide> ENV["RAILS_ENV"] = "test"
<del>
<ide> require "rails/test_unit/runner"
<del>options = Rails::TestRunner::Options.parse(ARGV)
<add>require "rails/test_unit/reporter"
<ide>
<add>options = Rails::TestRunner::Options.parse(ARGV)
<ide> $: << File.expand_path("../../test", APP_PATH)
<ide>
<add>$runner = Rails::TestRunner.new(options)
<add>
<add>def Minitest.plugin_rails_init(options)
<add> self.reporter << Rails::TestUnitReporter.new(options[:io], options)
<add> if method = $runner.find_method
<add> options[:filter] = "/^(#{method})$/"
<add> end
<add>end
<add>Minitest.extensions << 'rails'
<add>
<ide> # Config Rails backtrace in tests.
<del>Rails::TestRunner.new(options).run
<add>$runner.run
<ide><path>railties/lib/rails/test_unit/reporter.rb
<add>require "minitest"
<add>
<add>module Rails
<add> class TestUnitReporter < Minitest::StatisticsReporter
<add> def report
<add> io.puts
<add> io.puts "Failed test:"
<add> io.puts
<add> io.puts aggregated_results
<add> end
<add>
<add> def aggregated_results # :nodoc:
<add> filtered_results = results.dup
<add> filtered_results.reject!(&:skipped?) unless options[:verbose]
<add> filtered_results.map do |result|
<add> result.failures.map { |failure|
<add> "bin/rails test #{failure.location}\n"
<add> }.join "\n"
<add> end.join
<add> end
<add> end
<add>end
<ide><path>railties/lib/rails/test_unit/runner.rb
<ide> require "ostruct"
<ide> require "optparse"
<ide> require "rake/file_list"
<add>require "method_source"
<ide>
<ide> module Rails
<ide> class TestRunner
<ide> def run
<ide> run_tests
<ide> end
<ide>
<add> def find_method
<add> return if @line.blank?
<add> method = test_methods.find do |test_method, start_line, end_line|
<add> (start_line..end_line).include?(@line.to_i)
<add> end
<add> method.first if method
<add> end
<add>
<ide> private
<ide> def run_tests
<ide> test_files.to_a.each do |file|
<ide> def run_tests
<ide> def test_files
<ide> if @options[:pattern]
<ide> pattern = NAMED_PATTERNS[@options[:pattern]]
<add> unless pattern
<add> filename, @line = @options[:pattern].split(':')
<add> return [filename] if filename
<add> end
<ide> else
<ide> pattern = "test/**/*_test.rb"
<ide> end
<ide> def enable_backtrace
<ide> def filter_tests_by_name
<ide> ARGV.push("-n", @options[:name])
<ide> end
<add>
<add> def test_methods
<add> methods_map = []
<add> suites = Minitest::Runnable.runnables.shuffle
<add> suites.each do |suite_class|
<add> suite_class.runnable_methods.each do |test_method|
<add> method = suite_class.instance_method(test_method)
<add> start_line = method.source_location.last
<add> end_line = method.source.split("\n").size + start_line - 1
<add> methods_map << [test_method, start_line, end_line]
<add> end
<add> end
<add> methods_map
<add> end
<ide> end
<ide> end | 3 |
Text | Text | use sentence case for headers in building.md | e0ac501b92c20c3f112b7edca68c419801824d6f | <ide><path>BUILDING.md
<ide> If you can reproduce a test failure, search for it in the
<ide> [Node.js issue tracker](https://github.com/nodejs/node/issues) or
<ide> file a new issue.
<ide>
<del>## Table of Contents
<add>## Table of contents
<ide>
<ide> * [Supported platforms](#supported-platforms)
<ide> * [Input](#input)
<ide> To install this version of Node.js into a system directory:
<ide> [sudo] make install
<ide> ```
<ide>
<del>#### Running Tests
<add>#### Running tests
<ide>
<ide> To verify the build:
<ide>
<ide> You can use
<ide> [node-code-ide-configs](https://github.com/nodejs/node-code-ide-configs)
<ide> to run/debug tests, if your IDE configs are present.
<ide>
<del>#### Running Coverage
<add>#### Running coverage
<ide>
<ide> It's good practice to ensure any code you add or change is covered by tests.
<ide> You can do so by running the test suite with coverage enabled: | 1 |
Text | Text | fix changelog.md table formatting | f3f2468bdc414e2c57af9a8c767e04bf5e4c4c93 | <ide><path>CHANGELOG.md
<ide> release.
<ide>
<ide> <table>
<ide> <tr>
<del> <th><a href="doc/changelogs/CHANGELOG_V7.md">v7</a><sup>Current</sup></td>
<del> <th title="LTS Until 2019-04"><a href="doc/changelogs/CHANGELOG_V6.md">v6</a><sup>LTS</sup</th>
<del> <th><a href="doc/changelogs/CHANGELOG_V5.md">v5</a></th>
<add> <th><a href="doc/changelogs/CHANGELOG_V7.md">v7</a><sup>Current</sup></th>
<add> <th title="LTS Until 2019-04"><a href="doc/changelogs/CHANGELOG_V6.md">v6</a><sup>LTS</sup></th>
<add> <th title="Unsupported Since 2016-07-01"><a href="doc/changelogs/CHANGELOG_V5.md">v5</a><sup>EOL</sup></th>
<ide> <th title="LTS Until 2018-04"><a href="doc/changelogs/CHANGELOG_V4.md">v4</a><sup>LTS</sup></th>
<del> <th title="LTS Until 2016-12-31"><a href="doc/changelogs/CHANGELOG_V012.md">v0.12</a><sup>LTS</sup></th>
<del> <th title="LTS Until 2016-10-31" colspan="3"><a href="doc/changelogs/CHANGELOG_V010.md">v0.10</a><sup>LTS</sup></th>
<add> <th title="Unsupported Since 2017-01-01"><a href="doc/changelogs/CHANGELOG_V012.md">v0.12</a><sup>EOL</sup></th>
<add> <th title="Unsupported Since 2016-11-01" colspan="3"><a href="doc/changelogs/CHANGELOG_V010.md">v0.10</a><sup>EOL</sup></th>
<ide> </tr>
<ide> <tr>
<ide> <td valign="top">
<ide> release.
<ide> <a href="doc/changelogs/CHANGELOG_V6.md#6.0.0">6.0.0</a><br/>
<ide> </td>
<ide> <td valign="top">
<del><b><a href="doc/changelogs/CHANGELOG_V5.md#5.11.1">5.11.1</a></b><br/>
<add><a href="doc/changelogs/CHANGELOG_V5.md#5.11.1">5.11.1</a><br/>
<ide> <a href="doc/changelogs/CHANGELOG_V5.md#5.11.0">5.11.0</a><br/>
<ide> <a href="doc/changelogs/CHANGELOG_V5.md#5.10.1">5.10.1</a><br/>
<ide> <a href="doc/changelogs/CHANGELOG_V5.md#5.10.0">5.10.0</a><br/>
<ide> release.
<ide> <a href="doc/changelogs/CHANGELOG_V4.md#4.0.0">4.0.0</a><br/>
<ide> </td>
<ide> <td valign="top">
<del><b><a href="doc/changelogs/CHANGELOG_V012.md#0.12.18">0.12.18</a></b><br/>
<add><a href="doc/changelogs/CHANGELOG_V012.md#0.12.18">0.12.18</a><br/>
<ide> <a href="doc/changelogs/CHANGELOG_V012.md#0.12.17">0.12.17</a><br/>
<ide> <a href="doc/changelogs/CHANGELOG_V012.md#0.12.16">0.12.16</a><br/>
<ide> <a href="doc/changelogs/CHANGELOG_V012.md#0.12.15">0.12.15</a><br/>
<ide> release.
<ide> <a href="doc/changelogs/CHANGELOG_V012.md#0.12.0">0.12.0</a><br/>
<ide> </td>
<ide> <td valign="top">
<del><b><a href="doc/changelogs/CHANGELOG_V010.md#0.10.48">0.10.48</a></b><br/>
<add><a href="doc/changelogs/CHANGELOG_V010.md#0.10.48">0.10.48</a><br/>
<ide> <a href="doc/changelogs/CHANGELOG_V010.md#0.10.47">0.10.47</a><br/>
<ide> <a href="doc/changelogs/CHANGELOG_V010.md#0.10.46">0.10.46</a><br/>
<ide> <a href="doc/changelogs/CHANGELOG_V010.md#0.10.45">0.10.45</a><br/> | 1 |
Python | Python | fix bug in test_dtype_with_converters_and_usecols | 4b1aab31cc66d4a5abd3d2d6b40e461cadd79d2a | <ide><path>numpy/lib/tests/test_io.py
<ide> def test_dtype_with_converters(self):
<ide> def test_dtype_with_converters_and_usecols(self):
<ide> dstr = "1,5,-1,1:1\n2,8,-1,1:n\n3,3,-2,m:n\n"
<ide> dmap = {'1:1':0, '1:n':1, 'm:1':2, 'm:n':3}
<del> dtyp = [('E1','i4'),('E2','i4'),('E3','i2'),('N', 'i1')]
<add> dtyp = [('e1','i4'),('e2','i4'),('e3','i2'),('n', 'i1')]
<ide> conv = {0: int, 1: int, 2: int, 3: lambda r: dmap[r.decode()]}
<ide> test = np.recfromcsv(TextIO(dstr,), dtype=dtyp, delimiter=',',
<ide> names=None, converters=conv)
<ide> control = np.rec.array([[1,5,-1,0], [2,8,-1,1], [3,3,-2,3]], dtype=dtyp)
<ide> assert_equal(test, control)
<del> dtyp = [('E1','i4'),('E2','i4'),('N', 'i1')]
<add> dtyp = [('e1','i4'),('e2','i4'),('n', 'i1')]
<ide> test = np.recfromcsv(TextIO(dstr,), dtype=dtyp, delimiter=',',
<ide> usecols=(0,1,3), names=None, converters=conv)
<ide> control = np.rec.array([[1,5,0], [2,8,1], [3,3,3]], dtype=dtyp) | 1 |
Javascript | Javascript | treat thrown errors as regular rejections | e13eeabd7e34a78becec06cfbe72c23f2dcb85f9 | <ide><path>lib/promises-aplus/promises-aplus-test-adapter.js
<ide> minErr,
<ide> extend
<ide> */
<del>/* eslint-disable no-unused-vars */
<ide>
<del>var isFunction = function isFunction(value) {return typeof value === 'function';};
<del>var isPromiseLike = function isPromiseLike(obj) {return obj && isFunction(obj.then);};
<del>var isObject = function isObject(value) {return value != null && typeof value === 'object';};
<del>var isUndefined = function isUndefined(value) {return typeof value === 'undefined';};
<add>/* eslint-disable no-unused-vars */
<add>function isFunction(value) { return typeof value === 'function'; }
<add>function isPromiseLike(obj) { return obj && isFunction(obj.then); }
<add>function isObject(value) { return value !== null && typeof value === 'object'; }
<add>function isUndefined(value) { return typeof value === 'undefined'; }
<ide>
<del>var minErr = function minErr(module, constructor) {
<add>function minErr(module, constructor) {
<ide> return function() {
<ide> var ErrorConstructor = constructor || Error;
<ide> throw new ErrorConstructor(module + arguments[0] + arguments[1]);
<ide> };
<del>};
<add>}
<ide>
<del>var extend = function extend(dst) {
<add>function extend(dst) {
<ide> for (var i = 1, ii = arguments.length; i < ii; i++) {
<ide> var obj = arguments[i];
<ide> if (obj) {
<ide> var extend = function extend(dst) {
<ide> }
<ide> }
<ide> return dst;
<del>};
<add>}
<add>/* eslint-enable */
<ide>
<ide> var $q = qFactory(process.nextTick, function noopExceptionHandler() {});
<ide>
<ide> exports.resolved = $q.resolve;
<ide> exports.rejected = $q.reject;
<del>exports.deferred = function() {
<del> var deferred = $q.defer();
<del>
<del> return {
<del> promise: deferred.promise,
<del> resolve: deferred.resolve,
<del> reject: deferred.reject
<del> };
<del>};
<add>exports.deferred = $q.defer;
<ide><path>src/ng/compile.js
<ide> function $CompileProvider($provide, $$sanitizeUriProvider) {
<ide> childBoundTranscludeFn);
<ide> }
<ide> linkQueue = null;
<add> }).catch(function(error) {
<add> if (error instanceof Error) {
<add> $exceptionHandler(error);
<add> }
<ide> }).catch(noop);
<ide>
<ide> return function delayedNodeLinkFn(ignoreChildLinkFn, scope, node, rootElement, boundTranscludeFn) {
<ide><path>src/ng/q.js
<ide> * A service that helps you run functions asynchronously, and use their return values (or exceptions)
<ide> * when they are done processing.
<ide> *
<del> * This is an implementation of promises/deferred objects inspired by
<del> * [Kris Kowal's Q](https://github.com/kriskowal/q).
<add> * This is a [Promises/A+](https://promisesaplus.com/)-compliant implementation of promises/deferred
<add> * objects inspired by [Kris Kowal's Q](https://github.com/kriskowal/q).
<ide> *
<ide> * $q can be used in two fashions --- one which is more similar to Kris Kowal's Q or jQuery's Deferred
<ide> * implementations, and the other which resembles ES6 (ES2015) promises to some degree.
<ide> function qFactory(nextTick, exceptionHandler, errorOnUnhandledRejections) {
<ide> }
<ide> } catch (e) {
<ide> deferred.reject(e);
<del> exceptionHandler(e);
<ide> }
<ide> }
<ide> } finally {
<ide> function qFactory(nextTick, exceptionHandler, errorOnUnhandledRejections) {
<ide> } else {
<ide> this.$$resolve(val);
<ide> }
<del>
<ide> },
<ide>
<ide> $$resolve: function(val) {
<ide> var then;
<ide> var that = this;
<ide> var done = false;
<ide> try {
<del> if ((isObject(val) || isFunction(val))) then = val && val.then;
<add> if (isObject(val) || isFunction(val)) then = val.then;
<ide> if (isFunction(then)) {
<ide> this.promise.$$state.status = -1;
<ide> then.call(val, resolvePromise, rejectPromise, simpleBind(this, this.notify));
<ide> function qFactory(nextTick, exceptionHandler, errorOnUnhandledRejections) {
<ide> }
<ide> } catch (e) {
<ide> rejectPromise(e);
<del> exceptionHandler(e);
<ide> }
<ide>
<ide> function resolvePromise(val) {
<ide><path>src/ng/templateRequest.js
<ide> function $TemplateRequestProvider() {
<ide> *
<ide> * @property {number} totalPendingRequests total amount of pending template requests being downloaded.
<ide> */
<del> this.$get = ['$templateCache', '$http', '$q', '$sce', function($templateCache, $http, $q, $sce) {
<add> this.$get = ['$exceptionHandler', '$templateCache', '$http', '$q', '$sce',
<add> function($exceptionHandler, $templateCache, $http, $q, $sce) {
<ide>
<del> function handleRequestFn(tpl, ignoreRequestError) {
<del> handleRequestFn.totalPendingRequests++;
<add> function handleRequestFn(tpl, ignoreRequestError) {
<add> handleRequestFn.totalPendingRequests++;
<ide>
<del> // We consider the template cache holds only trusted templates, so
<del> // there's no need to go through whitelisting again for keys that already
<del> // are included in there. This also makes Angular accept any script
<del> // directive, no matter its name. However, we still need to unwrap trusted
<del> // types.
<del> if (!isString(tpl) || isUndefined($templateCache.get(tpl))) {
<del> tpl = $sce.getTrustedResourceUrl(tpl);
<del> }
<add> // We consider the template cache holds only trusted templates, so
<add> // there's no need to go through whitelisting again for keys that already
<add> // are included in there. This also makes Angular accept any script
<add> // directive, no matter its name. However, we still need to unwrap trusted
<add> // types.
<add> if (!isString(tpl) || isUndefined($templateCache.get(tpl))) {
<add> tpl = $sce.getTrustedResourceUrl(tpl);
<add> }
<ide>
<del> var transformResponse = $http.defaults && $http.defaults.transformResponse;
<add> var transformResponse = $http.defaults && $http.defaults.transformResponse;
<ide>
<del> if (isArray(transformResponse)) {
<del> transformResponse = transformResponse.filter(function(transformer) {
<del> return transformer !== defaultHttpResponseTransform;
<del> });
<del> } else if (transformResponse === defaultHttpResponseTransform) {
<del> transformResponse = null;
<del> }
<add> if (isArray(transformResponse)) {
<add> transformResponse = transformResponse.filter(function(transformer) {
<add> return transformer !== defaultHttpResponseTransform;
<add> });
<add> } else if (transformResponse === defaultHttpResponseTransform) {
<add> transformResponse = null;
<add> }
<ide>
<del> return $http.get(tpl, extend({
<del> cache: $templateCache,
<del> transformResponse: transformResponse
<del> }, httpOptions))
<del> .finally(function() {
<del> handleRequestFn.totalPendingRequests--;
<del> })
<del> .then(function(response) {
<del> $templateCache.put(tpl, response.data);
<del> return response.data;
<del> }, handleError);
<add> return $http.get(tpl, extend({
<add> cache: $templateCache,
<add> transformResponse: transformResponse
<add> }, httpOptions))
<add> .finally(function() {
<add> handleRequestFn.totalPendingRequests--;
<add> })
<add> .then(function(response) {
<add> $templateCache.put(tpl, response.data);
<add> return response.data;
<add> }, handleError);
<ide>
<del> function handleError(resp) {
<del> if (!ignoreRequestError) {
<del> throw $templateRequestMinErr('tpload', 'Failed to load template: {0} (HTTP status: {1} {2})',
<del> tpl, resp.status, resp.statusText);
<add> function handleError(resp) {
<add> if (!ignoreRequestError) {
<add> resp = $templateRequestMinErr('tpload',
<add> 'Failed to load template: {0} (HTTP status: {1} {2})',
<add> tpl, resp.status, resp.statusText);
<add>
<add> $exceptionHandler(resp);
<add> }
<add>
<add> return $q.reject(resp);
<ide> }
<del> return $q.reject(resp);
<ide> }
<del> }
<ide>
<del> handleRequestFn.totalPendingRequests = 0;
<add> handleRequestFn.totalPendingRequests = 0;
<ide>
<del> return handleRequestFn;
<del> }];
<add> return handleRequestFn;
<add> }
<add> ];
<ide> }
<ide><path>test/ng/compileSpec.js
<ide> describe('$compile', function() {
<ide> ));
<ide>
<ide>
<del> it('should throw an error and clear element content if the template fails to load', inject(
<del> function($compile, $httpBackend, $rootScope) {
<del> $httpBackend.expect('GET', 'hello.html').respond(404, 'Not Found!');
<del> element = $compile('<div><b class="hello">content</b></div>')($rootScope);
<add> it('should throw an error and clear element content if the template fails to load',
<add> inject(function($compile, $exceptionHandler, $httpBackend, $rootScope) {
<add> $httpBackend.expect('GET', 'hello.html').respond(404, 'Not Found!');
<add> element = $compile('<div><b class="hello">content</b></div>')($rootScope);
<ide>
<del> expect(function() {
<del> $httpBackend.flush();
<del> }).toThrowMinErr('$compile', 'tpload', 'Failed to load template: hello.html');
<del> expect(sortedHtml(element)).toBe('<div><b class="hello"></b></div>');
<del> }
<del> ));
<add> $httpBackend.flush();
<add>
<add> expect(sortedHtml(element)).toBe('<div><b class="hello"></b></div>');
<add> expect($exceptionHandler.errors[0].message).toMatch(
<add> /^\[\$compile:tpload] Failed to load template: hello\.html/);
<add> })
<add> );
<ide>
<ide>
<ide> it('should prevent multiple templates per element', function() {
<ide> describe('$compile', function() {
<ide> templateUrl: 'template.html'
<ide> }));
<ide> });
<del> inject(function($compile, $httpBackend) {
<add> inject(function($compile, $exceptionHandler, $httpBackend) {
<ide> $httpBackend.whenGET('template.html').respond('<p>template.html</p>');
<del> expect(function() {
<del> $compile('<div><div class="sync async"></div></div>');
<del> $httpBackend.flush();
<del> }).toThrowMinErr('$compile', 'multidir', 'Multiple directives [async, sync] asking for template on: ' +
<del> '<div class="sync async">');
<add>
<add> $compile('<div><div class="sync async"></div></div>');
<add> $httpBackend.flush();
<add>
<add> expect($exceptionHandler.errors[0].message).toMatch(new RegExp(
<add> '^\\[\\$compile:multidir] Multiple directives \\[async, sync] asking for ' +
<add> 'template on: <div class="sync async">'));
<ide> });
<ide> });
<ide>
<ide> describe('$compile', function() {
<ide> );
<ide>
<ide> it('should not allow more than one isolate/new scope creation per element regardless of `templateUrl`',
<del> inject(function($httpBackend) {
<add> inject(function($exceptionHandler, $httpBackend) {
<ide> $httpBackend.expect('GET', 'tiscope.html').respond('<div>Hello, world !</div>');
<ide>
<del> expect(function() {
<del> compile('<div class="tiscope-a; scope-b"></div>');
<del> $httpBackend.flush();
<del> }).toThrowMinErr('$compile', 'multidir', 'Multiple directives [scopeB, tiscopeA] ' +
<del> 'asking for new/isolated scope on: <div class="tiscope-a; scope-b ng-scope">');
<add> compile('<div class="tiscope-a; scope-b"></div>');
<add> $httpBackend.flush();
<add>
<add> expect($exceptionHandler.errors[0].message).toMatch(new RegExp(
<add> '^\\[\\$compile:multidir] Multiple directives \\[scopeB, tiscopeA] ' +
<add> 'asking for new/isolated scope on: <div class="tiscope-a; scope-b ng-scope">'));
<ide> })
<ide> );
<ide>
<ide> describe('$compile', function() {
<ide> '<div class="foo" ng-transclude></div>' +
<ide> '</div>',
<ide> transclude: true
<del>
<ide> }));
<ide>
<ide> $compileProvider.directive('noTransBar', valueFn({
<ide> templateUrl: 'noTransBar.html',
<ide> transclude: false
<del>
<ide> }));
<ide> });
<ide>
<del> inject(function($compile, $rootScope, $templateCache) {
<add> inject(function($compile, $exceptionHandler, $rootScope, $templateCache) {
<ide> $templateCache.put('noTransBar.html',
<ide> '<div>' +
<ide> // This ng-transclude is invalid. It should throw an error.
<ide> '<div class="bar" ng-transclude></div>' +
<ide> '</div>');
<ide>
<del> expect(function() {
<del> element = $compile('<div trans-foo>content</div>')($rootScope);
<del> $rootScope.$apply();
<del> }).toThrowMinErr('ngTransclude', 'orphan',
<del> 'Illegal use of ngTransclude directive in the template! No parent directive that requires a transclusion found. Element: <div class="bar" ng-transclude="">');
<add> element = $compile('<div trans-foo>content</div>')($rootScope);
<add> $rootScope.$digest();
<add>
<add> expect($exceptionHandler.errors[0][1]).toBe('<div class="bar" ng-transclude="">');
<add> expect($exceptionHandler.errors[0][0].message).toMatch(new RegExp(
<add> '^\\[ngTransclude:orphan] Illegal use of ngTransclude directive in the ' +
<add> 'template! No parent directive that requires a transclusion found. Element: ' +
<add> '<div class="bar" ng-transclude="">'));
<ide> });
<ide> });
<ide>
<ide> describe('$compile', function() {
<ide> transclude: 'element'
<ide> }));
<ide> });
<del> inject(function($compile, $httpBackend) {
<add> inject(function($compile, $exceptionHandler, $httpBackend) {
<ide> $httpBackend.expectGET('template.html').respond('<p second>template.html</p>');
<add>
<ide> $compile('<div template first></div>');
<del> expect(function() {
<del> $httpBackend.flush();
<del> }).toThrowMinErr('$compile', 'multidir', /Multiple directives \[first, second\] asking for transclusion on: <p .+/);
<add> $httpBackend.flush();
<add>
<add> expect($exceptionHandler.errors[0].message).toMatch(new RegExp(
<add> '^\\[\\$compile:multidir] Multiple directives \\[first, second] asking for ' +
<add> 'transclusion on: <p '));
<ide> });
<ide> });
<ide>
<ide><path>test/ng/httpSpec.js
<ide> describe('$http', function() {
<ide>
<ide>
<ide> it('should increment/decrement `outstandingRequestCount` on error in `transformRequest`',
<del> inject(function($exceptionHandler) {
<add> function() {
<ide> expect(incOutstandingRequestCountSpy).not.toHaveBeenCalled();
<ide> expect(completeOutstandingRequestSpy).not.toHaveBeenCalled();
<ide>
<ide> describe('$http', function() {
<ide>
<ide> expect(incOutstandingRequestCountSpy).toHaveBeenCalledOnce();
<ide> expect(completeOutstandingRequestSpy).toHaveBeenCalledOnce();
<del>
<del> expect($exceptionHandler.errors).toEqual([jasmine.any(Error)]);
<del> $exceptionHandler.errors = [];
<del> })
<add> }
<ide> );
<ide>
<ide>
<ide> it('should increment/decrement `outstandingRequestCount` on error in `transformResponse`',
<del> inject(function($exceptionHandler) {
<add> function() {
<ide> expect(incOutstandingRequestCountSpy).not.toHaveBeenCalled();
<ide> expect(completeOutstandingRequestSpy).not.toHaveBeenCalled();
<ide>
<ide> describe('$http', function() {
<ide>
<ide> expect(incOutstandingRequestCountSpy).toHaveBeenCalledOnce();
<ide> expect(completeOutstandingRequestSpy).toHaveBeenCalledOnce();
<del>
<del> expect($exceptionHandler.errors).toEqual([jasmine.any(Error)]);
<del> $exceptionHandler.errors = [];
<del> })
<add> }
<ide> );
<ide> });
<ide>
<ide><path>test/ng/qSpec.js
<ide> describe('q', function() {
<ide>
<ide>
<ide> describe('in then', function() {
<del> it('should log exceptions thrown in a success callback and reject the derived promise',
<add> it('should NOT log exceptions thrown in a success callback but reject the derived promise',
<ide> function() {
<ide> var success1 = success(1, 'oops', true);
<ide> promise.then(success1).then(success(2), error(2)).catch(noop);
<ide> syncResolve(deferred, 'done');
<ide> expect(logStr()).toBe('success1(done)->throw(oops); error2(oops)->reject(oops)');
<del> expect(mockExceptionLogger.log).toEqual(['oops']);
<add> expect(mockExceptionLogger.log).toEqual([]);
<ide> });
<ide>
<ide>
<ide> describe('q', function() {
<ide> });
<ide>
<ide>
<del> it('should log exceptions thrown in a errback and reject the derived promise', function() {
<add> it('should NOT log exceptions thrown in an errback but reject the derived promise',
<add> function() {
<ide> var error1 = error(1, 'oops', true);
<ide> promise.then(null, error1).then(success(2), error(2)).catch(noop);
<ide> syncReject(deferred, 'nope');
<ide> expect(logStr()).toBe('error1(nope)->throw(oops); error2(oops)->reject(oops)');
<del> expect(mockExceptionLogger.log).toEqual(['oops']);
<add> expect(mockExceptionLogger.log).toEqual([]);
<ide> });
<ide>
<ide>
<ide> describe('q', function() {
<ide>
<ide>
<ide> describe('in when', function() {
<del> it('should log exceptions thrown in a success callback and reject the derived promise',
<add> it('should NOT log exceptions thrown in a success callback but reject the derived promise',
<ide> function() {
<ide> var success1 = success(1, 'oops', true);
<ide> q.when('hi', success1, error()).then(success(), error(2)).catch(noop);
<ide> mockNextTick.flush();
<ide> expect(logStr()).toBe('success1(hi)->throw(oops); error2(oops)->reject(oops)');
<del> expect(mockExceptionLogger.log).toEqual(['oops']);
<add> expect(mockExceptionLogger.log).toEqual([]);
<ide> });
<ide>
<ide>
<ide> describe('q', function() {
<ide> });
<ide>
<ide>
<del> it('should log exceptions thrown in a errback and reject the derived promise', function() {
<add> it('should NOT log exceptions thrown in a errback but reject the derived promise', function() {
<ide> var error1 = error(1, 'oops', true);
<ide> q.when(q.reject('sorry'), success(), error1).then(success(), error(2)).catch(noop);
<ide> mockNextTick.flush();
<ide> expect(logStr()).toBe('error1(sorry)->throw(oops); error2(oops)->reject(oops)');
<del> expect(mockExceptionLogger.log).toEqual(['oops']);
<add> expect(mockExceptionLogger.log).toEqual([]);
<ide> });
<ide>
<ide>
<ide> describe('q', function() {
<ide> expect(exceptionHandlerStr()).toBe('');
<ide> });
<ide> });
<del>
<del>
<del> describe('when exceptionHandler rethrows exceptions, ', function() {
<del> var originalLogExceptions, deferred, errorSpy, exceptionExceptionSpy;
<del>
<del> beforeEach(function() {
<del> // Turn off exception logging for these particular tests
<del> originalLogExceptions = mockNextTick.logExceptions;
<del> mockNextTick.logExceptions = false;
<del>
<del> // Set up spies
<del> exceptionExceptionSpy = jasmine.createSpy('rethrowExceptionHandler')
<del> .and.callFake(function rethrowExceptionHandler(e) {
<del> throw e;
<del> });
<del> errorSpy = jasmine.createSpy('errorSpy');
<del>
<del>
<del> q = qFactory(mockNextTick.nextTick, exceptionExceptionSpy);
<del> deferred = q.defer();
<del> });
<del>
<del>
<del> afterEach(function() {
<del> // Restore the original exception logging mode
<del> mockNextTick.logExceptions = originalLogExceptions;
<del> });
<del>
<del>
<del> it('should still reject the promise, when exception is thrown in success handler, even if exceptionHandler rethrows', function() {
<del> deferred.promise.then(function() { throw new Error('reject'); }).then(null, errorSpy);
<del> deferred.resolve('resolve');
<del> mockNextTick.flush();
<del> expect(exceptionExceptionSpy).toHaveBeenCalled();
<del> expect(errorSpy).toHaveBeenCalled();
<del> });
<del>
<del>
<del> it('should still reject the promise, when exception is thrown in error handler, even if exceptionHandler rethrows', function() {
<del> deferred.promise.then(null, function() { throw new Error('reject again'); }).then(null, errorSpy);
<del> deferred.reject('reject');
<del> mockNextTick.flush();
<del> expect(exceptionExceptionSpy).toHaveBeenCalled();
<del> expect(errorSpy).toHaveBeenCalled();
<del> });
<del> });
<ide> });
<ide><path>test/ng/templateRequestSpec.js
<ide> describe('$templateRequest', function() {
<ide> expect($templateCache.get('tpl.html')).toBe('matias');
<ide> }));
<ide>
<del> it('should throw an error when the template is not found',
<del> inject(function($rootScope, $templateRequest, $httpBackend) {
<del>
<del> $httpBackend.expectGET('tpl.html').respond(404, '', {}, 'Not found');
<del>
<del> $templateRequest('tpl.html');
<del>
<del> $rootScope.$digest();
<del>
<del> expect(function() {
<del> $rootScope.$digest();
<del> $httpBackend.flush();
<del> }).toThrowMinErr('$compile', 'tpload', 'Failed to load template: tpl.html (HTTP status: 404 Not found)');
<del> }));
<del>
<del> it('should not throw when the template is not found and ignoreRequestError is true',
<del> inject(function($rootScope, $templateRequest, $httpBackend) {
<add> it('should call `$exceptionHandler` on request error', function() {
<add> module(function($exceptionHandlerProvider) {
<add> $exceptionHandlerProvider.mode('log');
<add> });
<ide>
<del> $httpBackend.expectGET('tpl.html').respond(404);
<add> inject(function($exceptionHandler, $httpBackend, $templateRequest) {
<add> $httpBackend.expectGET('tpl.html').respond(404, '', {}, 'Not Found');
<ide>
<ide> var err;
<del> $templateRequest('tpl.html', true).catch(function(reason) { err = reason; });
<del>
<del> $rootScope.$digest();
<add> $templateRequest('tpl.html').catch(function(reason) { err = reason; });
<ide> $httpBackend.flush();
<ide>
<del> expect(err.status).toBe(404);
<del> }));
<add> expect(err.message).toMatch(new RegExp(
<add> '^\\[\\$compile:tpload] Failed to load template: tpl\\.html ' +
<add> '\\(HTTP status: 404 Not Found\\)'));
<add> expect($exceptionHandler.errors[0].message).toMatch(new RegExp(
<add> '^\\[\\$compile:tpload] Failed to load template: tpl\\.html ' +
<add> '\\(HTTP status: 404 Not Found\\)'));
<add> });
<add> });
<ide>
<del> it('should not throw an error when the template is empty',
<del> inject(function($rootScope, $templateRequest, $httpBackend) {
<add> it('should not call `$exceptionHandler` on request error when `ignoreRequestError` is true',
<add> function() {
<add> module(function($exceptionHandlerProvider) {
<add> $exceptionHandlerProvider.mode('log');
<add> });
<ide>
<del> $httpBackend.expectGET('tpl.html').respond('');
<add> inject(function($exceptionHandler, $httpBackend, $templateRequest) {
<add> $httpBackend.expectGET('tpl.html').respond(404);
<ide>
<del> $templateRequest('tpl.html');
<add> var err;
<add> $templateRequest('tpl.html', true).catch(function(reason) { err = reason; });
<add> $httpBackend.flush();
<ide>
<del> $rootScope.$digest();
<add> expect(err.status).toBe(404);
<add> expect($exceptionHandler.errors).toEqual([]);
<add> });
<add> }
<add> );
<ide>
<del> expect(function() {
<add> it('should not call `$exceptionHandler` when the template is empty',
<add> inject(function($exceptionHandler, $httpBackend, $rootScope, $templateRequest) {
<add> $httpBackend.expectGET('tpl.html').respond('');
<add>
<add> var onError = jasmine.createSpy('onError');
<add> $templateRequest('tpl.html').catch(onError);
<ide> $rootScope.$digest();
<ide> $httpBackend.flush();
<del> }).not.toThrow();
<del> }));
<add>
<add> expect(onError).not.toHaveBeenCalled();
<add> expect($exceptionHandler.errors).toEqual([]);
<add> })
<add> );
<ide>
<ide> it('should accept empty templates and refuse null or undefined templates in cache',
<ide> inject(function($rootScope, $templateRequest, $templateCache, $sce) {
<ide><path>test/ngRoute/routeSpec.js
<ide> describe('$route', function() {
<ide> });
<ide>
<ide> inject(function($route, $location, $rootScope) {
<add> var onError = jasmine.createSpy('onError');
<add> var onSuccess = jasmine.createSpy('onSuccess');
<add>
<add> $rootScope.$on('$routeChangeError', onError);
<add> $rootScope.$on('$routeChangeSuccess', onSuccess);
<add>
<ide> $location.path('/foo');
<del> expect(function() {
<del> $rootScope.$digest();
<del> }).toThrowMinErr('$sce', 'insecurl', 'Blocked loading resource from url not allowed by ' +
<del> '$sceDelegate policy. URL: http://example.com/foo.html');
<add> $rootScope.$digest();
<add>
<add> expect(onSuccess).not.toHaveBeenCalled();
<add> expect(onError).toHaveBeenCalled();
<add> expect(onError.calls.mostRecent().args[3].message).toMatch(new RegExp(
<add> '^\\[\\$sce:insecurl] Blocked loading resource from url not allowed by ' +
<add> '\\$sceDelegate policy\\. URL: http:\\/\\/example\\.com\\/foo\\.html'));
<ide> });
<ide> });
<ide>
<ide> describe('$route', function() {
<ide>
<ide> it('should catch local factory errors', function() {
<ide> var myError = new Error('MyError');
<del> module(function($routeProvider, $exceptionHandlerProvider) {
<del> $exceptionHandlerProvider.mode('log');
<add> module(function($routeProvider) {
<ide> $routeProvider.when('/locals', {
<ide> resolve: {
<ide> a: function($q) {
<ide> describe('$route', function() {
<ide> });
<ide> });
<ide>
<del> inject(function($location, $route, $rootScope, $exceptionHandler) {
<add> inject(function($location, $route, $rootScope) {
<add> spyOn($rootScope, '$broadcast').and.callThrough();
<add>
<ide> $location.path('/locals');
<ide> $rootScope.$digest();
<del> expect($exceptionHandler.errors).toEqual([myError]);
<add>
<add> expect($rootScope.$broadcast).toHaveBeenCalledWith(
<add> '$routeChangeError', jasmine.any(Object), undefined, myError);
<ide> });
<ide> });
<ide> });
<ide> describe('$route', function() {
<ide> it('should broadcast `$routeChangeError` when redirectTo throws', function() {
<ide> var error = new Error('Test');
<ide>
<del> module(function($exceptionHandlerProvider, $routeProvider) {
<del> $exceptionHandlerProvider.mode('log');
<add> module(function($routeProvider) {
<ide> $routeProvider.when('/foo', {redirectTo: function() { throw error; }});
<ide> });
<ide>
<ide> describe('$route', function() {
<ide> var lastCallArgs = $rootScope.$broadcast.calls.mostRecent().args;
<ide> expect(lastCallArgs[0]).toBe('$routeChangeError');
<ide> expect(lastCallArgs[3]).toBe(error);
<del> expect($exceptionHandler.errors[0]).toBe(error);
<ide> });
<ide> });
<ide> | 9 |
Python | Python | call mapped_dependants only on the original task | 30ac99773c8577718c87703a310ffc454316cfce | <ide><path>airflow/jobs/backfill_job.py
<ide> class _DagRunTaskStatus:
<ide>
<ide> def __init__(
<ide> self,
<del> dag,
<add> dag: DAG,
<ide> start_date=None,
<ide> end_date=None,
<ide> mark_success=False,
<ide> def _manage_executor_state(
<ide> :param running: dict of key, task to verify
<ide> :return: An iterable of expanded TaskInstance per MappedTask
<ide> """
<del> from airflow.models.mappedoperator import MappedOperator
<del>
<ide> executor = self.executor
<ide>
<ide> # TODO: query all instead of refresh from db
<ide> def _manage_executor_state(
<ide> ti.handle_failure_with_callback(error=msg)
<ide> continue
<ide> if ti.state not in self.STATES_COUNT_AS_RUNNING:
<del> for node in ti.task.mapped_dependants():
<del> assert isinstance(node, MappedOperator)
<add> # Don't use ti.task; if this task is mapped, that attribute
<add> # would hold the unmapped task. We need to original task here.
<add> for node in self.dag.get_task(ti.task_id, include_subdags=True).mapped_dependants():
<ide> yield node, ti.run_id, node.expand_mapped_task(ti.run_id, session=session)
<ide>
<ide> @provide_session
<ide> def tabulate_ti_keys_set(ti_keys: Iterable[TaskInstanceKey]) -> str:
<ide>
<ide> return err
<ide>
<del> def _get_dag_with_subdags(self):
<add> def _get_dag_with_subdags(self) -> List[DAG]:
<ide> return [self.dag] + self.dag.subdags
<ide>
<ide> @provide_session
<ide><path>airflow/models/taskmixin.py
<ide> from logging import Logger
<ide>
<ide> from airflow.models.dag import DAG
<add> from airflow.models.mappedoperator import MappedOperator
<ide> from airflow.utils.edgemodifier import EdgeModifier
<ide> from airflow.utils.task_group import TaskGroup
<ide>
<ide> def serialize_for_task_group(self) -> Tuple[DagAttributeTypes, Any]:
<ide> """This is used by SerializedTaskGroup to serialize a task group's content."""
<ide> raise NotImplementedError()
<ide>
<del> def mapped_dependants(self) -> Iterator["DAGNode"]:
<add> def mapped_dependants(self) -> Iterator["MappedOperator"]:
<ide> """Return any mapped nodes that are direct dependencies of the current task
<ide>
<ide> For now, this walks the entire DAG to find mapped nodes that has this
<ide><path>tests/dags/test_mapped_classic.py
<ide> def consumer(value):
<ide>
<ide> with DAG(dag_id='test_mapped_classic', start_date=days_ago(2)) as dag:
<ide> PythonOperator.partial(task_id='consumer', python_callable=consumer).expand(op_args=make_arg_lists())
<add> PythonOperator.partial(task_id='consumer_literal', python_callable=consumer).expand(
<add> op_args=[[1], [2], [3]],
<add> )
<ide><path>tests/dags/test_mapped_taskflow.py
<ide> def consumer(value):
<ide> print(repr(value))
<ide>
<ide> consumer.expand(value=make_list())
<add> consumer.expand(value=[1, 2, 3]) | 4 |
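The comment added in the fix ("ti.task ... would hold the unmapped task") is the crux: the task instance can carry an unmapped copy of its operator, while the DAG still holds the original mapped operator. The toy sketch below uses hypothetical stand-in classes (not the real Airflow API) purely to show why the lookup has to go back through the DAG:

```python
class Dag:                                   # hypothetical stand-in, not airflow.models.DAG
    def __init__(self):
        self.task_dict = {}
    def get_task(self, task_id):
        return self.task_dict[task_id]

class MappedTask:                            # the original operator kept by the DAG
    def mapped_dependants(self):
        yield "downstream mapped task"

class UnmappedCopy:                          # what ti.task may hold after unmapping
    pass                                     # knows nothing about mapped dependants

dag = Dag()
dag.task_dict["produce"] = MappedTask()

class TaskInstance:                          # hypothetical stand-in
    task_id = "produce"
    task = UnmappedCopy()

ti = TaskInstance()
# ti.task.mapped_dependants() would fail here; resolving the original operator
# through the DAG, as the commit does, still yields the mapped dependants:
assert list(dag.get_task(ti.task_id).mapped_dependants()) == ["downstream mapped task"]
```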
PHP | PHP | drop index before columns | ca74f37573b8c223c010414fe1910d4596544db2 | <ide><path>src/Illuminate/Database/Schema/Blueprint.php
<ide> public function dropRememberToken()
<ide> */
<ide> public function dropMorphs($name, $indexName = null)
<ide> {
<del> $this->dropColumn("{$name}_type", "{$name}_id");
<del>
<ide> $indexName = $indexName ?: $this->createIndexName('index', ["{$name}_type", "{$name}_id"]);
<ide>
<ide> $this->dropIndex($indexName);
<add>
<add> $this->dropColumn("{$name}_type", "{$name}_id");
<ide> }
<ide>
<ide> /**
<ide><path>tests/Database/DatabaseMySqlSchemaGrammarTest.php
<ide> public function testDropMorphs()
<ide> $statements = $blueprint->toSql($this->getConnection(), $this->getGrammar());
<ide>
<ide> $this->assertCount(2, $statements);
<del> $this->assertEquals('alter table `photos` drop `imageable_type`, drop `imageable_id`', $statements[0]);
<del> $this->assertEquals('alter table `photos` drop index `photos_imageable_type_imageable_id_index`', $statements[1]);
<add> $this->assertEquals('alter table `photos` drop index `photos_imageable_type_imageable_id_index`', $statements[0]);
<add> $this->assertEquals('alter table `photos` drop `imageable_type`, drop `imageable_id`', $statements[1]);
<ide> }
<ide>
<ide> public function testRenameTable()
<ide><path>tests/Database/DatabasePostgresSchemaGrammarTest.php
<ide> public function testDropMorphs()
<ide> $statements = $blueprint->toSql($this->getConnection(), $this->getGrammar());
<ide>
<ide> $this->assertCount(2, $statements);
<del> $this->assertEquals('alter table "photos" drop column "imageable_type", drop column "imageable_id"', $statements[0]);
<del> $this->assertEquals('drop index "photos_imageable_type_imageable_id_index"', $statements[1]);
<add> $this->assertEquals('drop index "photos_imageable_type_imageable_id_index"', $statements[0]);
<add> $this->assertEquals('alter table "photos" drop column "imageable_type", drop column "imageable_id"', $statements[1]);
<ide> }
<ide>
<ide> public function testRenameTable()
<ide><path>tests/Database/DatabaseSqlServerSchemaGrammarTest.php
<ide> public function testDropMorphs()
<ide> $statements = $blueprint->toSql($this->getConnection(), $this->getGrammar());
<ide>
<ide> $this->assertCount(2, $statements);
<del> $this->assertEquals('alter table "photos" drop column "imageable_type", "imageable_id"', $statements[0]);
<del> $this->assertEquals('drop index "photos_imageable_type_imageable_id_index" on "photos"', $statements[1]);
<add> $this->assertEquals('drop index "photos_imageable_type_imageable_id_index" on "photos"', $statements[0]);
<add> $this->assertEquals('alter table "photos" drop column "imageable_type", "imageable_id"', $statements[1]);
<ide> }
<ide>
<ide> public function testRenameTable() | 4 |
Javascript | Javascript | remove dead code | 4536128e7c9218f478354292f30720b39f6f4060 | <ide><path>lib/_stream_readable.js
<ide> function copyFromBuffer(n, list) {
<ide> function endReadable(stream) {
<ide> var state = stream._readableState;
<ide>
<del> // If we get here before consuming all the bytes, then that is a
<del> // bug in node. Should never happen.
<del> if (state.length > 0)
<del> throw new Error('"endReadable()" called on non-empty stream');
<del>
<ide> if (!state.endEmitted) {
<ide> state.ended = true;
<ide> process.nextTick(endReadableNT, state, stream); | 1 |
Go | Go | use httperror in a separate test | df697b4318f533556a2e9351d5995b2d1991ae5a | <ide><path>api_test.go
<ide> import (
<ide> "bufio"
<ide> "bytes"
<ide> "encoding/json"
<add> "fmt"
<ide> "github.com/dotcloud/docker/utils"
<ide> "io"
<ide> "net"
<ide> "net/http"
<ide> "net/http/httptest"
<ide> "os"
<ide> "path"
<add> "strings"
<ide> "testing"
<ide> "time"
<ide> )
<ide> func TestGetBoolParam(t *testing.T) {
<ide> }
<ide> }
<ide>
<add>func TesthttpError(t *testing.T) {
<add> r := httptest.NewRecorder()
<add>
<add> httpError(r, fmt.Errorf("No such method"))
<add> if r.Code != http.StatusNotFound {
<add> t.Fatalf("Expected %d, got %d", http.StatusNotFound, r.Code)
<add> }
<add>
<add> httpError(r, fmt.Errorf("This accound hasn't been activated"))
<add> if r.Code != http.StatusForbidden {
<add> t.Fatalf("Expected %d, got %d", http.StatusForbidden, r.Code)
<add> }
<add>
<add> httpError(r, fmt.Errorf("Some error"))
<add> if r.Code != http.StatusInternalServerError {
<add> t.Fatalf("Expected %d, got %d", http.StatusInternalServerError, r.Code)
<add> }
<add>}
<add>
<ide> func TestGetVersion(t *testing.T) {
<ide> var err error
<ide> runtime := mkRuntime(t)
<ide> func TestGetImagesJSON(t *testing.T) {
<ide> t.Fatalf("Error expected, received none")
<ide> }
<ide>
<del> httpError(r4, err)
<add> if !strings.HasPrefix(err.Error(), "Bad parameter") {
<add> t.Fatalf("Error should starts with \"Bad parameter\"")
<add> }
<add> http.Error(r4, err.Error(), http.StatusBadRequest)
<add>
<ide> if r4.Code != http.StatusBadRequest {
<ide> t.Fatalf("%d Bad Request expected, received %d\n", http.StatusBadRequest, r4.Code)
<ide> } | 1 |
PHP | PHP | fix cs errors | 1d488cca3d4fdf309b1d0f80f5e049700b40c385 | <ide><path>src/Controller/ErrorController.php
<ide> */
<ide> namespace Cake\Controller;
<ide>
<del>use Cake\Routing\Router;
<ide> use Cake\Event\Event;
<add>use Cake\Routing\Router;
<ide>
<ide> /**
<ide> * Error Handling Controller
<ide><path>tests/TestCase/Controller/Component/RequestHandlerComponentTest.php
<ide> public function testNoViewClassExtension()
<ide> $event = new Event('Controller.startup', $this->Controller);
<ide> $this->RequestHandler->initialize([]);
<ide> $this->RequestHandler->startup($event);
<del> $this->Controller->eventManager()->on('Controller.beforeRender', function() {
<add> $this->Controller->eventManager()->on('Controller.beforeRender', function () {
<ide> return $this->Controller->response;
<ide> });
<ide> $this->Controller->render();
<ide> public function testRespondAsWithAttachment()
<ide> */
<ide> public function testRenderAsCalledTwice()
<ide> {
<del> $this->Controller->eventManager()->on('Controller.beforeRender', function(\Cake\Event\Event $e) {
<add> $this->Controller->eventManager()->on('Controller.beforeRender', function (\Cake\Event\Event $e) {
<ide> return $e->subject()->response;
<ide> });
<ide> $this->Controller->render();
<ide><path>tests/TestCase/Controller/ControllerTest.php
<ide> public function testViewPathConventions()
<ide> ]);
<ide> $response = $this->getMock('Cake\Network\Response');
<ide> $Controller = new \TestApp\Controller\Admin\PostsController($request, $response);
<del> $Controller->eventManager()->on('Controller.beforeRender', function(\Cake\Event\Event $e) {
<add> $Controller->eventManager()->on('Controller.beforeRender', function (Event $e) {
<ide> return $e->subject()->response;
<ide> });
<ide> $Controller->render();
<ide> public function testViewPathConventions()
<ide> ]);
<ide> $response = $this->getMock('Cake\Network\Response');
<ide> $Controller = new \TestApp\Controller\Admin\PostsController($request, $response);
<del> $Controller->eventManager()->on('Controller.beforeRender', function(\Cake\Event\Event $e) {
<add> $Controller->eventManager()->on('Controller.beforeRender', function (Event $e) {
<ide> return $e->subject()->response;
<ide> });
<ide> $Controller->render();
<ide> $this->assertEquals('Admin' . DS . 'Super' . DS . 'Posts', $Controller->getView()->viewPath);
<ide>
<ide> $request = new Request('pages/home');
<ide> $Controller = new \TestApp\Controller\PagesController($request, $response);
<del> $Controller->eventManager()->on('Controller.beforeRender', function(\Cake\Event\Event $e) {
<add> $Controller->eventManager()->on('Controller.beforeRender', function (Event $e) {
<ide> return $e->subject()->response;
<ide> });
<ide> $Controller->render();
<ide><path>tests/TestCase/Error/ExceptionRendererTest.php
<ide> public function testMissingLayoutPathRenderSafe()
<ide>
<ide> $ExceptionRenderer->controller = $this->getMock('Cake\Controller\Controller', ['render']);
<ide> $ExceptionRenderer->controller->helpers = ['Fail', 'Boom'];
<del> $ExceptionRenderer->controller->eventManager()->on('Controller.beforeRender', function($event) {
<add> $ExceptionRenderer->controller->eventManager()->on('Controller.beforeRender', function (Event $event) {
<ide> $event->subject()->getView()->layoutPath = 'boom';
<ide> });
<ide> $ExceptionRenderer->controller->request = new Request; | 4 |
Javascript | Javascript | move docsearch styles before headcomponents | 1232ccbc0ff6c5d9e80de65cefb352c404973e2f | <ide><path>website/src/html.js
<ide> export default function HTML(props) {
<ide> name="viewport"
<ide> content="width=device-width, initial-scale=1, shrink-to-fit=no"
<ide> />
<del> {props.headComponents}
<ide> <link
<ide> rel="stylesheet"
<ide> href="https://cdn.jsdelivr.net/npm/docsearch.js@2/dist/cdn/docsearch.min.css"
<ide> />
<add> {props.headComponents}
<ide> </head>
<ide> <body {...props.bodyAttributes}>
<ide> {props.preBodyComponents} | 1 |
Javascript | Javascript | add empty tagname to avoid unnecessary div-tag | 6069bb4e65e1272bb7d5d46307387c73ddb89775 | <ide><path>packages/ember-htmlbars/tests/helpers/each_test.js
<ide> QUnit.test("it supports itemController", function() {
<ide> strictEqual(view.childViews[0].get('_arrayController.target'), parentController, "the target property of the child controllers are set correctly");
<ide> });
<ide>
<add>QUnit.test("itemController should not affect the DOM structure", function() {
<add> var Controller = EmberController.extend({
<add> name: computed.alias('model.name')
<add> });
<add>
<add> runDestroy(view);
<add>
<add> registry.register('controller:array', ArrayController.extend());
<add>
<add> view = EmberView.create({
<add> container: container,
<add> template: templateFor(
<add> '<div id="a">{{#each view.people itemController="person" as |person|}}{{person.name}}{{/each}}</div>' +
<add> '<div id="b">{{#each view.people as |person|}}{{person.name}}{{/each}}</div>'
<add> ),
<add> people: people
<add> });
<add>
<add> registry.register('controller:person', Controller);
<add>
<add> runAppend(view);
<add>
<add> equal(view.$('#a').html(), view.$('#b').html());
<add>});
<add>
<ide> QUnit.test("itemController specified in template gets a parentController property", function() {
<ide> // using an ObjectController for this test to verify that parentController does accidentally get set
<ide> // on the proxied model.
<ide><path>packages/ember-views/lib/views/legacy_each_view.js
<ide> import { CONTAINER_MAP } from "ember-views/views/collection_view";
<ide>
<ide> export default View.extend({
<ide> template: legacyEachTemplate,
<add> tagName: '',
<ide>
<ide> _arrayController: computed(function() {
<ide> var itemController = this.getAttr('itemController'); | 2 |
Ruby | Ruby | remove unused require | 064b7f6082773f88f216c3aaab38645a364a1395 | <ide><path>actionview/lib/action_view/helpers/sanitize_helper.rb
<ide> # frozen_string_literal: true
<ide>
<ide> require "rails-html-sanitizer"
<del>require "active_support/deprecation"
<ide>
<ide> module ActionView
<ide> # = Action View Sanitize Helpers | 1 |
Ruby | Ruby | use parser to parse args | 92263f51e97daf1005836069625da2ced2e304a6 | <ide><path>Library/Homebrew/dev-cmd/update-test.rb
<ide> #: If `--keep-tmp` is passed, retain the temporary directory containing
<ide> #: the new repository clone.
<ide>
<add>require "cli_parser"
<add>
<ide> module Homebrew
<ide> module_function
<ide>
<ide> def update_test
<add> args = Homebrew::CLI::Parser.parse do
<add> switch "--to-tag"
<add> switch "--keep-tmp"
<add> flag "--commit", required: true
<add> flag "--before", required: true
<add> end
<add>
<ide> ENV["HOMEBREW_UPDATE_TEST"] = "1"
<ide>
<del> if ARGV.include?("--to-tag")
<add> if args.to_tag?
<ide> ENV["HOMEBREW_UPDATE_TO_TAG"] = "1"
<ide> branch = "stable"
<ide> else
<ide> branch = "master"
<ide> end
<ide>
<ide> cd HOMEBREW_REPOSITORY
<del> start_commit = if commit = ARGV.value("commit")
<add> start_commit = if commit = args.commit
<ide> commit
<del> elsif date = ARGV.value("before")
<add> elsif date = args.before
<ide> Utils.popen_read("git", "rev-list", "-n1", "--before=#{date}", "origin/master").chomp
<del> elsif ARGV.include?("--to-tag")
<add> elsif args.to_tag?
<ide> tags = Utils.popen_read("git", "tag", "--list", "--sort=-version:refname")
<ide> previous_tag = tags.lines[1]
<ide> previous_tag ||= begin
<ide> def update_test
<ide> puts "End commit: #{end_commit}"
<ide>
<ide> mktemp("update-test") do |staging|
<del> staging.retain! if ARGV.keep_tmp?
<add> staging.retain! if args.keep_tmp?
<ide> curdir = Pathname.new(Dir.pwd)
<ide>
<ide> oh1 "Setup test environment..." | 1 |
Javascript | Javascript | fix process.title expectation | 2306acfd944f798aec2668cc2190b65ac1516e78 | <ide><path>test/debugger/test-debugger-repl.js
<ide> 'use strict';
<del>require('../common');
<add>const common = require('../common');
<ide> const repl = require('./helper-debugger-repl.js');
<ide>
<ide> repl.startDebugger('breakpoints.js');
<ide> addTest('c', [
<ide>
<ide> // Execute
<ide> addTest('exec process.title', [
<del> /node/
<add> common.isFreeBSD || common.isOSX || common.isLinux ? /node/ : ''
<ide> ]);
<ide>
<ide> // Execute | 1 |
PHP | PHP | add language entry for starts_with rule | 78cb2685aade6416db81222f6f9b09edf9cdbb9c | <ide><path>resources/lang/en/validation.php
<ide> 'string' => 'The :attribute must be :size characters.',
<ide> 'array' => 'The :attribute must contain :size items.',
<ide> ],
<add> 'starts_with' => 'The :attribute must start with one of the following: :values',
<ide> 'string' => 'The :attribute must be a string.',
<ide> 'timezone' => 'The :attribute must be a valid zone.',
<ide> 'unique' => 'The :attribute has already been taken.', | 1 |
Python | Python | fix adversarial training with recent shape changes | e8dd2bf3dd6d8290cd785bf52310a67d3c7d5cc0 | <ide><path>research/adversarial_text/adversarial_losses.py
<ide> def virtual_adversarial_loss(logits, embedded, inputs,
<ide> between the new logits and the original logits.
<ide>
<ide> Args:
<del> logits: 2-D float Tensor, [num_timesteps*batch_size, m], where m=1 if
<add> logits: 3-D float Tensor, [batch_size, num_timesteps, m], where m=1 if
<ide> num_classes=2, otherwise m=num_classes.
<ide> embedded: 3-D float Tensor, [batch_size, num_timesteps, embedding_dim].
<ide> inputs: VatxtInput.
<ide> def virtual_adversarial_loss(logits, embedded, inputs,
<ide> # Only care about the KL divergence on the final timestep.
<ide> weights = inputs.eos_weights
<ide> assert weights is not None
<add> if FLAGS.single_label:
<add> indices = tf.stack([tf.range(FLAGS.batch_size), inputs.length - 1], 1)
<add> weights = tf.expand_dims(tf.gather_nd(inputs.eos_weights, indices), 1)
<ide>
<ide> # Initialize perturbation with random noise.
<ide> # shape(embedded) = (batch_size, num_timesteps, embedding_dim)
<ide> def virtual_adversarial_loss(logits, embedded, inputs,
<ide> for _ in xrange(FLAGS.num_power_iteration):
<ide> d = _scale_l2(
<ide> _mask_by_length(d, inputs.length), FLAGS.small_constant_for_finite_diff)
<add>
<ide> d_logits = logits_from_embedding_fn(embedded + d)
<ide> kl = _kl_divergence_with_logits(logits, d_logits, weights)
<ide> d, = tf.gradients(
<ide> def virtual_adversarial_loss_bidir(logits, embedded, inputs,
<ide> logits = tf.stop_gradient(logits)
<ide> f_inputs, _ = inputs
<ide> weights = f_inputs.eos_weights
<add> if FLAGS.single_label:
<add> indices = tf.stack([tf.range(FLAGS.batch_size), f_inputs.length - 1], 1)
<add> weights = tf.expand_dims(tf.gather_nd(f_inputs.eos_weights, indices), 1)
<ide> assert weights is not None
<ide>
<ide> perturbs = [
<ide> def _kl_divergence_with_logits(q_logits, p_logits, weights):
<ide>
<ide> Args:
<ide> q_logits: logits for 1st argument of KL divergence shape
<del> [num_timesteps * batch_size, num_classes] if num_classes > 2, and
<del> [num_timesteps * batch_size] if num_classes == 2.
<add> [batch_size, num_timesteps, num_classes] if num_classes > 2, and
<add> [batch_size, num_timesteps] if num_classes == 2.
<ide> p_logits: logits for 2nd argument of KL divergence with same shape q_logits.
<del> weights: 1-D float tensor with shape [num_timesteps * batch_size].
<add> weights: 1-D float tensor with shape [batch_size, num_timesteps].
<ide> Elements should be 1.0 only on end of sequences
<ide>
<ide> Returns:
<ide> def _kl_divergence_with_logits(q_logits, p_logits, weights):
<ide> q = tf.nn.sigmoid(q_logits)
<ide> kl = (-tf.nn.sigmoid_cross_entropy_with_logits(logits=q_logits, labels=q) +
<ide> tf.nn.sigmoid_cross_entropy_with_logits(logits=p_logits, labels=q))
<del> kl = tf.squeeze(kl)
<add> kl = tf.squeeze(kl, 2)
<ide>
<ide> # For softmax regression
<ide> else:
<ide> q = tf.nn.softmax(q_logits)
<ide> kl = tf.reduce_sum(
<del> q * (tf.nn.log_softmax(q_logits) - tf.nn.log_softmax(p_logits)), 1)
<add> q * (tf.nn.log_softmax(q_logits) - tf.nn.log_softmax(p_logits)), -1)
<ide>
<ide> num_labels = tf.reduce_sum(weights)
<ide> num_labels = tf.where(tf.equal(num_labels, 0.), 1., num_labels)
<ide>
<del> kl.get_shape().assert_has_rank(1)
<del> weights.get_shape().assert_has_rank(1)
<add> kl.get_shape().assert_has_rank(2)
<add> weights.get_shape().assert_has_rank(2)
<add>
<ide> loss = tf.identity(tf.reduce_sum(weights * kl) / num_labels, name='kl')
<ide> return loss
<ide><path>research/adversarial_text/graphs.py
<ide> def classifier_graph(self):
<ide>
<ide> if FLAGS.single_label:
<ide> indices = tf.stack([tf.range(FLAGS.batch_size), inputs.length - 1], 1)
<del> labels = tf.gather_nd(inputs.labels, indices)
<del> weights = tf.gather_nd(inputs.weights, indices)
<add> labels = tf.expand_dims(tf.gather_nd(inputs.labels, indices), 1)
<add> weights = tf.expand_dims(tf.gather_nd(inputs.weights, indices), 1)
<ide> else:
<ide> labels = inputs.labels
<ide> weights = inputs.weights
<ide> def eval_graph(self, dataset='test'):
<ide>
<ide> if FLAGS.single_label:
<ide> indices = tf.stack([tf.range(FLAGS.batch_size), inputs.length - 1], 1)
<del> labels = tf.gather_nd(inputs.labels, indices)
<del> weights = tf.gather_nd(inputs.weights, indices)
<add> labels = tf.expand_dims(tf.gather_nd(inputs.labels, indices), 1)
<add> weights = tf.expand_dims(tf.gather_nd(inputs.weights, indices), 1)
<ide> else:
<ide> labels = inputs.labels
<ide> weights = inputs.weights
<ide> def cl_loss_from_embedding(self,
<ide> inputs.length)
<ide> if FLAGS.single_label:
<ide> indices = tf.stack([tf.range(FLAGS.batch_size), inputs.length - 1], 1)
<del> lstm_out = tf.gather_nd(lstm_out, indices)
<del> labels = tf.gather_nd(inputs.labels, indices)
<del> weights = tf.gather_nd(inputs.weights, indices)
<add> lstm_out = tf.expand_dims(tf.gather_nd(lstm_out, indices), 1)
<add> labels = tf.expand_dims(tf.gather_nd(inputs.labels, indices), 1)
<add> weights = tf.expand_dims(tf.gather_nd(inputs.weights, indices), 1)
<ide> else:
<ide> labels = inputs.labels
<ide> weights = inputs.weights
<ide><path>research/adversarial_text/layers.py
<ide> def classification_loss(logits, labels, weights):
<ide> # Logistic loss
<ide> if inner_dim == 1:
<ide> loss = tf.nn.sigmoid_cross_entropy_with_logits(
<del> logits=tf.squeeze(logits), labels=tf.cast(labels, tf.float32))
<add> logits=tf.squeeze(logits, -1), labels=tf.cast(labels, tf.float32))
<ide> # Softmax loss
<ide> else:
<ide> loss = tf.nn.sparse_softmax_cross_entropy_with_logits(
<ide> def predictions(logits):
<ide> with tf.name_scope('predictions'):
<ide> # For binary classification
<ide> if inner_dim == 1:
<del> pred = tf.cast(tf.greater(tf.squeeze(logits), 0.5), tf.int64)
<add> pred = tf.cast(tf.greater(tf.squeeze(logits, -1), 0.5), tf.int64)
<ide> # For multi-class classification
<ide> else:
<ide> pred = tf.argmax(logits, 1) | 3 |
Text | Text | standardize git remote names | 91aff68a2548382e1b8b68677321e95d8124f061 | <ide><path>guides/source/contributing_to_ruby_on_rails.md
<ide> Navigate to the Rails [GitHub repository](https://github.com/rails/rails) and pr
<ide> Add the new remote to your local repository on your local machine:
<ide>
<ide> ```bash
<del>$ git remote add mine https://github.com/<your user name>/rails.git
<add>$ git remote add fork https://github.com/<your user name>/rails.git
<ide> ```
<ide>
<del>Push to your remote:
<del>
<del>```bash
<del>$ git push mine my_new_branch
<del>```
<del>
<del>You might have cloned your forked repository into your machine and might want to add the original Rails repository as a remote instead, if that's the case here's what you have to do.
<del>
<del>In the directory you cloned your fork:
<add>You may have cloned your local repository from rails/rails or you may have cloned from your forked repository. To avoid ambiguity, the following git commands assume that you have made a "rails" remote that points to rails/rails.
<ide>
<ide> ```bash
<ide> $ git remote add rails https://github.com/rails/rails.git
<ide> Merge the new content:
<ide> ```bash
<ide> $ git checkout master
<ide> $ git rebase rails/master
<add>$ git checkout my_new_branch
<add>$ git rebase rails/master
<ide> ```
<ide>
<ide> Update your fork:
<ide>
<ide> ```bash
<del>$ git push origin master
<del>```
<del>
<del>If you want to update another branch:
<del>
<del>```bash
<del>$ git checkout branch_name
<del>$ git rebase rails/branch_name
<del>$ git push origin branch_name
<add>$ git push fork master
<add>$ git push fork my_new_branch
<ide> ```
<ide>
<del>
<ide> ### Issue a Pull Request
<ide>
<ide> Navigate to the Rails repository you just pushed to (e.g.
<ide> branches, squashing makes it easier to revert bad commits, and the git history
<ide> can be a bit easier to follow. Rails is a large project, and a bunch of
<ide> extraneous commits can add a lot of noise.
<ide>
<del>In order to do this, you'll need to have a git remote that points at the main
<del>Rails repository. This is useful anyway, but just in case you don't have it set
<del>up, make sure that you do this first:
<del>
<del>```bash
<del>$ git remote add upstream https://github.com/rails/rails.git
<del>```
<del>
<del>You can call this remote whatever you'd like, but if you don't use `upstream`,
<del>then change the name to your own in the instructions below.
<del>
<del>Given that your remote branch is called `my_pull_request`, then you can do the
<del>following:
<del>
<ide> ```bash
<del>$ git fetch upstream
<del>$ git checkout my_pull_request
<del>$ git rebase -i upstream/master
<add>$ git fetch rails
<add>$ git checkout my_new_branch
<add>$ git rebase -i rails/master
<ide>
<ide> < Choose 'squash' for all of your commits except the first one. >
<ide> < Edit the commit message to make sense, and describe all your changes. >
<ide>
<del>$ git push origin my_pull_request -f
<add>$ git push fork my_new_branch -f
<ide> ```
<ide>
<ide> You should be able to refresh the pull request on GitHub and see that it has
<ide> you can force push to your branch on GitHub as described earlier in
<ide> squashing commits section:
<ide>
<ide> ```bash
<del>$ git push origin my_pull_request -f
<add>$ git push fork my_new_branch -f
<ide> ```
<ide>
<ide> This will update the branch and pull request on GitHub with your new code. Do
<ide> note that using force push may result in commits being lost on the remote branch
<ide> If you want to add a fix to older versions of Ruby on Rails, you'll need to set up and switch to your own local tracking branch. Here is an example to switch to the 4-0-stable branch:
<ide>
<ide> ```bash
<del>$ git branch --track 4-0-stable origin/4-0-stable
<add>$ git branch --track 4-0-stable rails/4-0-stable
<ide> $ git checkout 4-0-stable
<ide> ```
<ide> | 1 |
PHP | PHP | setup additional configuration for slack client | d0d1596464794a37a81b894869b3c23047a61ee5 | <ide><path>src/Illuminate/Notifications/Channels/SlackWebhookChannel.php
<ide> protected function buildJsonPayload(SlackMessage $message)
<ide> 'channel' => data_get($message, 'channel'),
<ide> ]);
<ide>
<del> return [
<add> $payload = [
<ide> 'json' => array_merge([
<ide> 'text' => $message->content,
<ide> 'attachments' => $this->attachments($message),
<ide> ], $optionalFields),
<ide> ];
<add>
<add> // Add other configuration settings if available.
<add> if (! empty($message->options)) {
<add> $payload = array_merge($payload, $message->options);
<add> }
<add>
<add> return $payload;
<ide> }
<ide>
<ide> /**
<ide><path>src/Illuminate/Notifications/Messages/SlackMessage.php
<ide> class SlackMessage
<ide> */
<ide> public $attachments = [];
<ide>
<add> /**
<add> * Additional settings for Slack HTTP Client.
<add> *
<add> * @var array
<add> */
<add> public $options = [];
<add>
<ide> /**
<ide> * Indicate that the notification gives information about a successful operation.
<ide> *
<ide> public function color()
<ide> return '#F35A00';
<ide> }
<ide> }
<add>
<add> /**
<add> * Setup additional configuration for Slack HTTP Client.
<add> *
<add> * @param array $options
<add> * @return $this
<add> */
<add> public function options($options)
<add> {
<add> $this->options = $options;
<add>
<add> return $this;
<add> }
<ide> } | 2 |
Javascript | Javascript | add rntester cases for animated easing api | b440ab9784d50195fc1e5f52592e4543f549cd9d | <ide><path>packages/rn-tester/js/RNTesterAppShared.js
<ide> import * as React from 'react';
<ide>
<ide> import RNTesterModuleContainer from './components/RNTesterModuleContainer';
<ide> import RNTesterModuleList from './components/RNTesterModuleList';
<del>import RNTesterNavBar from './components/RNTesterNavbar';
<add>import RNTesterNavBar, {navBarHeight} from './components/RNTesterNavbar';
<ide> import RNTesterList from './utils/RNTesterList';
<ide> import {
<ide> Screens,
<ide> const RNTesterApp = (): React.Node => {
<ide> onBack={activeModule ? handleBackPress : null}
<ide> documentationURL={activeModule?.documentationURL}
<ide> />
<del> {activeModule != null ? (
<del> <RNTesterModuleContainer
<del> module={activeModule}
<del> example={activeModuleExample}
<del> onExampleCardPress={handleModuleExampleCardPress}
<del> />
<del> ) : screen === Screens.BOOKMARKS &&
<del> examplesList.bookmarks.length === 0 ? (
<del> <RNTesterEmptyBookmarksState />
<del> ) : (
<del> <RNTesterModuleList
<del> sections={activeExampleList}
<del> toggleBookmark={toggleBookmark}
<del> handleModuleCardPress={handleModuleCardPress}
<del> />
<del> )}
<add> <View style={styles.container}>
<add> {activeModule != null ? (
<add> <RNTesterModuleContainer
<add> module={activeModule}
<add> example={activeModuleExample}
<add> onExampleCardPress={handleModuleExampleCardPress}
<add> />
<add> ) : screen === Screens.BOOKMARKS &&
<add> examplesList.bookmarks.length === 0 ? (
<add> <RNTesterEmptyBookmarksState />
<add> ) : (
<add> <RNTesterModuleList
<add> sections={activeExampleList}
<add> toggleBookmark={toggleBookmark}
<add> handleModuleCardPress={handleModuleCardPress}
<add> />
<add> )}
<add> </View>
<ide> <View style={styles.bottomNavbar}>
<ide> <RNTesterNavBar
<ide> screen={screen || Screens.COMPONENTS}
<ide> const styles = StyleSheet.create({
<ide> flex: 1,
<ide> },
<ide> bottomNavbar: {
<del> bottom: 0,
<del> width: '100%',
<del> display: 'flex',
<del> flexDirection: 'column',
<del> position: 'absolute',
<add> height: navBarHeight,
<ide> },
<ide> hidden: {
<ide> display: 'none',
<ide><path>packages/rn-tester/js/components/RNTesterNavbar.js
<ide> const RNTesterNavbar = ({
<ide> );
<ide> };
<ide>
<add>export const navBarHeight = 65;
<add>
<ide> const styles = StyleSheet.create({
<ide> floatContainer: {
<ide> flex: 1,
<ide> const styles = StyleSheet.create({
<ide> },
<ide> centerBox: {
<ide> flex: 1,
<del> height: 65,
<add> height: navBarHeight,
<ide> },
<ide> navButton: {
<ide> flex: 1,
<del> height: 65,
<add> height: navBarHeight,
<ide> justifyContent: 'center',
<ide> alignItems: 'center',
<ide> },
<ide> const styles = StyleSheet.create({
<ide> },
<ide> });
<ide>
<del>module.exports = RNTesterNavbar;
<add>export default RNTesterNavbar;
<ide><path>packages/rn-tester/js/examples/Animated/AnimatedIndex.js
<ide> import RotatingImagesExample from './RotatingImagesExample';
<ide> import ContinuousInteractionsExample from './ContinuousInteractionsExample';
<ide> import LoopingExample from './LoopingExample';
<ide> import MovingBoxExample from './MovingBoxExample';
<add>import EasingExample from './EasingExample';
<ide> import FadeInViewExample from './FadeInViewExample';
<ide> import ComposeAnimationsWithEasingExample from './ComposeAnimationsWithEasingExample';
<ide> import TransformBounceExample from './TransformBounceExample';
<ide> export default ({
<ide> examples: [
<ide> FadeInViewExample,
<ide> ComposingExample,
<add> EasingExample,
<ide> ComposeAnimationsWithEasingExample,
<ide> RotatingImagesExample,
<ide> MovingBoxExample,
<ide><path>packages/rn-tester/js/examples/Animated/EasingExample.js
<add>/**
<add> * Copyright (c) Facebook, Inc. and its affiliates.
<add> *
<add> * This source code is licensed under the MIT license found in the
<add> * LICENSE file in the root directory of this source tree.
<add> *
<add> * @flow strict-local
<add> * @format
<add> */
<add>
<add>import type {RNTesterModuleExample} from '../../types/RNTesterTypes';
<add>import * as React from 'react';
<add>import RNTesterButton from '../../components/RNTesterButton';
<add>import ToggleNativeDriver from './utils/ToggleNativeDriver';
<add>import RNTConfigurationBlock from '../../components/RNTConfigurationBlock';
<add>import {
<add> Text,
<add> StyleSheet,
<add> View,
<add> Animated,
<add> SectionList,
<add> Easing,
<add>} from 'react-native';
<add>
<add>type Props = $ReadOnly<{||}>;
<add>
<add>type EasingListItem = {
<add> title: string,
<add> easing: (value: number) => number,
<add>};
<add>
<add>const easingSections = [
<add> {
<add> title: 'Predefined animations',
<add> data: [
<add> // $FlowFixMe[method-unbinding]
<add> {title: 'Bounce', easing: Easing.bounce},
<add> // $FlowFixMe[method-unbinding]
<add> {title: 'Ease', easing: Easing.ease},
<add> {title: 'Elastic', easing: Easing.elastic(4)},
<add> ],
<add> },
<add> {
<add> title: 'Standard functions',
<add> data: [
<add> // $FlowFixMe[method-unbinding]
<add> {title: 'Linear', easing: Easing.linear},
<add> // $FlowFixMe[method-unbinding]
<add> {title: 'Quad', easing: Easing.quad},
<add> // $FlowFixMe[method-unbinding]
<add> {title: 'Cubic', easing: Easing.cubic},
<add> ],
<add> },
<add> {
<add> title: 'Additional functions',
<add> data: [
<add> {
<add> title: 'Bezier',
<add> easing: Easing.bezier(0, 2, 1, -1),
<add> },
<add> // $FlowFixMe[method-unbinding]
<add> {title: 'Circle', easing: Easing.circle},
<add> // $FlowFixMe[method-unbinding]
<add> {title: 'Sin', easing: Easing.sin},
<add> // $FlowFixMe[method-unbinding]
<add> {title: 'Exp', easing: Easing.exp},
<add> ],
<add> },
<add> {
<add> title: 'Combinations',
<add> data: [
<add> {
<add> title: 'In + Bounce',
<add> // $FlowFixMe[method-unbinding]
<add> easing: Easing.in(Easing.bounce),
<add> },
<add> {
<add> title: 'Out + Exp',
<add> // $FlowFixMe[method-unbinding]
<add> easing: Easing.out(Easing.exp),
<add> },
<add> {
<add> title: 'InOut + Elastic',
<add> easing: Easing.inOut(Easing.elastic(1)),
<add> },
<add> ],
<add> },
<add>];
<add>
<add>function EasingItem({
<add> item,
<add> useNativeDriver,
<add>}: {
<add> item: EasingListItem,
<add> useNativeDriver: boolean,
<add>}): React.Node {
<add> const opacityAndScale = React.useRef(new Animated.Value(1));
<add> const animation = React.useRef(
<add> Animated.timing(opacityAndScale.current, {
<add> toValue: 1,
<add> duration: 1200,
<add> easing: item.easing,
<add> useNativeDriver,
<add> }),
<add> );
<add>
<add> const animatedStyles = [
<add> styles.box,
<add> {
<add> opacity: opacityAndScale.current,
<add> transform: [{scale: opacityAndScale.current}],
<add> },
<add> ];
<add>
<add> return (
<add> <View style={styles.itemContainer}>
<add> <View style={styles.itemMeta}>
<add> <Text style={styles.itemTitle}>{item.title}</Text>
<add> <RNTesterButton
<add> onPress={() => {
<add> opacityAndScale.current.setValue(0);
<add> animation.current.start();
<add> }}>
<add> Animate
<add> </RNTesterButton>
<add> </View>
<add> <View style={styles.boxContainer}>
<add> <Animated.View style={animatedStyles} />
<add> </View>
<add> </View>
<add> );
<add>}
<add>
<add>function EasingExample(props: Props): React.Node {
<add> const [useNativeDriver, setUseNativeDriver] = React.useState(false);
<add>
<add> return (
<add> <View>
<add> <RNTConfigurationBlock>
<add> <ToggleNativeDriver
<add> value={useNativeDriver}
<add> onValueChange={setUseNativeDriver}
<add> />
<add> </RNTConfigurationBlock>
<add> <SectionList
<add> sections={easingSections}
<add> renderItem={info => {
<add> const item = (info.item: EasingListItem);
<add>
<add> return (
<add> <EasingItem
<add> key={`${item.title}${useNativeDriver ? 'native' : 'non-native'}`}
<add> item={item}
<add> useNativeDriver={useNativeDriver}
<add> />
<add> );
<add> }}
<add> renderSectionHeader={({section: {title}}) => (
<add> <Text style={styles.sectionHeader}>{title}</Text>
<add> )}
<add> />
<add> </View>
<add> );
<add>}
<add>
<add>const boxSize = 50;
<add>const styles = StyleSheet.create({
<add> sectionHeader: {
<add> paddingHorizontal: 8,
<add> paddingVertical: 4,
<add> backgroundColor: '#f4f4f4',
<add> color: '#999',
<add> fontSize: 12,
<add> },
<add> itemContainer: {
<add> padding: 8,
<add> flexDirection: 'row',
<add> alignItems: 'center',
<add> },
<add> itemMeta: {
<add> flex: 1,
<add> alignItems: 'flex-start',
<add> },
<add> itemTitle: {
<add> fontSize: 18,
<add> fontWeight: '300',
<add> },
<add> boxContainer: {
<add> alignItems: 'center',
<add> justifyContent: 'center',
<add> height: boxSize,
<add> width: boxSize * 2,
<add> },
<add> box: {
<add> borderRadius: 4,
<add> backgroundColor: '#61dafb',
<add> width: boxSize,
<add> height: boxSize,
<add> },
<add>});
<add>
<add>export default ({
<add> title: 'Easing',
<add> name: 'easing',
<add> description:
<add> 'The Easing module implements common easing functions. This module is used by Animated.timing() to convey physically believable motion in animations.',
<add> render: () => <EasingExample />,
<add>}: RNTesterModuleExample); | 4 |
Javascript | Javascript | return typeerror on invalid resolve() input | 758a17f1d5f5985f6d5e3823e0c4759e78252c52 | <ide><path>lib/dns.js
<ide> resolveMap.NAPTR = resolver('queryNaptr');
<ide> resolveMap.SOA = resolver('querySoa');
<ide>
<ide>
<del>function resolve(hostname, type_, callback_) {
<del> var resolver, callback;
<del> if (typeof type_ === 'string') {
<del> resolver = resolveMap[type_];
<del> callback = callback_;
<del> } else if (typeof type_ === 'function') {
<add>function resolve(hostname, rrtype, callback) {
<add> var resolver;
<add> if (typeof rrtype === 'string') {
<add> resolver = resolveMap[rrtype];
<add> } else if (typeof rrtype === 'function') {
<ide> resolver = resolveMap.A;
<del> callback = type_;
<add> callback = rrtype;
<ide> } else {
<del> throw new Error('"type" argument must be a string');
<add> throw new TypeError('"rrtype" argument must be a string');
<ide> }
<ide>
<ide> if (typeof resolver === 'function') {
<ide> return resolver(hostname, callback);
<ide> } else {
<del> throw new Error(`Unknown type "${type_}"`);
<add> throw new Error(`Unknown type "${rrtype}"`);
<ide> }
<ide> }
<ide>
<ide><path>test/parallel/test-dns.js
<ide> assert.doesNotThrow(() => dns.setServers([]));
<ide> assert.deepStrictEqual(dns.getServers(), []);
<ide>
<ide> assert.throws(() => {
<del> dns.resolve('test.com', [], common.mustNotCall());
<del>}, function(err) {
<del> return !(err instanceof TypeError);
<del>}, 'Unexpected error');
<add> dns.resolve('example.com', [], common.mustNotCall());
<add>}, /^TypeError: "rrtype" argument must be a string$/);
<ide>
<ide> // dns.lookup should accept only falsey and string values
<ide> { | 2 |
Javascript | Javascript | fix usage of writebuffer in makesyncwrite | 67b5985c0800b3cfaef5947e2cb6ef61c8cf49f8 | <ide><path>lib/internal/errors.js
<ide> function message(key, args) {
<ide> * @returns {Error}
<ide> */
<ide> function uvException(ctx) {
<del> const err = new Error();
<add> const [ code, uvmsg ] = errmap.get(ctx.errno);
<add> let message = `${code}: ${uvmsg}, ${ctx.syscall}`;
<add>
<add> let path;
<add> let dest;
<add> if (ctx.path) {
<add> path = ctx.path.toString();
<add> message += ` '${path}'`;
<add> }
<add> if (ctx.dest) {
<add> dest = ctx.dest.toString();
<add> message += ` -> '${dest}'`;
<add> }
<add>
<add> // Pass the message to the constructor instead of setting it on the object
<add> // to make sure it is the same as the one created in C++
<add> const err = new Error(message);
<ide>
<ide> for (const prop of Object.keys(ctx)) {
<ide> if (prop === 'message' || prop === 'path' || prop === 'dest') {
<ide> function uvException(ctx) {
<ide> err[prop] = ctx[prop];
<ide> }
<ide>
<del> const [ code, uvmsg ] = errmap.get(ctx.errno);
<ide> err.code = code;
<del> let message = `${code}: ${uvmsg}, ${ctx.syscall}`;
<del> if (ctx.path) {
<del> const path = ctx.path.toString();
<del> message += ` '${path}'`;
<add> if (path) {
<ide> err.path = path;
<ide> }
<del> if (ctx.dest) {
<del> const dest = ctx.dest.toString();
<del> message += ` -> '${dest}'`;
<add> if (dest) {
<ide> err.dest = dest;
<ide> }
<del> err.message = message;
<ide>
<ide> Error.captureStackTrace(err, uvException);
<ide> return err;
<ide><path>lib/internal/net.js
<ide> const Buffer = require('buffer').Buffer;
<ide> const { isIPv6 } = process.binding('cares_wrap');
<ide> const { writeBuffer } = process.binding('fs');
<add>const errors = require('internal/errors');
<ide>
<ide> const octet = '(?:[0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])';
<ide> const re = new RegExp(`^${octet}[.]${octet}[.]${octet}[.]${octet}$`);
<ide> function makeSyncWrite(fd) {
<ide>
<ide> this._bytesDispatched += chunk.length;
<ide>
<del> try {
<del> writeBuffer(fd, chunk, 0, chunk.length, null);
<del> } catch (ex) {
<add> const ctx = {};
<add> writeBuffer(fd, chunk, 0, chunk.length, null, undefined, ctx);
<add> if (ctx.errno !== undefined) {
<add> const ex = errors.uvException(ctx);
<ide> // Legacy: net writes have .code === .errno, whereas writeBuffer gives the
<ide> // raw errno number in .errno.
<del> if (typeof ex.code === 'string')
<del> ex.errno = ex.code;
<add> ex.errno = ex.code;
<ide> return cb(ex);
<ide> }
<ide> cb(); | 2 |
PHP | PHP | add test for | 4e1e4f84fe426b7a31273068ff69aba9f1e6a9e1 | <ide><path>tests/TestCase/ORM/QueryRegressionTest.php
<ide> public function testFindLastOnEmptyTable()
<ide> $this->assertNull($table->find()->last());
<ide> }
<ide>
<add> /**
<add> * Tests calling contain in a nested closure
<add> *
<add> * @see https://github.com/cakephp/cakephp/issues/7591
<add> * @return void
<add> */
<add> public function testContainInNestedClosure()
<add> {
<add> $table = TableRegistry::get('Comments');
<add> $table->belongsTo('Articles');
<add> $table->Articles->belongsTo('Authors');
<add> $table->Articles->Authors->belongsToMany('Tags');
<add>
<add> $query = $table->find()->where(['Comments.id' => 5])->contain(['Articles' => function ($q) {
<add> return $q->contain(['Authors' => function ($q) {
<add> return $q->contain('Tags');
<add> }]);
<add> }]);
<add> $this->assertCount(2, $query->first()->article->author->tags);
<add> }
<add>
<ide> /**
<ide> * Test that the typemaps used in function expressions
<ide> * create the correct results. | 1 |
Python | Python | add tag map for french and italian | 3cef901834e25bb3b7033e606a9332855c45ca82 | <ide><path>spacy/lang/fr/__init__.py
<ide>
<ide> from .tokenizer_exceptions import TOKENIZER_EXCEPTIONS, TOKEN_MATCH
<ide> from .punctuation import TOKENIZER_SUFFIXES, TOKENIZER_INFIXES
<add>from .tag_map import TAG_MAP
<ide> from .stop_words import STOP_WORDS
<ide> from .lex_attrs import LEX_ATTRS
<ide> from .lemmatizer import LOOKUP
<ide> class FrenchDefaults(Language.Defaults):
<ide> lex_attr_getters[LANG] = lambda text: 'fr'
<ide> lex_attr_getters[NORM] = add_lookups(Language.Defaults.lex_attr_getters[NORM], BASE_NORMS)
<ide> tokenizer_exceptions = update_exc(BASE_EXCEPTIONS, TOKENIZER_EXCEPTIONS)
<add> tag_map = TAG_MAP
<ide> stop_words = STOP_WORDS
<ide> infixes = TOKENIZER_INFIXES
<ide> suffixes = TOKENIZER_SUFFIXES
<ide><path>spacy/lang/fr/tag_map.py
<add># coding: utf8
<add>from __future__ import unicode_literals
<add>
<add>
<add>TAG_MAP = {
<add> "ADJ__Gender=Fem|Number=Plur": {"pos": "PRON"},
<add> "ADJ__Gender=Fem|Number=Plur|NumType=Ord": {"pos": "PRON"},
<add> "ADJ__Gender=Fem|Number=Sing": {"pos": "PRON"},
<add> "ADJ__Gender=Fem|Number=Sing|NumType=Ord": {"pos": "PRON"},
<add> "ADJ__Gender=Masc": {"pos": "PRON"},
<add> "ADJ__Gender=Masc|Number=Plur": {"pos": "PRON"},
<add> "ADJ__Gender=Masc|Number=Plur|NumType=Ord": {"pos": "PRON"},
<add> "ADJ__Gender=Masc|Number=Sing": {"pos": "PRON"},
<add> "ADJ__Gender=Masc|Number=Sing|NumType=Card": {"pos": "PRON"},
<add> "ADJ__Gender=Masc|Number=Sing|NumType=Ord": {"pos": "PRON"},
<add> "ADJ__NumType=Card": {"pos": "PRON"},
<add> "ADJ__NumType=Ord": {"pos": "PRON"},
<add> "ADJ__Number=Plur": {"pos": "PRON"},
<add> "ADJ__Number=Sing": {"pos": "PRON"},
<add> "ADJ__Number=Sing|NumType=Ord": {"pos": "PRON"},
<add> "ADJ___": {"pos": "PRON"},
<add> "ADP__Gender=Fem|Number=Plur|Person=3": {"pos": "PRON"},
<add> "ADP__Gender=Masc|Number=Plur|Person=3": {"pos": "PRON"},
<add> "ADP__Gender=Masc|Number=Sing|Person=3": {"pos": "PRON"},
<add> "ADP___": {"pos": "PRON"},
<add> "ADV__Polarity=Neg": {"pos": "PRON"},
<add> "ADV__PronType=Int": {"pos": "PRON"},
<add> "ADV___": {"pos": "PRON"},
<add> "AUX__Gender=Fem|Number=Plur|Tense=Past|VerbForm=Part": {"pos": "PRON"},
<add> "AUX__Gender=Fem|Number=Plur|Tense=Past|VerbForm=Part|Voice=Pass": {"pos": "PRON"},
<add> "AUX__Gender=Fem|Number=Sing|Tense=Past|VerbForm=Part": {"pos": "PRON"},
<add> "AUX__Gender=Fem|Number=Sing|Tense=Past|VerbForm=Part|Voice=Pass": {"pos": "PRON"},
<add> "AUX__Gender=Masc|Number=Plur|Tense=Past|VerbForm=Part": {"pos": "PRON"},
<add> "AUX__Gender=Masc|Number=Plur|Tense=Past|VerbForm=Part|Voice=Pass": {"pos": "PRON"},
<add> "AUX__Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part": {"pos": "PRON"},
<add> "AUX__Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part|Voice=Pass": {"pos": "PRON"},
<add> "AUX__Mood=Cnd|Number=Plur|Person=1|Tense=Pres|VerbForm=Fin": {"pos": "PRON"},
<add> "AUX__Mood=Cnd|Number=Plur|Person=2|Tense=Pres|VerbForm=Fin": {"pos": "PRON"},
<add> "AUX__Mood=Cnd|Number=Plur|Person=3|Tense=Pres|VerbForm=Fin": {"pos": "PRON"},
<add> "AUX__Mood=Cnd|Number=Sing|Person=1|Tense=Pres|VerbForm=Fin": {"pos": "PRON"},
<add> "AUX__Mood=Cnd|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin": {"pos": "PRON"},
<add> "AUX__Mood=Imp|Number=Plur|Person=2|Tense=Pres|VerbForm=Fin": {"pos": "PRON"},
<add> "AUX__Mood=Ind|Number=Plur|Person=1|Tense=Fut|VerbForm=Fin": {"pos": "PRON"},
<add> "AUX__Mood=Ind|Number=Plur|Person=1|Tense=Imp|VerbForm=Fin": {"pos": "PRON"},
<add> "AUX__Mood=Ind|Number=Plur|Person=1|Tense=Pres|VerbForm=Fin": {"pos": "PRON"},
<add> "AUX__Mood=Ind|Number=Plur|Person=2|Tense=Pres|VerbForm=Fin": {"pos": "PRON"},
<add> "AUX__Mood=Ind|Number=Plur|Person=3|Tense=Fut|VerbForm=Fin": {"pos": "PRON"},
<add> "AUX__Mood=Ind|Number=Plur|Person=3|Tense=Imp|VerbForm=Fin": {"pos": "PRON"},
<add> "AUX__Mood=Ind|Number=Plur|Person=3|Tense=Past|VerbForm=Fin": {"pos": "PRON"},
<add> "AUX__Mood=Ind|Number=Plur|Person=3|Tense=Pres|VerbForm=Fin": {"pos": "PRON"},
<add> "AUX__Mood=Ind|Number=Sing|Person=1|Tense=Imp|VerbForm=Fin": {"pos": "PRON"},
<add> "AUX__Mood=Ind|Number=Sing|Person=1|Tense=Pres|VerbForm=Fin": {"pos": "PRON"},
<add> "AUX__Mood=Ind|Number=Sing|Person=2|Tense=Imp|VerbForm=Fin": {"pos": "PRON"},
<add> "AUX__Mood=Ind|Number=Sing|Person=3|Tense=Fut|VerbForm=Fin": {"pos": "PRON"},
<add> "AUX__Mood=Ind|Number=Sing|Person=3|Tense=Imp|VerbForm=Fin": {"pos": "PRON"},
<add> "AUX__Mood=Ind|Number=Sing|Person=3|Tense=Past|VerbForm=Fin": {"pos": "PRON"},
<add> "AUX__Mood=Ind|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin": {"pos": "PRON"},
<add> "AUX__Mood=Sub|Number=Plur|Person=1|Tense=Pres|VerbForm=Fin": {"pos": "PRON"},
<add> "AUX__Mood=Sub|Number=Plur|Person=2|Tense=Pres|VerbForm=Fin": {"pos": "PRON"},
<add> "AUX__Mood=Sub|Number=Plur|Person=3|Tense=Pres|VerbForm=Fin": {"pos": "PRON"},
<add> "AUX__Mood=Sub|Number=Sing|Person=1|Tense=Pres|VerbForm=Fin": {"pos": "PRON"},
<add> "AUX__Mood=Sub|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin": {"pos": "PRON"},
<add> "AUX__Number=Sing|Tense=Past|VerbForm=Part|Voice=Pass": {"pos": "PRON"},
<add> "AUX__Tense=Past|VerbForm=Part": {"pos": "PRON"},
<add> "AUX__Tense=Pres|VerbForm=Part": {"pos": "PRON"},
<add> "AUX__VerbForm=Inf": {"pos": "PRON"},
<add> "CCONJ___": {"pos": "PRON"},
<add> "DET__Definite=Def|Gender=Fem|Number=Sing|PronType=Art": {"pos": "PRON"},
<add> "DET__Definite=Def|Gender=Masc|Number=Sing|PronType=Art": {"pos": "PRON"},
<add> "DET__Definite=Def|Number=Plur|PronType=Art": {"pos": "PRON"},
<add> "DET__Definite=Def|Number=Sing|PronType=Art": {"pos": "PRON"},
<add> "DET__Definite=Ind|Gender=Fem|Number=Plur|PronType=Art": {"pos": "PRON"},
<add> "DET__Definite=Ind|Gender=Fem|Number=Sing|PronType=Art": {"pos": "PRON"},
<add> "DET__Definite=Ind|Gender=Masc|Number=Plur|PronType=Art": {"pos": "PRON"},
<add> "DET__Definite=Ind|Gender=Masc|Number=Sing|PronType=Art": {"pos": "PRON"},
<add> "DET__Definite=Ind|Number=Plur|PronType=Art": {"pos": "PRON"},
<add> "DET__Definite=Ind|Number=Sing|PronType=Art": {"pos": "PRON"},
<add> "DET__Gender=Fem|Number=Plur": {"pos": "PRON"},
<add> "DET__Gender=Fem|Number=Plur|PronType=Int": {"pos": "PRON"},
<add> "DET__Gender=Fem|Number=Sing": {"pos": "PRON"},
<add> "DET__Gender=Fem|Number=Sing|Poss=Yes": {"pos": "PRON"},
<add> "DET__Gender=Fem|Number=Sing|PronType=Dem": {"pos": "PRON"},
<add> "DET__Gender=Fem|Number=Sing|PronType=Int": {"pos": "PRON"},
<add> "DET__Gender=Masc|Number=Plur": {"pos": "PRON"},
<add> "DET__Gender=Masc|Number=Sing": {"pos": "PRON"},
<add> "DET__Gender=Masc|Number=Sing|PronType=Dem": {"pos": "PRON"},
<add> "DET__Gender=Masc|Number=Sing|PronType=Int": {"pos": "PRON"},
<add> "DET__Number=Plur": {"pos": "PRON"},
<add> "DET__Number=Plur|Poss=Yes": {"pos": "PRON"},
<add> "DET__Number=Plur|PronType=Dem": {"pos": "PRON"},
<add> "DET__Number=Sing": {"pos": "PRON"},
<add> "DET__Number=Sing|Poss=Yes": {"pos": "PRON"},
<add> "DET___": {"pos": "PRON"},
<add> "INTJ___": {"pos": "PRON"},
<add> "NOUN__Gender=Fem": {"pos": "PRON"},
<add> "NOUN__Gender=Fem|Number=Plur": {"pos": "PRON"},
<add> "NOUN__Gender=Fem|Number=Sing": {"pos": "PRON"},
<add> "NOUN__Gender=Masc": {"pos": "PRON"},
<add> "NOUN__Gender=Masc|Number=Plur": {"pos": "PRON"},
<add> "NOUN__Gender=Masc|Number=Plur|NumType=Card": {"pos": "PRON"},
<add> "NOUN__Gender=Masc|Number=Sing": {"pos": "PRON"},
<add> "NOUN__Gender=Masc|Number=Sing|NumType=Card": {"pos": "PRON"},
<add> "NOUN__NumType=Card": {"pos": "PRON"},
<add> "NOUN__Number=Plur": {"pos": "PRON"},
<add> "NOUN__Number=Sing": {"pos": "PRON"},
<add> "NOUN___": {"pos": "PRON"},
<add> "NUM__Gender=Masc|Number=Plur|NumType=Card": {"pos": "PRON"},
<add> "NUM__NumType=Card": {"pos": "PRON"},
<add> "PART___": {"pos": "PRON"},
<add> "PRON__Gender=Fem|Number=Plur": {"pos": "PRON"},
<add> "PRON__Gender=Fem|Number=Plur|Person=3": {"pos": "PRON"},
<add> "PRON__Gender=Fem|Number=Plur|Person=3|PronType=Prs": {"pos": "PRON"},
<add> "PRON__Gender=Fem|Number=Plur|Person=3|PronType=Rel": {"pos": "PRON"},
<add> "PRON__Gender=Fem|Number=Plur|PronType=Dem": {"pos": "PRON"},
<add> "PRON__Gender=Fem|Number=Plur|PronType=Rel": {"pos": "PRON"},
<add> "PRON__Gender=Fem|Number=Sing|Person=3": {"pos": "PRON"},
<add> "PRON__Gender=Fem|Number=Sing|Person=3|PronType=Prs": {"pos": "PRON"},
<add> "PRON__Gender=Fem|Number=Sing|PronType=Dem": {"pos": "PRON"},
<add> "PRON__Gender=Fem|Number=Sing|PronType=Rel": {"pos": "PRON"},
<add> "PRON__Gender=Fem|PronType=Rel": {"pos": "PRON"},
<add> "PRON__Gender=Masc|Number=Plur": {"pos": "PRON"},
<add> "PRON__Gender=Masc|Number=Plur|Person=3": {"pos": "PRON"},
<add> "PRON__Gender=Masc|Number=Plur|Person=3|PronType=Prs": {"pos": "PRON"},
<add> "PRON__Gender=Masc|Number=Plur|Person=3|PronType=Rel": {"pos": "PRON"},
<add> "PRON__Gender=Masc|Number=Plur|PronType=Dem": {"pos": "PRON"},
<add> "PRON__Gender=Masc|Number=Plur|PronType=Rel": {"pos": "PRON"},
<add> "PRON__Gender=Masc|Number=Sing": {"pos": "PRON"},
<add> "PRON__Gender=Masc|Number=Sing|Person=3": {"pos": "PRON"},
<add> "PRON__Gender=Masc|Number=Sing|Person=3|PronType=Dem": {"pos": "PRON"},
<add> "PRON__Gender=Masc|Number=Sing|Person=3|PronType=Prs": {"pos": "PRON"},
<add> "PRON__Gender=Masc|Number=Sing|PronType=Dem": {"pos": "PRON"},
<add> "PRON__Gender=Masc|Number=Sing|PronType=Rel": {"pos": "PRON"},
<add> "PRON__Gender=Masc|PronType=Rel": {"pos": "PRON"},
<add> "PRON__NumType=Card|PronType=Rel": {"pos": "PRON"},
<add> "PRON__Number=Plur|Person=1": {"pos": "PRON"},
<add> "PRON__Number=Plur|Person=1|PronType=Prs": {"pos": "PRON"},
<add> "PRON__Number=Plur|Person=1|Reflex=Yes": {"pos": "PRON"},
<add> "PRON__Number=Plur|Person=2": {"pos": "PRON"},
<add> "PRON__Number=Plur|Person=2|PronType=Prs": {"pos": "PRON"},
<add> "PRON__Number=Plur|Person=2|Reflex=Yes": {"pos": "PRON"},
<add> "PRON__Number=Plur|Person=3": {"pos": "PRON"},
<add> "PRON__Number=Plur|PronType=Rel": {"pos": "PRON"},
<add> "PRON__Number=Sing|Person=1": {"pos": "PRON"},
<add> "PRON__Number=Sing|Person=1|PronType=Prs": {"pos": "PRON"},
<add> "PRON__Number=Sing|Person=1|Reflex=Yes": {"pos": "PRON"},
<add> "PRON__Number=Sing|Person=2|PronType=Prs": {"pos": "PRON"},
<add> "PRON__Number=Sing|Person=3": {"pos": "PRON"},
<add> "PRON__Number=Sing|PronType=Dem": {"pos": "PRON"},
<add> "PRON__Number=Sing|PronType=Rel": {"pos": "PRON"},
<add> "PRON__Person=3": {"pos": "PRON"},
<add> "PRON__Person=3|Reflex=Yes": {"pos": "PRON"},
<add> "PRON__PronType=Int": {"pos": "PRON"},
<add> "PRON__PronType=Rel": {"pos": "PRON"},
<add> "PRON___": {"pos": "PRON"},
<add> "PROPN__Gender=Fem|Number=Plur": {"pos": "PRON"},
<add> "PROPN__Gender=Fem|Number=Sing": {"pos": "PRON"},
<add> "PROPN__Gender=Masc": {"pos": "PRON"},
<add> "PROPN__Gender=Masc|Number=Plur": {"pos": "PRON"},
<add> "PROPN__Gender=Masc|Number=Sing": {"pos": "PRON"},
<add> "PROPN__Number=Plur": {"pos": "PRON"},
<add> "PROPN__Number=Sing": {"pos": "PRON"},
<add> "PROPN___": {"pos": "PRON"},
<add> "PUNCT___": {"pos": "PRON"},
<add> "SCONJ___": {"pos": "PRON"},
<add> "VERB__Gender=Fem|Number=Plur|Tense=Past|VerbForm=Part": {"pos": "PRON"},
<add> "VERB__Gender=Fem|Number=Plur|Tense=Past|VerbForm=Part|Voice=Pass": {"pos": "PRON"},
<add> "VERB__Gender=Fem|Number=Sing|Tense=Past|VerbForm=Part": {"pos": "PRON"},
<add> "VERB__Gender=Fem|Number=Sing|Tense=Past|VerbForm=Part|Voice=Pass": {"pos": "PRON"},
<add> "VERB__Gender=Masc|Number=Plur|Tense=Past|VerbForm=Part": {"pos": "PRON"},
<add> "VERB__Gender=Masc|Number=Plur|Tense=Past|VerbForm=Part|Voice=Pass": {"pos": "PRON"},
<add> "VERB__Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part": {"pos": "PRON"},
<add> "VERB__Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part|Voice=Pass": {"pos": "PRON"},
<add> "VERB__Gender=Masc|Tense=Past|VerbForm=Part": {"pos": "PRON"},
<add> "VERB__Gender=Masc|Tense=Past|VerbForm=Part|Voice=Pass": {"pos": "PRON"},
<add> "VERB__Mood=Cnd|Number=Plur|Person=3|Tense=Pres|VerbForm=Fin": {"pos": "PRON"},
<add> "VERB__Mood=Cnd|Number=Sing|Person=1|Tense=Pres|VerbForm=Fin": {"pos": "PRON"},
<add> "VERB__Mood=Cnd|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin": {"pos": "PRON"},
<add> "VERB__Mood=Imp|Number=Plur|Person=1|Tense=Pres|VerbForm=Fin": {"pos": "PRON"},
<add> "VERB__Mood=Imp|Number=Plur|Person=2|Tense=Pres|VerbForm=Fin": {"pos": "PRON"},
<add> "VERB__Mood=Imp|VerbForm=Fin": {"pos": "PRON"},
<add> "VERB__Mood=Ind|Number=Plur|Person=1|Tense=Fut|VerbForm=Fin": {"pos": "PRON"},
<add> "VERB__Mood=Ind|Number=Plur|Person=1|Tense=Imp|VerbForm=Fin": {"pos": "PRON"},
<add> "VERB__Mood=Ind|Number=Plur|Person=1|Tense=Pres|VerbForm=Fin": {"pos": "PRON"},
<add> "VERB__Mood=Ind|Number=Plur|Person=2|Tense=Fut|VerbForm=Fin": {"pos": "PRON"},
<add> "VERB__Mood=Ind|Number=Plur|Person=2|Tense=Imp|VerbForm=Fin": {"pos": "PRON"},
<add> "VERB__Mood=Ind|Number=Plur|Person=2|Tense=Pres|VerbForm=Fin": {"pos": "PRON"},
<add> "VERB__Mood=Ind|Number=Plur|Person=3|Tense=Fut|VerbForm=Fin": {"pos": "PRON"},
<add> "VERB__Mood=Ind|Number=Plur|Person=3|Tense=Imp|VerbForm=Fin": {"pos": "PRON"},
<add> "VERB__Mood=Ind|Number=Plur|Person=3|Tense=Past|VerbForm=Fin": {"pos": "PRON"},
<add> "VERB__Mood=Ind|Number=Plur|Person=3|Tense=Pres|VerbForm=Fin": {"pos": "PRON"},
<add> "VERB__Mood=Ind|Number=Sing|Person=1|Tense=Fut|VerbForm=Fin": {"pos": "PRON"},
<add> "VERB__Mood=Ind|Number=Sing|Person=1|Tense=Imp|VerbForm=Fin": {"pos": "PRON"},
<add> "VERB__Mood=Ind|Number=Sing|Person=1|Tense=Pres|VerbForm=Fin": {"pos": "PRON"},
<add> "VERB__Mood=Ind|Number=Sing|Person=3|Tense=Fut|VerbForm=Fin": {"pos": "PRON"},
<add> "VERB__Mood=Ind|Number=Sing|Person=3|Tense=Imp|VerbForm=Fin": {"pos": "PRON"},
<add> "VERB__Mood=Ind|Number=Sing|Person=3|Tense=Past|VerbForm=Fin": {"pos": "PRON"},
<add> "VERB__Mood=Ind|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin": {"pos": "PRON"},
<add> "VERB__Mood=Ind|Person=3|Tense=Pres|VerbForm=Fin": {"pos": "PRON"},
<add> "VERB__Mood=Ind|Person=3|VerbForm=Fin": {"pos": "PRON"},
<add> "VERB__Mood=Ind|VerbForm=Fin": {"pos": "PRON"},
<add> "VERB__Mood=Sub|Number=Plur|Person=3|Tense=Pres|VerbForm=Fin": {"pos": "PRON"},
<add> "VERB__Mood=Sub|Number=Sing|Person=1|Tense=Pres|VerbForm=Fin": {"pos": "PRON"},
<add> "VERB__Mood=Sub|Number=Sing|Person=3|Tense=Past|VerbForm=Fin": {"pos": "PRON"},
<add> "VERB__Mood=Sub|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin": {"pos": "PRON"},
<add> "VERB__Number=Plur|Tense=Past|VerbForm=Part": {"pos": "PRON"},
<add> "VERB__Number=Plur|Tense=Past|VerbForm=Part|Voice=Pass": {"pos": "PRON"},
<add> "VERB__Number=Sing|Tense=Past|VerbForm=Part": {"pos": "PRON"},
<add> "VERB__Number=Sing|Tense=Past|VerbForm=Part|Voice=Pass": {"pos": "PRON"},
<add> "VERB__Tense=Past|VerbForm=Part": {"pos": "PRON"},
<add> "VERB__Tense=Past|VerbForm=Part|Voice=Pass": {"pos": "PRON"},
<add> "VERB__Tense=Pres|VerbForm=Part": {"pos": "PRON"},
<add> "VERB__VerbForm=Inf": {"pos": "PRON"},
<add> "VERB__VerbForm=Part": {"pos": "PRON"},
<add> "X___": {"pos": "PRON"},
<add> "_SP": {"pos": "PRON"}
<add>}
<ide><path>spacy/lang/it/__init__.py
<ide>
<ide> from .stop_words import STOP_WORDS
<ide> from .lemmatizer import LOOKUP
<add>from .tag_map import TAG_MAP
<ide>
<ide> from ..tokenizer_exceptions import BASE_EXCEPTIONS
<ide> from ..norm_exceptions import BASE_NORMS
<ide> class ItalianDefaults(Language.Defaults):
<ide> tokenizer_exceptions = update_exc(BASE_EXCEPTIONS)
<ide> stop_words = STOP_WORDS
<ide> lemma_lookup = LOOKUP
<add> tag_map = TAG_MAP
<ide>
<ide>
<ide> class Italian(Language):
<ide><path>spacy/lang/it/tag_map.py
<add># coding: utf8
<add>from __future__ import unicode_literals
<add>
<add>
<add>TAG_MAP = {
<add> "AP__Gender=Fem|Number=Plur|Poss=Yes|PronType=Prs": {"pos": "AP"},
<add> "AP__Gender=Fem|Number=Sing|Poss=Yes|PronType=Prs": {"pos": "AP"},
<add> "AP__Gender=Masc|Number=Plur|Poss=Yes|PronType=Prs": {"pos": "AP"},
<add> "AP__Gender=Masc|Number=Sing|Poss=Yes|PronType=Prs": {"pos": "AP"},
<add> "AP__Gender=Masc|Poss=Yes|PronType=Prs": {"pos": "AP"},
<add> "AP__Number=Sing|Poss=Yes|PronType=Prs": {"pos": "AP"},
<add> "AP__Poss=Yes|PronType=Prs": {"pos": "AP"},
<add> "A__Degree=Abs|Gender=Fem|Number=Plur": {"pos": "A"},
<add> "A__Degree=Abs|Gender=Fem|Number=Sing": {"pos": "A"},
<add> "A__Degree=Abs|Gender=Masc|Number=Plur": {"pos": "A"},
<add> "A__Degree=Abs|Gender=Masc|Number=Sing": {"pos": "A"},
<add> "A__Degree=Cmp": {"pos": "A"},
<add> "A__Degree=Cmp|Number=Plur": {"pos": "A"},
<add> "A__Degree=Cmp|Number=Sing": {"pos": "A"},
<add> "A__Gender=Fem|Number=Plur": {"pos": "A"},
<add> "A__Gender=Fem|Number=Sing": {"pos": "A"},
<add> "A__Gender=Fem|Number=Sing|Poss=Yes|PronType=Prs": {"pos": "A"},
<add> "A__Gender=Masc": {"pos": "A"},
<add> "A__Gender=Masc|Number=Plur": {"pos": "A"},
<add> "A__Gender=Masc|Number=Sing": {"pos": "A"},
<add> "A__Number=Plur": {"pos": "A"},
<add> "A__Number=Sing": {"pos": "A"},
<add> "A___": {"pos": "A"},
<add> "BN__PronType=Neg": {"pos": "BN"},
<add> "B__Degree=Abs": {"pos": "B"},
<add> "B__Degree=Abs|Gender=Masc|Number=Sing": {"pos": "B"},
<add> "B___": {"pos": "B"},
<add> "CC___": {"pos": "CC"},
<add> "CS___": {"pos": "CS"},
<add> "DD__Gender=Fem|Number=Plur|PronType=Dem": {"pos": "DD"},
<add> "DD__Gender=Fem|Number=Sing|PronType=Dem": {"pos": "DD"},
<add> "DD__Gender=Masc|Number=Plur|PronType=Dem": {"pos": "DD"},
<add> "DD__Gender=Masc|Number=Sing|PronType=Dem": {"pos": "DD"},
<add> "DD__Gender=Masc|PronType=Dem": {"pos": "DD"},
<add> "DD__Number=Plur|PronType=Dem": {"pos": "DD"},
<add> "DD__Number=Sing|PronType=Dem": {"pos": "DD"},
<add> "DE__PronType=Exc": {"pos": "DE"},
<add> "DI__Definite=Def|Gender=Fem|Number=Plur|PronType=Art": {"pos": "DI"},
<add> "DI__Gender=Fem|Number=Plur": {"pos": "DI"},
<add> "DI__Gender=Fem|Number=Plur|PronType=Ind": {"pos": "DI"},
<add> "DI__Gender=Fem|Number=Sing|PronType=Ind": {"pos": "DI"},
<add> "DI__Gender=Masc|Number=Plur": {"pos": "DI"},
<add> "DI__Gender=Masc|Number=Plur|PronType=Ind": {"pos": "DI"},
<add> "DI__Gender=Masc|Number=Sing|PronType=Ind": {"pos": "DI"},
<add> "DI__Number=Sing|PronType=Art": {"pos": "DI"},
<add> "DI__Number=Sing|PronType=Ind": {"pos": "DI"},
<add> "DI__PronType=Ind": {"pos": "DI"},
<add> "DQ__Gender=Fem|Number=Plur|PronType=Int": {"pos": "DQ"},
<add> "DQ__Gender=Fem|Number=Sing|PronType=Int": {"pos": "DQ"},
<add> "DQ__Gender=Masc|Number=Plur|PronType=Int": {"pos": "DQ"},
<add> "DQ__Gender=Masc|Number=Sing|PronType=Int": {"pos": "DQ"},
<add> "DQ__Number=Plur|PronType=Int": {"pos": "DQ"},
<add> "DQ__Number=Sing|PronType=Int": {"pos": "DQ"},
<add> "DQ__PronType=Int": {"pos": "DQ"},
<add> "DQ___": {"pos": "DQ"},
<add> "DR__Number=Plur|PronType=Rel": {"pos": "DR"},
<add> "DR__PronType=Rel": {"pos": "DR"},
<add> "E__Gender=Masc|Number=Sing": {"pos": "E"},
<add> "E___": {"pos": "E"},
<add> "FB___": {"pos": "FB"},
<add> "FC___": {"pos": "FC"},
<add> "FF___": {"pos": "FF"},
<add> "FS___": {"pos": "FS"},
<add> "I__Polarity=Neg": {"pos": "I"},
<add> "I__Polarity=Pos": {"pos": "I"},
<add> "I___": {"pos": "I"},
<add> "NO__Gender=Fem|Number=Plur|NumType=Ord": {"pos": "NO"},
<add> "NO__Gender=Fem|Number=Sing|NumType=Ord": {"pos": "NO"},
<add> "NO__Gender=Masc|Number=Plur": {"pos": "NO"},
<add> "NO__Gender=Masc|Number=Plur|NumType=Ord": {"pos": "NO"},
<add> "NO__Gender=Masc|Number=Sing|NumType=Ord": {"pos": "NO"},
<add> "NO__NumType=Ord": {"pos": "NO"},
<add> "NO__Number=Sing|NumType=Ord": {"pos": "NO"},
<add> "NO___": {"pos": "NO"},
<add> "N__Gender=Masc|Number=Sing": {"pos": "N"},
<add> "N__NumType=Card": {"pos": "N"},
<add> "N__NumType=Range": {"pos": "N"},
<add> "N___": {"pos": "N"},
<add> "PART___": {"pos": "PART"},
<add> "PC__Clitic=Yes|Definite=Def|Gender=Fem|Number=Plur|PronType=Art": {"pos": "PC"},
<add> "PC__Clitic=Yes|Gender=Fem|Number=Plur|Person=3|PronType=Prs": {"pos": "PC"},
<add> "PC__Clitic=Yes|Gender=Fem|Number=Plur|PronType=Prs": {"pos": "PC"},
<add> "PC__Clitic=Yes|Gender=Fem|Number=Sing|Person=3|PronType=Prs": {"pos": "PC"},
<add> "PC__Clitic=Yes|Gender=Fem|Person=3|PronType=Prs": {"pos": "PC"},
<add> "PC__Clitic=Yes|Gender=Masc|Number=Plur|Person=3|PronType=Prs": {"pos": "PC"},
<add> "PC__Clitic=Yes|Gender=Masc|Number=Sing|Person=3|PronType=Prs": {"pos": "PC"},
<add> "PC__Clitic=Yes|Gender=Masc|Number=Sing|PronType=Prs": {"pos": "PC"},
<add> "PC__Clitic=Yes|Number=Plur|Person=1|PronType=Prs": {"pos": "PC"},
<add> "PC__Clitic=Yes|Number=Plur|Person=2|PronType=Prs": {"pos": "PC"},
<add> "PC__Clitic=Yes|Number=Plur|Person=3|PronType=Prs": {"pos": "PC"},
<add> "PC__Clitic=Yes|Number=Plur|PronType=Prs": {"pos": "PC"},
<add> "PC__Clitic=Yes|Number=Sing|Person=1|PronType=Prs": {"pos": "PC"},
<add> "PC__Clitic=Yes|Number=Sing|Person=2|PronType=Prs": {"pos": "PC"},
<add> "PC__Clitic=Yes|Number=Sing|Person=3|PronType=Prs": {"pos": "PC"},
<add> "PC__Clitic=Yes|Person=3|PronType=Prs": {"pos": "PC"},
<add> "PC__Clitic=Yes|PronType=Prs": {"pos": "PC"},
<add> "PD__Gender=Fem|Number=Plur|PronType=Dem": {"pos": "PD"},
<add> "PD__Gender=Fem|Number=Sing|PronType=Dem": {"pos": "PD"},
<add> "PD__Gender=Masc|Number=Plur|PronType=Dem": {"pos": "PD"},
<add> "PD__Gender=Masc|Number=Sing|PronType=Dem": {"pos": "PD"},
<add> "PD__Number=Plur|PronType=Dem": {"pos": "PD"},
<add> "PD__Number=Sing|PronType=Dem": {"pos": "PD"},
<add> "PD__PronType=Dem": {"pos": "PD"},
<add> "PE__Gender=Fem|Number=Plur|Person=3|PronType=Prs": {"pos": "PE"},
<add> "PE__Gender=Fem|Number=Sing|Person=3|PronType=Prs": {"pos": "PE"},
<add> "PE__Gender=Masc|Number=Plur|Person=3|PronType=Prs": {"pos": "PE"},
<add> "PE__Gender=Masc|Number=Sing|Person=3|PronType=Prs": {"pos": "PE"},
<add> "PE__Number=Plur|Person=1|PronType=Prs": {"pos": "PE"},
<add> "PE__Number=Plur|Person=2|PronType=Prs": {"pos": "PE"},
<add> "PE__Number=Plur|Person=3|PronType=Prs": {"pos": "PE"},
<add> "PE__Number=Sing|Person=1|PronType=Prs": {"pos": "PE"},
<add> "PE__Number=Sing|Person=2|PronType=Prs": {"pos": "PE"},
<add> "PE__Number=Sing|Person=3|PronType=Prs": {"pos": "PE"},
<add> "PE__Person=3|PronType=Prs": {"pos": "PE"},
<add> "PE__PronType=Prs": {"pos": "PE"},
<add> "PI__Gender=Fem|Number=Plur|PronType=Ind": {"pos": "PI"},
<add> "PI__Gender=Fem|Number=Sing|PronType=Ind": {"pos": "PI"},
<add> "PI__Gender=Masc|Number=Plur|PronType=Ind": {"pos": "PI"},
<add> "PI__Gender=Masc|Number=Sing": {"pos": "PI"},
<add> "PI__Gender=Masc|Number=Sing|PronType=Ind": {"pos": "PI"},
<add> "PI__Number=Plur|PronType=Ind": {"pos": "PI"},
<add> "PI__Number=Sing|PronType=Ind": {"pos": "PI"},
<add> "PI__PronType=Ind": {"pos": "PI"},
<add> "PP__Gender=Fem|Number=Sing|Poss=Yes|PronType=Prs": {"pos": "PP"},
<add> "PP__Gender=Masc|Number=Plur|Poss=Yes|PronType=Prs": {"pos": "PP"},
<add> "PP__Gender=Masc|Number=Sing|Poss=Yes|PronType=Prs": {"pos": "PP"},
<add> "PP__Number=Plur|Poss=Yes|PronType=Prs": {"pos": "PP"},
<add> "PP__Number=Sing|Poss=Yes|PronType=Prs": {"pos": "PP"},
<add> "PQ__Gender=Fem|Number=Plur|PronType=Int": {"pos": "PQ"},
<add> "PQ__Gender=Fem|Number=Sing|PronType=Int": {"pos": "PQ"},
<add> "PQ__Gender=Masc|Number=Plur|PronType=Int": {"pos": "PQ"},
<add> "PQ__Gender=Masc|Number=Sing|PronType=Int": {"pos": "PQ"},
<add> "PQ__Number=Plur|PronType=Int": {"pos": "PQ"},
<add> "PQ__Number=Sing|PronType=Int": {"pos": "PQ"},
<add> "PQ__PronType=Int": {"pos": "PQ"},
<add> "PR__Gender=Masc|Number=Plur|PronType=Rel": {"pos": "PR"},
<add> "PR__Gender=Masc|Number=Sing|PronType=Rel": {"pos": "PR"},
<add> "PR__Gender=Masc|PronType=Rel": {"pos": "PR"},
<add> "PR__Number=Plur|PronType=Rel": {"pos": "PR"},
<add> "PR__Number=Sing|PronType=Rel": {"pos": "PR"},
<add> "PR__Person=3|PronType=Rel": {"pos": "PR"},
<add> "PR__PronType=Rel": {"pos": "PR"},
<add> "RD__Definite=Def": {"pos": "RD"},
<add> "RD__Definite=Def|Gender=Fem": {"pos": "RD"},
<add> "RD__Definite=Def|Gender=Fem|Number=Plur|PronType=Art": {"pos": "RD"},
<add> "RD__Definite=Def|Gender=Fem|Number=Sing|PronType=Art": {"pos": "RD"},
<add> "RD__Definite=Def|Gender=Masc|Number=Plur|PronType=Art": {"pos": "RD"},
<add> "RD__Definite=Def|Gender=Masc|Number=Sing|PronType=Art": {"pos": "RD"},
<add> "RD__Definite=Def|Number=Plur|PronType=Art": {"pos": "RD"},
<add> "RD__Definite=Def|Number=Sing|PronType=Art": {"pos": "RD"},
<add> "RD__Definite=Def|PronType=Art": {"pos": "RD"},
<add> "RD__Gender=Fem|Number=Sing": {"pos": "RD"},
<add> "RD__Gender=Masc|Number=Sing": {"pos": "RD"},
<add> "RD__Number=Sing": {"pos": "RD"},
<add> "RD__Number=Sing|PronType=Art": {"pos": "RD"},
<add> "RI__Definite=Ind|Gender=Fem|Number=Plur|PronType=Art": {"pos": "RI"},
<add> "RI__Definite=Ind|Gender=Fem|Number=Sing|PronType=Art": {"pos": "RI"},
<add> "RI__Definite=Ind|Gender=Masc|Number=Plur|PronType=Art": {"pos": "RI"},
<add> "RI__Definite=Ind|Gender=Masc|Number=Sing|PronType=Art": {"pos": "RI"},
<add> "RI__Definite=Ind|Number=Sing|PronType=Art": {"pos": "RI"},
<add> "RI__Definite=Ind|PronType=Art": {"pos": "RI"},
<add> "SP__Gender=Fem|Number=Plur": {"pos": "SP"},
<add> "SP__NumType=Card": {"pos": "SP"},
<add> "SP___": {"pos": "SP"},
<add> "SW__Foreign=Yes": {"pos": "SW"},
<add> "SW__Foreign=Yes|Gender=Masc": {"pos": "SW"},
<add> "SW__Foreign=Yes|Number=Sing": {"pos": "SW"},
<add> "SYM___": {"pos": "SYM"},
<add> "S__Gender=Fem": {"pos": "S"},
<add> "S__Gender=Fem|Number=Plur": {"pos": "S"},
<add> "S__Gender=Fem|Number=Sing": {"pos": "S"},
<add> "S__Gender=Masc": {"pos": "S"},
<add> "S__Gender=Masc|Number=Plur": {"pos": "S"},
<add> "S__Gender=Masc|Number=Sing": {"pos": "S"},
<add> "S__Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part": {"pos": "S"},
<add> "S__Number=Plur": {"pos": "S"},
<add> "S__Number=Sing": {"pos": "S"},
<add> "S___": {"pos": "S"},
<add> "Sw___": {"pos": "Sw"},
<add> "T__Gender=Fem|Number=Plur|PronType=Tot": {"pos": "T"},
<add> "T__Gender=Fem|Number=Sing": {"pos": "T"},
<add> "T__Gender=Fem|Number=Sing|PronType=Tot": {"pos": "T"},
<add> "T__Gender=Masc|Number=Plur|PronType=Tot": {"pos": "T"},
<add> "T__Gender=Masc|Number=Sing|PronType=Tot": {"pos": "T"},
<add> "T__Number=Plur|PronType=Tot": {"pos": "T"},
<add> "T__PronType=Tot": {"pos": "T"},
<add> "VA__Gender=Fem|Number=Plur|Tense=Past|VerbForm=Part": {"pos": "VA"},
<add> "VA__Gender=Fem|Number=Sing|Tense=Past|VerbForm=Part": {"pos": "VA"},
<add> "VA__Gender=Masc|Number=Plur|Tense=Past|VerbForm=Part": {"pos": "VA"},
<add> "VA__Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part": {"pos": "VA"},
<add> "VA__Mood=Cnd|Number=Plur|Person=1|Tense=Pres|VerbForm=Fin": {"pos": "VA"},
<add> "VA__Mood=Cnd|Number=Plur|Person=3|Tense=Pres|VerbForm=Fin": {"pos": "VA"},
<add> "VA__Mood=Cnd|Number=Sing|Person=1|Tense=Pres|VerbForm=Fin": {"pos": "VA"},
<add> "VA__Mood=Cnd|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin": {"pos": "VA"},
<add> "VA__Mood=Ind|Number=Plur|Person=1|Tense=Fut|VerbForm=Fin": {"pos": "VA"},
<add> "VA__Mood=Ind|Number=Plur|Person=1|Tense=Imp|VerbForm=Fin": {"pos": "VA"},
<add> "VA__Mood=Ind|Number=Plur|Person=1|Tense=Pres|VerbForm=Fin": {"pos": "VA"},
<add> "VA__Mood=Ind|Number=Plur|Person=2|Tense=Fut|VerbForm=Fin": {"pos": "VA"},
<add> "VA__Mood=Ind|Number=Plur|Person=2|Tense=Imp|VerbForm=Fin": {"pos": "VA"},
<add> "VA__Mood=Ind|Number=Plur|Person=2|Tense=Pres|VerbForm=Fin": {"pos": "VA"},
<add> "VA__Mood=Ind|Number=Plur|Person=3|Tense=Fut|VerbForm=Fin": {"pos": "VA"},
<add> "VA__Mood=Ind|Number=Plur|Person=3|Tense=Imp|VerbForm=Fin": {"pos": "VA"},
<add> "VA__Mood=Ind|Number=Plur|Person=3|Tense=Past|VerbForm=Fin": {"pos": "VA"},
<add> "VA__Mood=Ind|Number=Plur|Person=3|Tense=Pres|VerbForm=Fin": {"pos": "VA"},
<add> "VA__Mood=Ind|Number=Sing|Person=1|Tense=Fut|VerbForm=Fin": {"pos": "VA"},
<add> "VA__Mood=Ind|Number=Sing|Person=1|Tense=Imp|VerbForm=Fin": {"pos": "VA"},
<add> "VA__Mood=Ind|Number=Sing|Person=1|Tense=Past|VerbForm=Fin": {"pos": "VA"},
<add> "VA__Mood=Ind|Number=Sing|Person=1|Tense=Pres|VerbForm=Fin": {"pos": "VA"},
<add> "VA__Mood=Ind|Number=Sing|Person=2|Tense=Fut|VerbForm=Fin": {"pos": "VA"},
<add> "VA__Mood=Ind|Number=Sing|Person=2|Tense=Pres|VerbForm=Fin": {"pos": "VA"},
<add> "VA__Mood=Ind|Number=Sing|Person=3|Tense=Fut|VerbForm=Fin": {"pos": "VA"},
<add> "VA__Mood=Ind|Number=Sing|Person=3|Tense=Imp|VerbForm=Fin": {"pos": "VA"},
<add> "VA__Mood=Ind|Number=Sing|Person=3|Tense=Past|VerbForm=Fin": {"pos": "VA"},
<add> "VA__Mood=Ind|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin": {"pos": "VA"},
<add> "VA__Mood=Sub|Number=Plur|Person=2|Tense=Pres|VerbForm=Fin": {"pos": "VA"},
<add> "VA__Mood=Sub|Number=Plur|Person=3|Tense=Imp|VerbForm=Fin": {"pos": "VA"},
<add> "VA__Mood=Sub|Number=Plur|Person=3|Tense=Pres|VerbForm=Fin": {"pos": "VA"},
<add> "VA__Mood=Sub|Number=Sing|Person=1|Tense=Imp|VerbForm=Fin": {"pos": "VA"},
<add> "VA__Mood=Sub|Number=Sing|Person=3|Tense=Imp|VerbForm=Fin": {"pos": "VA"},
<add> "VA__Mood=Sub|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin": {"pos": "VA"},
<add> "VA__VerbForm=Ger": {"pos": "VA"},
<add> "VA__VerbForm=Inf": {"pos": "VA"},
<add> "VM__Gender=Fem|Number=Sing|Tense=Past|VerbForm=Part": {"pos": "VM"},
<add> "VM__Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part": {"pos": "VM"},
<add> "VM__Mood=Cnd|Number=Plur|Person=1|Tense=Pres|VerbForm=Fin": {"pos": "VM"},
<add> "VM__Mood=Cnd|Number=Plur|Person=2|Tense=Pres|VerbForm=Fin": {"pos": "VM"},
<add> "VM__Mood=Cnd|Number=Plur|Person=3|Tense=Pres|VerbForm=Fin": {"pos": "VM"},
<add> "VM__Mood=Cnd|Number=Sing|Person=1|Tense=Pres|VerbForm=Fin": {"pos": "VM"},
<add> "VM__Mood=Cnd|Number=Sing|Person=2|Tense=Pres|VerbForm=Fin": {"pos": "VM"},
<add> "VM__Mood=Cnd|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin": {"pos": "VM"},
<add> "VM__Mood=Imp|Number=Plur|Person=2|Tense=Pres|VerbForm=Fin": {"pos": "VM"},
<add> "VM__Mood=Imp|Number=Sing|Person=2|Tense=Pres|VerbForm=Fin": {"pos": "VM"},
<add> "VM__Mood=Ind|Number=Plur|Person=1|Tense=Fut|VerbForm=Fin": {"pos": "VM"},
<add> "VM__Mood=Ind|Number=Plur|Person=1|Tense=Imp|VerbForm=Fin": {"pos": "VM"},
<add> "VM__Mood=Ind|Number=Plur|Person=1|Tense=Pres|VerbForm=Fin": {"pos": "VM"},
<add> "VM__Mood=Ind|Number=Plur|Person=2|Tense=Fut|VerbForm=Fin": {"pos": "VM"},
<add> "VM__Mood=Ind|Number=Plur|Person=2|Tense=Pres|VerbForm=Fin": {"pos": "VM"},
<add> "VM__Mood=Ind|Number=Plur|Person=3|Tense=Fut|VerbForm=Fin": {"pos": "VM"},
<add> "VM__Mood=Ind|Number=Plur|Person=3|Tense=Imp|VerbForm=Fin": {"pos": "VM"},
<add> "VM__Mood=Ind|Number=Plur|Person=3|Tense=Past|VerbForm=Fin": {"pos": "VM"},
<add> "VM__Mood=Ind|Number=Plur|Person=3|Tense=Pres|VerbForm=Fin": {"pos": "VM"},
<add> "VM__Mood=Ind|Number=Sing|Person=1|Tense=Imp|VerbForm=Fin": {"pos": "VM"},
<add> "VM__Mood=Ind|Number=Sing|Person=1|Tense=Pres|VerbForm=Fin": {"pos": "VM"},
<add> "VM__Mood=Ind|Number=Sing|Person=2|Tense=Fut|VerbForm=Fin": {"pos": "VM"},
<add> "VM__Mood=Ind|Number=Sing|Person=2|Tense=Imp|VerbForm=Fin": {"pos": "VM"},
<add> "VM__Mood=Ind|Number=Sing|Person=2|Tense=Pres|VerbForm=Fin": {"pos": "VM"},
<add> "VM__Mood=Ind|Number=Sing|Person=3|Tense=Fut|VerbForm=Fin": {"pos": "VM"},
<add> "VM__Mood=Ind|Number=Sing|Person=3|Tense=Imp|VerbForm=Fin": {"pos": "VM"},
<add> "VM__Mood=Ind|Number=Sing|Person=3|Tense=Past|VerbForm=Fin": {"pos": "VM"},
<add> "VM__Mood=Ind|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin": {"pos": "VM"},
<add> "VM__Mood=Sub|Number=Plur|Person=1|Tense=Imp|VerbForm=Fin": {"pos": "VM"},
<add> "VM__Mood=Sub|Number=Plur|Person=1|Tense=Pres|VerbForm=Fin": {"pos": "VM"},
<add> "VM__Mood=Sub|Number=Plur|Person=3|Tense=Imp|VerbForm=Fin": {"pos": "VM"},
<add> "VM__Mood=Sub|Number=Plur|Person=3|Tense=Pres|VerbForm=Fin": {"pos": "VM"},
<add> "VM__Mood=Sub|Number=Sing|Person=3|Tense=Imp|VerbForm=Fin": {"pos": "VM"},
<add> "VM__Mood=Sub|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin": {"pos": "VM"},
<add> "VM__VerbForm=Ger": {"pos": "VM"},
<add> "VM__VerbForm=Inf": {"pos": "VM"},
<add> "V__Gender=Fem|Number=Plur|Tense=Past|VerbForm=Part": {"pos": "V"},
<add> "V__Gender=Fem|Number=Sing|Tense=Past|VerbForm=Part": {"pos": "V"},
<add> "V__Gender=Masc|Number=Plur|Tense=Past|VerbForm=Fin": {"pos": "V"},
<add> "V__Gender=Masc|Number=Plur|Tense=Past|VerbForm=Part": {"pos": "V"},
<add> "V__Gender=Masc|Number=Sing|Tense=Past|VerbForm=Part": {"pos": "V"},
<add> "V__Mood=Cnd|Number=Plur|Person=1|Tense=Pres|VerbForm=Fin": {"pos": "V"},
<add> "V__Mood=Cnd|Number=Plur|Person=2|Tense=Pres|VerbForm=Fin": {"pos": "V"},
<add> "V__Mood=Cnd|Number=Plur|Person=3|Tense=Pres|VerbForm=Fin": {"pos": "V"},
<add> "V__Mood=Cnd|Number=Sing|Person=1|Tense=Pres|VerbForm=Fin": {"pos": "V"},
<add> "V__Mood=Cnd|Number=Sing|Person=2|Tense=Pres|VerbForm=Fin": {"pos": "V"},
<add> "V__Mood=Cnd|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin": {"pos": "V"},
<add> "V__Mood=Imp|Number=Plur|Person=1|Tense=Pres|VerbForm=Fin": {"pos": "V"},
<add> "V__Mood=Imp|Number=Plur|Person=2|Tense=Pres|VerbForm=Fin": {"pos": "V"},
<add> "V__Mood=Imp|Number=Sing|Person=2|Tense=Pres|VerbForm=Fin": {"pos": "V"},
<add> "V__Mood=Imp|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin": {"pos": "V"},
<add> "V__Mood=Ind|Number=Plur|Person=1|Tense=Fut|VerbForm=Fin": {"pos": "V"},
<add> "V__Mood=Ind|Number=Plur|Person=1|Tense=Imp|VerbForm=Fin": {"pos": "V"},
<add> "V__Mood=Ind|Number=Plur|Person=1|Tense=Past|VerbForm=Fin": {"pos": "V"},
<add> "V__Mood=Ind|Number=Plur|Person=1|Tense=Pres|VerbForm=Fin": {"pos": "V"},
<add> "V__Mood=Ind|Number=Plur|Person=2|Tense=Fut|VerbForm=Fin": {"pos": "V"},
<add> "V__Mood=Ind|Number=Plur|Person=2|Tense=Pres|VerbForm=Fin": {"pos": "V"},
<add> "V__Mood=Ind|Number=Plur|Person=3|Tense=Fut|VerbForm=Fin": {"pos": "V"},
<add> "V__Mood=Ind|Number=Plur|Person=3|Tense=Imp|VerbForm=Fin": {"pos": "V"},
<add> "V__Mood=Ind|Number=Plur|Person=3|Tense=Past|VerbForm=Fin": {"pos": "V"},
<add> "V__Mood=Ind|Number=Plur|Person=3|Tense=Pres|VerbForm=Fin": {"pos": "V"},
<add> "V__Mood=Ind|Number=Sing|Person=1|Tense=Fut|VerbForm=Fin": {"pos": "V"},
<add> "V__Mood=Ind|Number=Sing|Person=1|Tense=Imp|VerbForm=Fin": {"pos": "V"},
<add> "V__Mood=Ind|Number=Sing|Person=1|Tense=Past|VerbForm=Fin": {"pos": "V"},
<add> "V__Mood=Ind|Number=Sing|Person=1|Tense=Pres|VerbForm=Fin": {"pos": "V"},
<add> "V__Mood=Ind|Number=Sing|Person=2|Tense=Fut|VerbForm=Fin": {"pos": "V"},
<add> "V__Mood=Ind|Number=Sing|Person=2|Tense=Pres|VerbForm=Fin": {"pos": "V"},
<add> "V__Mood=Ind|Number=Sing|Person=3|Tense=Fut|VerbForm=Fin": {"pos": "V"},
<add> "V__Mood=Ind|Number=Sing|Person=3|Tense=Imp|VerbForm=Fin": {"pos": "V"},
<add> "V__Mood=Ind|Number=Sing|Person=3|Tense=Past|VerbForm=Fin": {"pos": "V"},
<add> "V__Mood=Ind|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin": {"pos": "V"},
<add> "V__Mood=Ind|Person=3|Tense=Pres|VerbForm=Fin": {"pos": "V"},
<add> "V__Mood=Ind|Tense=Pres|VerbForm=Fin": {"pos": "V"},
<add> "V__Mood=Sub|Number=Plur|Person=1|Tense=Imp|VerbForm=Fin": {"pos": "V"},
<add> "V__Mood=Sub|Number=Plur|Person=1|Tense=Pres|VerbForm=Fin": {"pos": "V"},
<add> "V__Mood=Sub|Number=Plur|Person=2|Tense=Imp|VerbForm=Fin": {"pos": "V"},
<add> "V__Mood=Sub|Number=Plur|Person=2|Tense=Pres|VerbForm=Fin": {"pos": "V"},
<add> "V__Mood=Sub|Number=Plur|Person=3|Tense=Imp|VerbForm=Fin": {"pos": "V"},
<add> "V__Mood=Sub|Number=Plur|Person=3|Tense=Pres|VerbForm=Fin": {"pos": "V"},
<add> "V__Mood=Sub|Number=Sing|Person=1|Tense=Imp|VerbForm=Fin": {"pos": "V"},
<add> "V__Mood=Sub|Number=Sing|Person=1|Tense=Pres|VerbForm=Fin": {"pos": "V"},
<add> "V__Mood=Sub|Number=Sing|Person=2|Tense=Pres|VerbForm=Fin": {"pos": "V"},
<add> "V__Mood=Sub|Number=Sing|Person=3|Tense=Imp|VerbForm=Fin": {"pos": "V"},
<add> "V__Mood=Sub|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin": {"pos": "V"},
<add> "V__Mood=Sub|Number=Sing|Person=3|VerbForm=Fin": {"pos": "V"},
<add> "V__Number=Plur|Tense=Pres|VerbForm=Part": {"pos": "V"},
<add> "V__Number=Sing|Tense=Pres|VerbForm=Part": {"pos": "V"},
<add> "V__Tense=Past|VerbForm=Part": {"pos": "V"},
<add> "V__VerbForm=Ger": {"pos": "V"},
<add> "V__VerbForm=Inf": {"pos": "V"},
<add> "X___": {"pos": "X"},
<add> "_SP": {"pos": "_SP"}
<add>} | 4 |
Java | Java | fix root_path constant to match recent changes | 67e482aaf898876080c1b36634ed711c0a41267d | <ide><path>spring-web/src/main/java/org/springframework/http/server/reactive/DefaultPathSegmentContainer.java
<ide> class DefaultPathSegmentContainer implements PathSegmentContainer {
<ide> new DefaultPathSegmentContainer("", Collections.emptyList());
<ide>
<ide> private static final PathSegmentContainer ROOT_PATH =
<del> new DefaultPathSegmentContainer("/", Collections.singletonList(EMPTY_PATH_SEGMENT));
<add> new DefaultPathSegmentContainer("/", Collections.emptyList());
<ide>
<ide>
<ide> private final String path;
<ide><path>spring-web/src/test/java/org/springframework/http/server/reactive/DefaultPathSegmentContainerTests.java
<ide> public void path() throws Exception {
<ide> testPath("/a/b/c", "/a/b/c", false, true, Arrays.asList("a", "b", "c"), false);
<ide>
<ide> // root path
<del> testPath("/", "/", false, true, Collections.singletonList(""), false);
<add> testPath("/", "/", false, true, Collections.emptyList(), false);
<ide>
<ide> // empty path
<ide> testPath("", "", true, false, Collections.emptyList(), false); | 2 |
Mixed | Text | fix wrong doc and message about rails profiler | 0f47c496781fda4ce35f79bea9fb03e6b29c1867 | <ide><path>guides/source/performance_testing.md
<ide> Usage: rails profiler 'Ruby.code' 'Ruby.more_code' ... [OPTS]
<ide> Default: 1
<ide> -o, --output PATH Directory to use when writing the results.
<ide> Default: tmp/performance
<del> --metrics a,b,c Metrics to use.
<add> -m, --metrics a,b,c Metrics to use.
<ide> Default: process_time,memory,objects
<del> -m, --formats x,y,z Formats to output to.
<add> -f, --formats x,y,z Formats to output to.
<ide> Default: flat,graph_html,call_tree
<ide> ```
<ide>
<ide><path>railties/lib/rails/commands/profiler.rb
<ide> def options
<ide> defaults = ActiveSupport::Testing::Performance::DEFAULTS
<ide>
<ide> OptionParser.new do |opt|
<del> opt.banner = "Usage: rails benchmarker 'Ruby.code' 'Ruby.more_code' ... [OPTS]"
<add> opt.banner = "Usage: rails profiler 'Ruby.code' 'Ruby.more_code' ... [OPTS]"
<ide> opt.on('-r', '--runs N', Numeric, 'Number of runs.', "Default: #{defaults[:runs]}") { |r| options[:runs] = r }
<ide> opt.on('-o', '--output PATH', String, 'Directory to use when writing the results.', "Default: #{defaults[:output]}") { |o| options[:output] = o }
<ide> opt.on('-m', '--metrics a,b,c', Array, 'Metrics to use.', "Default: #{defaults[:metrics].join(",")}") { |m| options[:metrics] = m.map(&:to_sym) } | 2 |
Go | Go | remove unused mounted function in overlay | 5cc082473068b00dee123f8388a79d7a48842a57 | <ide><path>daemon/graphdriver/overlay/overlay.go
<ide> func (d *Driver) Get(id string, mountLabel string) (s string, err error) {
<ide> return mergedDir, nil
<ide> }
<ide>
<del>func (d *Driver) mounted(dir string) (bool, error) {
<del> return graphdriver.Mounted(graphdriver.FsMagicOverlay, dir)
<del>}
<del>
<ide> // Put unmounts the mount path created for the give id.
<ide> func (d *Driver) Put(id string) error {
<ide> mountpoint := path.Join(d.dir(id), "merged") | 1 |
PHP | PHP | remove iron config. moved to package | 36db347a0c5f4088ee1befe81cf735aa65dd5149 | <ide><path>config/queue.php
<ide> | syntax for each one. Here you may set the default queue driver.
<ide> |
<ide> | Supported: "null", "sync", "database", "beanstalkd",
<del> | "sqs", "iron", "redis"
<add> | "sqs", "redis"
<ide> |
<ide> */
<ide>
<ide> 'region' => 'us-east-1',
<ide> ],
<ide>
<del> 'iron' => [
<del> 'driver' => 'iron',
<del> 'host' => 'mq-aws-us-east-1.iron.io',
<del> 'token' => 'your-token',
<del> 'project' => 'your-project-id',
<del> 'queue' => 'your-queue-name',
<del> 'encrypt' => true,
<del> ],
<del>
<ide> 'redis' => [
<ide> 'driver' => 'redis',
<ide> 'connection' => 'default', | 1 |
Text | Text | add mlm pretraining xla torch readme | 16c0efca2c307aabc4d338c807f19a27ae0790f4 | <ide><path>examples/flax/language-modeling/README.md
<ide> This should take less than 18 hours.
<ide> Training statistics can be accessed on [tfhub.de](https://tensorboard.dev/experiment/GdYmdak2TWeVz0DDRYOrrg).
<ide>
<ide> For a step-by-step walkthrough of how to do masked language modeling in Flax, please have a
<del>look at [this TODO: (Patrick)]() google colab.
<add>look at [this](https://colab.research.google.com/github/huggingface/notebooks/blob/master/examples/masked_language_modeling_flax.ipynb) google colab.
<ide>
<ide>
<del>## TODO(Patrick): Add comparison with PyTorch GPU/TPU
<add>## Runtime evaluation
<add>
<add>We also ran masked language modeling using PyTorch/XLA on a TPUv3-8, and PyTorch on 8 V100 GPUs. We report the
<add>overall training time below.
<add>For reproducibility, we state the training commands used for PyTorch/XLA and PyTorch further below.
<add>
<add>| Task | [TPU v3-8 (Flax)](https://tensorboard.dev/experiment/GdYmdak2TWeVz0DDRYOrrg/) | [TPU v3-8 (Pytorch/XLA)](https://tensorboard.dev/experiment/7Jq1kcQQRAmy12KOdXek7A/)| [8 GPU (PyTorch)](https://tensorboard.dev/experiment/PJneV8FQRxa2unPw1QnVHA) |
<add>|-------|-----------|------------|------------|
<add>| MLM | 15h32m | 23h46m | 44h14m |
<add>| **COST*** | $124.24 | $187.84 | $877.92 |
<add>
<add>*All experiments are run on Google Cloud Platform. Prices are on-demand prices
<add>(not preemptible), obtained on May 12, 2021 for zone Iowa (us-central1) using
<add>the following tables:
<add>[TPU pricing table](https://cloud.google.com/tpu/pricing) ($8.00/h for v3-8),
<add>[GPU pricing table](https://cloud.google.com/compute/gpus-pricing) ($2.48/h per
<add>V100 GPU). GPU experiments are run without further optimizations besides JAX
<add>transformations. GPU experiments are run with full precision (fp32). "TPU v3-8"
<add>are 8 TPU cores on 4 chips (each chip has 2 cores), while "8 GPU" are 8 GPU chips.
<add>
<add>### Script to run MLM with PyTorch/XLA on TPUv3-8
<add>
<add>For comparison one can run the same pre-training with PyTorch/XLA on TPU. To set up PyTorch/XLA on Cloud TPU VMs, please
<add>refer to [this](https://cloud.google.com/tpu/docs/pytorch-xla-ug-tpu-vm) guide.
<add>Having created the tokenizer and configuration in `norwegian-roberta-base`, we create the following symbolic links:
<add>
<add>```bash
<add>ln -s ~/transformers/examples/pytorch/language-modeling/run_mlm.py ./
<add>ln -s ~/transformers/examples/pytorch/xla_spawn.py ./
<add>```
<add>
<add>, set the following environment variables:
<add>
<add>```bash
<add>export XRT_TPU_CONFIG="localservice;0;localhost:51011"
<add>unset LD_PRELOAD
<add>
<add>export NUM_TPUS=8
<add>export TOKENIZERS_PARALLELISM=0
<add>export MODEL_DIR="./norwegian-roberta-base"
<add>mkdir -p ${MODEL_DIR}
<add>```
<add>
<add>, and start training as follows:
<add>
<add>```bash
<add>python3 xla_spawn.py --num_cores ${NUM_TPUS} run_mlm.py --output_dir="./runs" \
<add> --model_type="roberta" \
<add> --config_name="${MODEL_DIR}" \
<add> --tokenizer_name="${MODEL_DIR}" \
<add> --dataset_name="oscar" \
<add> --dataset_config_name="unshuffled_deduplicated_no" \
<add> --max_seq_length="128" \
<add> --weight_decay="0.01" \
<add> --per_device_train_batch_size="128" \
<add> --per_device_eval_batch_size="128" \
<add> --learning_rate="3e-4" \
<add> --warmup_steps="1000" \
<add> --overwrite_output_dir \
<add> --num_train_epochs="18" \
<add> --adam_beta1="0.9" \
<add> --adam_beta2="0.98" \
<add> --do_train \
<add> --do_eval \
<add> --logging_steps="500" \
<add> --evaluation_strategy="epoch" \
<add> --report_to="tensorboard" \
<add> --save_strategy="no"
<add>```
<add>
<add>### Script to compare pre-training with PyTorch on 8 GPU V100's
<add>
<add>For comparison you can run the same pre-training with PyTorch on GPU. Note that we have to make use of `gradient_accumulation_steps`
<add>because the maximum batch size that fits on a single V100 GPU is 32 instead of 128.
<add>Having created the tokenizer and configuration in `norwegian-roberta-base`, we create the following symbolic links:
<add>
<add>```bash
<add>ln -s ~/transformers/examples/pytorch/language-modeling/run_mlm.py ./
<add>```
<add>
<add>, set some environment variables:
<add>
<add>```bash
<add>export NUM_GPUS=8
<add>export TOKENIZERS_PARALLELISM=0
<add>export MODEL_DIR="./norwegian-roberta-base"
<add>mkdir -p ${MODEL_DIR}
<add>```
<add>
<add>, and can start training as follows:
<add>
<add>```bash
<add>python3 -m torch.distributed.launch --nproc_per_node ${NUM_GPUS} run_mlm.py \
<add> --output_dir="./runs" \
<add> --model_type="roberta" \
<add> --config_name="${MODEL_DIR}" \
<add> --tokenizer_name="${MODEL_DIR}" \
<add> --dataset_name="oscar" \
<add> --dataset_config_name="unshuffled_deduplicated_no" \
<add> --max_seq_length="128" \
<add> --weight_decay="0.01" \
<add> --per_device_train_batch_size="32" \
<add> --per_device_eval_batch_size="32" \
<add> --gradient_accumulation_steps="4" \
<add> --learning_rate="3e-4" \
<add> --warmup_steps="1000" \
<add> --overwrite_output_dir \
<add> --num_train_epochs="18" \
<add> --adam_beta1="0.9" \
<add> --adam_beta2="0.98" \
<add> --do_train \
<add> --do_eval \
<add> --logging_steps="500" \
<add> --evaluation_strategy="steps" \
<add> --report_to="tensorboard" \
<add> --save_strategy="no"
<add>```
<ide><path>examples/flax/text-classification/README.md
<ide> overall training time below. For comparison we ran Pytorch's [run_glue.py](https
<ide> *All experiments are ran on Google Cloud Platform. Prices are on-demand prices
<ide> (not preemptible), obtained on May 12, 2021 for zone Iowa (us-central1) using
<ide> the following tables:
<del>[TPU pricing table](https://cloud.google.com/tpu/pricing) ($2.40/h for v3-8),
<add>[TPU pricing table](https://cloud.google.com/tpu/pricing) ($8.00/h for v3-8),
<ide> [GPU pricing table](https://cloud.google.com/compute/gpus-pricing) ($2.48/h per
<ide> V100 GPU). GPU experiments are ran without further optimizations besides JAX
<ide> transformations. GPU experiments are ran with full precision (fp32). "TPU v3-8" | 2 |
Ruby | Ruby | remove unused variables | 7adef79e1413db36f25a363f96136a1df85ea1a2 | <ide><path>actionpack/test/controller/live_stream_test.rb
<ide> def test_abort_with_full_buffer
<ide> t = Thread.new(@response) { |resp|
<ide> resp.await_commit
<ide> _, _, body = resp.to_a
<del> body.each do |part|
<add> body.each do
<ide> @controller.latch.wait
<ide> body.close
<ide> break
<ide> def test_ignore_client_disconnect
<ide> t = Thread.new(@response) { |resp|
<ide> resp.await_commit
<ide> _, _, body = resp.to_a
<del> body.each do |part|
<add> body.each do
<ide> body.close
<ide> break
<ide> end | 1 |
Text | Text | fix broken links in readme | 50b206d3739660cdf089b0a3f8a5bb21d6970e00 | <ide><path>README.md
<ide> OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
<ide> [sandbox]: http://restframework.herokuapp.com/
<ide>
<ide> [index]: http://www.django-rest-framework.org/
<del>[oauth1-section]: http://www.django-rest-framework.org/api-guide/authentication.html#oauthauthentication
<del>[oauth2-section]: http://www.django-rest-framework.org/api-guide/authentication.html#oauth2authentication
<del>[serializer-section]: http://www.django-rest-framework.org/api-guide/serializers.html#serializers
<del>[modelserializer-section]: http://www.django-rest-framework.org/api-guide/serializers.html#modelserializer
<del>[functionview-section]: http://www.django-rest-framework.org/api-guide/views.html#function-based-views
<del>[generic-views]: http://www.django-rest-framework.org/api-guide/generic-views.html
<del>[viewsets]: http://www.django-rest-framework.org/api-guide/viewsets.html
<del>[routers]: http://www.django-rest-framework.org/api-guide/routers.html
<del>[serializers]: http://www.django-rest-framework.org/api-guide/serializers.html
<del>[authentication]: http://www.django-rest-framework.org/api-guide/authentication.html
<del>
<del>[rest-framework-2-announcement]: http://www.django-rest-framework.org/topics/rest-framework-2-announcement.html
<add>[oauth1-section]: http://www.django-rest-framework.org/api-guide/authentication/#oauthauthentication
<add>[oauth2-section]: http://www.django-rest-framework.org/api-guide/authentication/#oauth2authentication
<add>[serializer-section]: http://www.django-rest-framework.org/api-guide/serializers/#serializers
<add>[modelserializer-section]: http://www.django-rest-framework.org/api-guide/serializers/#modelserializer
<add>[functionview-section]: http://www.django-rest-framework.org/api-guide/views/#function-based-views
<add>[generic-views]: http://www.django-rest-framework.org/api-guide/generic-views/
<add>[viewsets]: http://www.django-rest-framework.org/api-guide/viewsets/
<add>[routers]: http://www.django-rest-framework.org/api-guide/routers/
<add>[serializers]: http://www.django-rest-framework.org/api-guide/serializers/
<add>[authentication]: http://www.django-rest-framework.org/api-guide/authentication/
<add>
<add>[rest-framework-2-announcement]: http://www.django-rest-framework.org/topics/rest-framework-2-announcement
<ide> [2.1.0-notes]: https://groups.google.com/d/topic/django-rest-framework/Vv2M0CMY9bg/discussion
<ide> [image]: http://www.django-rest-framework.org/img/quickstart.png
<ide> | 1 |
Mixed | Javascript | accept `lookup` option for `tls.connect()` | e600fbe576e7806a01e11708b65830aeae96b590 | <ide><path>doc/api/tls.md
<ide> decrease overall server throughput.
<ide> <!-- YAML
<ide> added: v0.11.3
<ide> changes:
<add> - version: REPLACEME
<add> pr-url: https://github.com/nodejs/node/pull/12839
<add> description: The `lookup` option is supported now.
<ide> - version: REPLACEME
<ide> pr-url: https://github.com/nodejs/node/pull/11984
<ide> description: The `ALPNProtocols` and `NPNProtocols` options can
<ide> changes:
<ide> `tls.createSecureContext()`. *Note*: In effect, all
<ide> [`tls.createSecureContext()`][] options can be provided, but they will be
<ide> _completely ignored_ unless the `secureContext` option is missing.
<add> * `lookup`: {Function} Custom lookup function. Defaults to [`dns.lookup()`][].
<ide> * ...: Optional [`tls.createSecureContext()`][] options can be provided, see
<ide> the `secureContext` option for more information.
<ide> * `callback` {Function}
<ide> where `secure_socket` has the same API as `pair.cleartext`.
<ide> [modifying the default cipher suite]: #tls_modifying_the_default_tls_cipher_suite
<ide> [specific attacks affecting larger AES key sizes]: https://www.schneier.com/blog/archives/2009/07/another_new_aes.html
<ide> [tls.Server]: #tls_class_tls_server
<add>[`dns.lookup()`]: dns.html#dns_dns_lookup_hostname_options_callback
<ide><path>lib/_tls_wrap.js
<ide> exports.connect = function(...args /* [port,] [host,] [options,] [cb] */) {
<ide> port: options.port,
<ide> host: options.host,
<ide> family: options.family,
<del> localAddress: options.localAddress
<add> localAddress: options.localAddress,
<add> lookup: options.lookup
<ide> };
<ide> }
<ide> socket.connect(connect_opt, function() {
<ide><path>test/parallel/test-tls-lookup.js
<add>'use strict';
<add>const common = require('../common');
<add>const assert = require('assert');
<add>const tls = require('tls');
<add>
<add>const expectedError = /^TypeError: "lookup" option should be a function$/;
<add>
<add>['foobar', 1, {}, []].forEach(function connectThrows(input) {
<add> const opts = {
<add> host: 'localhost',
<add> port: common.PORT,
<add> lookup: input
<add> };
<add>
<add> assert.throws(function() {
<add> tls.connect(opts);
<add> }, expectedError);
<add>});
<add>
<add>connectDoesNotThrow(common.mustCall(() => {}));
<add>
<add>function connectDoesNotThrow(input) {
<add> const opts = {
<add> host: 'localhost',
<add> port: common.PORT,
<add> lookup: input
<add> };
<add>
<add> assert.doesNotThrow(function() {
<add> tls.connect(opts);
<add> });
<add>} | 3 |
Javascript | Javascript | fix reference to root global context | 94728125696ce05f5308ccbc111874e9fa936936 | <ide><path>lib/module.js
<ide> Module.prototype._compile = function (content, filename) {
<ide> sandbox.__filename = filename;
<ide> sandbox.__dirname = dirname;
<ide> sandbox.module = self;
<del> sandbox.root = sandbox;
<add> sandbox.root = global;
<ide>
<ide> Script.runInNewContext(content, sandbox, filename);
<ide> | 1 |
Ruby | Ruby | fix 1.8.6 compatibilty issue | 1e51ccb30d0a8bcbbb843d2cff1739cfdc94e38b | <ide><path>Library/Homebrew/cmd/cleanup.rb
<ide> def can_cleanup?
<ide> elsif opt_prefix.directory?
<ide> # SHA records were added to INSTALL_RECEIPTS the same day as opt symlinks
<ide> !Formula.installed.
<del> select{ |ff| ff.deps.map(&:to_s).include? name }.
<add> select{ |ff| ff.deps.map{ |d| d.to_s }.include? name }.
<ide> map{ |ff| ff.rack.children rescue [] }.
<ide> flatten.
<ide> map{ |keg_path| Tab.for_keg(keg_path).send("HEAD") }. | 1 |
PHP | PHP | add test to increase coverage | 5428f608b1d533e189a209e142a12aa9ae5ebc75 | <ide><path>tests/TestCase/ORM/Association/HasManyTest.php
<ide> public function testSaveAssociatedEmptySetWithReplaceStrategyRemovesAssociatedRe
<ide> ]);
<ide> $this->assertEmpty($entity->get('comments'));
<ide> }
<add>
<add> /**
<add> * Tests that providing an invalid strategy throws an exception
<add> *
<add> * @return void
<add> */
<add> public function testInvalidStrategy()
<add> {
<add> $this->expectException(\InvalidArgumentException::class);
<add> $articles = TableRegistry::get('Articles');
<add>
<add> $association = $articles->hasMany('Comments');
<add> $association->setStrategy('anotherThing');
<add> }
<ide> } | 1 |
Javascript | Javascript | fix code style in todomvc flux example | bc11793c04236270996f506e7bd4b6bceddc9543 | <ide><path>examples/todomvc-flux/js/components/Header.react.js
<ide> var Header = React.createClass({
<ide> * @param {string} text
<ide> */
<ide> _onSave: function(text) {
<del> if(text.trim()){
<add> if (text.trim()){
<ide> TodoActions.create(text);
<ide> }
<del>
<add>
<ide> }
<ide>
<ide> }); | 1 |
Ruby | Ruby | add a test case for the scope enum adds | 09447929a06a4650d82ed51af56e3365ec7583a6 | <ide><path>activerecord/test/cases/enum_test.rb
<ide> class StoreTest < ActiveRecord::TestCase
<ide> assert_equal :proposed, @book.status
<ide> end
<ide>
<add> test "find via scope" do
<add> assert_equal @book, Book.proposed.first
<add> end
<add>
<ide> test "update by declaration" do
<ide> @book.written!
<ide> assert @book.written? | 1 |
Java | Java | improve javadoc in annotatedelementutils | 2c5781473c25a80d48a4d7517b0e1b95636c5777 | <ide><path>spring-core/src/main/java/org/springframework/core/annotation/AnnotatedElementUtils.java
<ide> public static AnnotatedElement forAnnotations(Annotation... annotations) {
<ide> * @param element the annotated element
<ide> * @param annotationType the annotation type on which to find meta-annotations
<ide> * @return the names of all meta-annotations present on the annotation,
<del> * or {@code null} if not found
<add> * or an empty set if not found
<ide> * @since 4.2
<ide> * @see #getMetaAnnotationTypes(AnnotatedElement, String)
<ide> * @see #hasMetaAnnotationTypes | 1 |
Ruby | Ruby | fix a typo in `time_field` [ci skip] | 29bfd950120ff8b709f8f1516e828ac15e431850 | <ide><path>actionview/lib/action_view/helpers/form_helper.rb
<ide> def date_field(object_name, method, options = {})
<ide> # Returns a text_field of type "time".
<ide> #
<ide> # The default value is generated by trying to call +strftime+ with "%T.%L"
<del> # on the objects's value. It is still possible to override that
<add> # on the object's value. It is still possible to override that
<ide> # by passing the "value" option.
<ide> #
<ide> # === Options | 1 |
Java | Java | improve basetestconsumer with awaitcount & timeout | a86425a047c82286db0dfb3dce955b283039f59f | <ide><path>src/main/java/io/reactivex/internal/util/VolatileSizeArrayList.java
<add>/**
<add> * Copyright (c) 2016-present, RxJava Contributors.
<add> *
<add> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
<add> * compliance with the License. You may obtain a copy of the License at
<add> *
<add> * http://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing, software distributed under the License is
<add> * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
<add> * the License for the specific language governing permissions and limitations under the License.
<add> */
<add>
<add>package io.reactivex.internal.util;
<add>
<add>import java.util.*;
<add>import java.util.concurrent.atomic.AtomicInteger;
<add>
<add>/**
<add> * Tracks the current underlying array size in a volatile field.
<add> *
<add> * @param <T> the element type
<add> * @since 2.0.7
<add> */
<add>public final class VolatileSizeArrayList<T> extends AtomicInteger implements List<T> {
<add>
<add> private static final long serialVersionUID = 3972397474470203923L;
<add>
<add> final ArrayList<T> list;
<add>
<add> public VolatileSizeArrayList() {
<add> list = new ArrayList<T>();
<add> }
<add>
<add> public VolatileSizeArrayList(int initialCapacity) {
<add> list = new ArrayList<T>(initialCapacity);
<add> }
<add>
<add> @Override
<add> public int size() {
<add> return get();
<add> }
<add>
<add> @Override
<add> public boolean isEmpty() {
<add> return get() == 0;
<add> }
<add>
<add> @Override
<add> public boolean contains(Object o) {
<add> return list.contains(o);
<add> }
<add>
<add> @Override
<add> public Iterator<T> iterator() {
<add> return list.iterator();
<add> }
<add>
<add> @Override
<add> public Object[] toArray() {
<add> return list.toArray();
<add> }
<add>
<add> @Override
<add> public <E> E[] toArray(E[] a) {
<add> return list.toArray(a);
<add> }
<add>
<add> @Override
<add> public boolean add(T e) {
<add> boolean b = list.add(e);
<add> lazySet(list.size());
<add> return b;
<add> }
<add>
<add> @Override
<add> public boolean remove(Object o) {
<add> boolean b = list.remove(o);
<add> lazySet(list.size());
<add> return b;
<add> }
<add>
<add> @Override
<add> public boolean containsAll(Collection<?> c) {
<add> return list.containsAll(c);
<add> }
<add>
<add> @Override
<add> public boolean addAll(Collection<? extends T> c) {
<add> boolean b = list.addAll(c);
<add> lazySet(list.size());
<add> return b;
<add> }
<add>
<add> @Override
<add> public boolean addAll(int index, Collection<? extends T> c) {
<add> boolean b = list.addAll(index, c);
<add> lazySet(list.size());
<add> return b;
<add> }
<add>
<add> @Override
<add> public boolean removeAll(Collection<?> c) {
<add> boolean b = list.removeAll(c);
<add> lazySet(list.size());
<add> return b;
<add> }
<add>
<add> @Override
<add> public boolean retainAll(Collection<?> c) {
<add> boolean b = list.retainAll(c);
<add> lazySet(list.size());
<add> return b;
<add> }
<add>
<add> @Override
<add> public void clear() {
<add> list.clear();
<add> lazySet(0);
<add> }
<add>
<add> @Override
<add> public T get(int index) {
<add> return list.get(index);
<add> }
<add>
<add> @Override
<add> public T set(int index, T element) {
<add> return list.set(index, element);
<add> }
<add>
<add> @Override
<add> public void add(int index, T element) {
<add> list.add(index, element);
<add> lazySet(list.size());
<add> }
<add>
<add> @Override
<add> public T remove(int index) {
<add> T v = list.remove(index);
<add> lazySet(list.size());
<add> return v;
<add> }
<add>
<add> @Override
<add> public int indexOf(Object o) {
<add> return list.indexOf(o);
<add> }
<add>
<add> @Override
<add> public int lastIndexOf(Object o) {
<add> return list.lastIndexOf(o);
<add> }
<add>
<add> @Override
<add> public ListIterator<T> listIterator() {
<add> return list.listIterator();
<add> }
<add>
<add> @Override
<add> public ListIterator<T> listIterator(int index) {
<add> return list.listIterator(index);
<add> }
<add>
<add> @Override
<add> public List<T> subList(int fromIndex, int toIndex) {
<add> return list.subList(fromIndex, toIndex);
<add> }
<add>
<add> @Override
<add> public boolean equals(Object obj) {
<add> if (obj instanceof VolatileSizeArrayList) {
<add> return list.equals(((VolatileSizeArrayList<?>)obj).list);
<add> }
<add> return list.equals(obj);
<add> }
<add>
<add> @Override
<add> public int hashCode() {
<add> return list.hashCode();
<add> }
<add>
<add> @Override
<add> public String toString() {
<add> return list.toString();
<add> }
<add>}
<ide><path>src/main/java/io/reactivex/observers/BaseTestConsumer.java
<ide> import io.reactivex.disposables.Disposable;
<ide> import io.reactivex.exceptions.CompositeException;
<ide> import io.reactivex.functions.Predicate;
<del>import io.reactivex.internal.functions.Functions;
<del>import io.reactivex.internal.functions.ObjectHelper;
<del>import io.reactivex.internal.util.ExceptionHelper;
<add>import io.reactivex.internal.functions.*;
<add>import io.reactivex.internal.util.*;
<ide>
<ide> /**
<ide> * Base class with shared infrastructure to support TestSubscriber and TestObserver.
<ide>
<ide> protected int establishedFusionMode;
<ide>
<add> /**
<add> * The optional tag associated with this test consumer.
<add> * @since 2.0.7
<add> */
<ide> protected CharSequence tag;
<ide>
<add> /**
<add> * Indicates that one of the awaitX method has timed out.
<add> * @since 2.0.7
<add> */
<add> protected boolean timeout;
<add>
<ide> public BaseTestConsumer() {
<del> this.values = new ArrayList<T>();
<del> this.errors = new ArrayList<Throwable>();
<add> this.values = new VolatileSizeArrayList<T>();
<add> this.errors = new VolatileSizeArrayList<Throwable>();
<ide> this.done = new CountDownLatch(1);
<ide> }
<ide>
<ide> protected final AssertionError fail(String message) {
<ide> .append("completions = ").append(completions)
<ide> ;
<ide>
<add> if (timeout) {
<add> b.append(", timeout!");
<add> }
<add>
<add> if (isDisposed()) {
<add> b.append(", disposed!");
<add> }
<add>
<ide> CharSequence tag = this.tag;
<ide> if (tag != null) {
<ide> b.append(", tag = ")
<ide> public final U await() throws InterruptedException {
<ide> * @see #awaitTerminalEvent(long, TimeUnit)
<ide> */
<ide> public final boolean await(long time, TimeUnit unit) throws InterruptedException {
<del> return done.getCount() == 0 || done.await(time, unit);
<add> boolean d = done.getCount() == 0 || (done.await(time, unit));
<add> timeout = !d;
<add> return d;
<ide> }
<ide>
<ide> // assertion methods
<ide> public final U assertFailureAndMessage(Class<? extends Throwable> error,
<ide> public final U awaitDone(long time, TimeUnit unit) {
<ide> try {
<ide> if (!done.await(time, unit)) {
<add> timeout = true;
<ide> dispose();
<ide> }
<ide> } catch (InterruptedException ex) {
<ide> public final U awaitDone(long time, TimeUnit unit) {
<ide>
<ide>
<ide> /**
<del> * Assert that the TestObserver/TestSubscriber/TestSubscriber has received a Disposable but no other events.
<add> * Assert that the TestObserver/TestSubscriber has received a Disposable but no other events.
<ide> * @return this
<ide> */
<ide> public final U assertEmpty() {
<ide> public final U withTag(CharSequence tag) {
<ide> this.tag = tag;
<ide> return (U)this;
<ide> }
<add>
<add> /**
<add> * Enumeration of default wait strategies when waiting for a specific number of
<add> * items in {@link BaseTestConsumer#awaitCount(int, Runnable)}.
<add> * @since 2.0.7 - experimental
<add> */
<add> @Experimental
<add> public enum TestWaitStrategy implements Runnable {
<add> /** The wait loop will spin as fast as possible. */
<add> SPIN {
<add> @Override
<add> public void run() {
<add> // nothing to do
<add> }
<add> },
<add> /** The current thread will be yielded. */
<add> YIELD {
<add> @Override
<add> public void run() {
<add> Thread.yield();
<add> }
<add> },
<add> /** The current thread sleeps for 1 millisecond. */
<add> SLEEP_1MS {
<add> @Override
<add> public void run() {
<add> sleep(1);
<add> }
<add> },
<add> /** The current thread sleeps for 10 milliseconds. */
<add> SLEEP_10MS {
<add> @Override
<add> public void run() {
<add> sleep(10);
<add> }
<add> },
<add> /** The current thread sleeps for 100 milliseconds. */
<add> SLEEP_100MS {
<add> @Override
<add> public void run() {
<add> sleep(100);
<add> }
<add> },
<add> /** The current thread sleeps for 1000 milliseconds. */
<add> SLEEP_1000MS {
<add> @Override
<add> public void run() {
<add> sleep(1000);
<add> }
<add> }
<add> ;
<add>
<add> @Override
<add> public abstract void run();
<add>
<add> static void sleep(int millis) {
<add> try {
<add> Thread.sleep(millis);
<add> } catch (InterruptedException ex) {
<add> throw new RuntimeException(ex);
<add> }
<add> }
<add> }
<add>
<add>
<add> /**
<add> * Await until the TestObserver/TestSubscriber receives the given
<add> * number of items or terminates by sleeping 10 milliseconds at a time
<add> * up to 5000 milliseconds of timeout.
<add> * @param atLeast the number of items expected at least
<add> * @return this
<add> * @see #awaitCount(int, Runnable, long)
<add> * @since 2.0.7 - experimental
<add> */
<add> @Experimental
<add> public final U awaitCount(int atLeast) {
<add> return awaitCount(atLeast, TestWaitStrategy.SLEEP_10MS, 5000);
<add> }
<add>
<add> /**
<add> * Await until the TestObserver/TestSubscriber receives the given
<add> * number of items or terminates by waiting according to the wait
<add> * strategy and up to 5000 milliseconds of timeout.
<add> * @param atLeast the number of items expected at least
<add> * @param waitStrategy a Runnable called when the current received count
<add> * hasn't reached the expected value and there was
<add> * no terminal event either, see {@link TestWaitStrategy}
<add> * for examples
<add> * @return this
<add> * @see #awaitCount(int, Runnable, long)
<add> * @since 2.0.7 - experimental
<add> */
<add> @Experimental
<add> public final U awaitCount(int atLeast, Runnable waitStrategy) {
<add> return awaitCount(atLeast, waitStrategy, 5000);
<add> }
<add>
<add> /**
<add> * Await until the TestObserver/TestSubscriber receives the given
<add> * number of items or terminates.
<add> * @param atLeast the number of items expected at least
<add> * @param waitStrategy a Runnable called when the current received count
<add> * hasn't reached the expected value and there was
<add> * no terminal event either, see {@link TestWaitStrategy}
<add> * for examples
<add> * @param timeoutMillis if positive, the await ends if the specified amount of
<add> * time has passed no matter how many items were received
<add> * @return this
<add> * @since 2.0.7 - experimental
<add> */
<add> @SuppressWarnings("unchecked")
<add> @Experimental
<add> public final U awaitCount(int atLeast, Runnable waitStrategy, long timeoutMillis) {
<add> long start = System.currentTimeMillis();
<add> for (;;) {
<add> if (timeoutMillis > 0L && System.currentTimeMillis() - start >= timeoutMillis) {
<add> timeout = true;
<add> break;
<add> }
<add> if (done.getCount() == 0L) {
<add> break;
<add> }
<add> if (values.size() >= atLeast) {
<add> break;
<add> }
<add>
<add> waitStrategy.run();
<add> }
<add> return (U)this;
<add> }
<add>
<add> /**
<add> * @return true if one of the timeout-based await methods has timed out.
<add> * @see #clearTimeout()
<add> * @see #assertTimeout()
<add> * @see #assertNoTimeout()
<add> * @since 2.0.7 - experimental
<add> */
<add> @Experimental
<add> public final boolean isTimeout() {
<add> return timeout;
<add> }
<add>
<add> /**
<add> * Clears the timeout flag set by the await methods when they timed out.
<add> * @return this
<add> * @since 2.0.7 - experimental
<add> * @see #isTimeout()
<add> */
<add> @SuppressWarnings("unchecked")
<add> @Experimental
<add> public final U clearTimeout() {
<add> timeout = false;
<add> return (U)this;
<add> }
<add>
<add> /**
<add> * Asserts that some awaitX method has timed out.
<add> * @return this
<add> * @since 2.0.7 - experimental
<add> */
<add> @SuppressWarnings("unchecked")
<add> @Experimental
<add> public final U assertTimeout() {
<add> if (!timeout) {
<add> throw fail("No timeout?!");
<add> }
<add> return (U)this;
<add> }
<add>
<add>
<add> /**
<add> * Asserts that some awaitX method has not timed out.
<add> * @return this
<add> * @since 2.0.7 - experimental
<add> */
<add> @SuppressWarnings("unchecked")
<add> @Experimental
<add> public final U assertNoTimeout() {
<add> if (timeout) {
<add> throw fail("Timeout?!");
<add> }
<add> return (U)this;
<add> }
<ide> }
<ide><path>src/test/java/io/reactivex/internal/util/VolatileSizeArrayListTest.java
<add>/**
<add> * Copyright (c) 2016-present, RxJava Contributors.
<add> *
<add> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
<add> * compliance with the License. You may obtain a copy of the License at
<add> *
<add> * http://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing, software distributed under the License is
<add> * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
<add> * the License for the specific language governing permissions and limitations under the License.
<add> */
<add>
<add>package io.reactivex.internal.util;
<add>
<add>import static org.junit.Assert.*;
<add>
<add>import java.util.*;
<add>
<add>import org.junit.Test;
<add>
<add>public class VolatileSizeArrayListTest {
<add>
<add> @Test
<add> public void normal() {
<add> List<Integer> list = new VolatileSizeArrayList<Integer>();
<add>
<add> assertTrue(list.isEmpty());
<add> assertEquals(0, list.size());
<add> assertFalse(list.contains(1));
<add> assertFalse(list.remove((Integer)1));
<add>
<add> list = new VolatileSizeArrayList<Integer>(16);
<add> assertTrue(list.add(1));
<add> assertTrue(list.addAll(Arrays.asList(3, 4, 7)));
<add> list.add(1, 2);
<add> assertTrue(list.addAll(4, Arrays.asList(5, 6)));
<add>
<add> assertTrue(list.contains(2));
<add> assertFalse(list.remove((Integer)10));
<add>
<add> assertEquals(Arrays.asList(1, 2, 3, 4, 5, 6, 7), list);
<add> assertFalse(list.isEmpty());
<add> assertEquals(7, list.size());
<add>
<add> Iterator<Integer> it = list.iterator();
<add> for (int i = 1; i < 8; i++) {
<add> assertEquals(i, it.next().intValue());
<add> }
<add>
<add> assertArrayEquals(new Object[] { 1, 2, 3, 4, 5, 6, 7 }, list.toArray());
<add> assertArrayEquals(new Integer[] { 1, 2, 3, 4, 5, 6, 7 }, list.toArray(new Integer[7]));
<add>
<add> assertTrue(list.containsAll(Arrays.asList(2, 4, 6)));
<add> assertFalse(list.containsAll(Arrays.asList(2, 4, 6, 10)));
<add>
<add> assertFalse(list.removeAll(Arrays.asList(10, 11, 12)));
<add>
<add> assertFalse(list.retainAll(Arrays.asList(1, 2, 3, 4, 5, 6, 7)));
<add>
<add> assertEquals(7, list.size());
<add>
<add> for (int i = 1; i < 8; i++) {
<add> assertEquals(i, list.get(i - 1).intValue());
<add> }
<add>
<add> for (int i = 1; i < 8; i++) {
<add> assertEquals(i, list.set(i - 1, i).intValue());
<add> }
<add>
<add> assertEquals(2, list.indexOf(3));
<add>
<add> assertEquals(5, list.lastIndexOf(6));
<add>
<add> ListIterator<Integer> lit = list.listIterator(7);
<add> for (int i = 7; i > 0; i--) {
<add> assertEquals(i, lit.previous().intValue());
<add> }
<add>
<add> assertEquals(Arrays.asList(3, 4, 5), list.subList(2, 5));
<add>
<add> VolatileSizeArrayList<Integer> list2 = new VolatileSizeArrayList<Integer>();
<add> list2.addAll(Arrays.asList(1, 2, 3, 4, 5, 6));
<add>
<add> assertFalse(list2.equals(list));
<add> assertFalse(list.equals(list2));
<add>
<add> list2.add(7);
<add> assertTrue(list2.equals(list));
<add> assertTrue(list.equals(list2));
<add>
<add> List<Integer> list3 = new ArrayList<Integer>();
<add> list3.addAll(Arrays.asList(1, 2, 3, 4, 5, 6));
<add>
<add> assertFalse(list3.equals(list));
<add> assertFalse(list.equals(list3));
<add>
<add> list3.add(7);
<add> assertTrue(list3.equals(list));
<add> assertTrue(list.equals(list3));
<add>
<add> assertEquals(list.hashCode(), list3.hashCode());
<add> assertEquals(list.toString(), list3.toString());
<add>
<add> list.remove(0);
<add> assertEquals(6, list.size());
<add>
<add> list.clear();
<add> assertEquals(0, list.size());
<add> assertTrue(list.isEmpty());
<add> }
<add>}
<ide><path>src/test/java/io/reactivex/subscribers/TestSubscriberTest.java
<ide> import io.reactivex.internal.functions.Functions;
<ide> import io.reactivex.internal.fuseable.QueueSubscription;
<ide> import io.reactivex.internal.subscriptions.*;
<add>import io.reactivex.observers.BaseTestConsumer;
<add>import io.reactivex.observers.BaseTestConsumer.TestWaitStrategy;
<ide> import io.reactivex.processors.*;
<ide> import io.reactivex.schedulers.Schedulers;
<ide>
<ide> public void withTag() {
<ide> assertTrue(ex.toString(), ex.toString().contains("testing with item=2"));
<ide> }
<ide> }
<add>
<add> @Test
<add> public void timeoutIndicated() throws InterruptedException {
<add> Thread.interrupted(); // clear flag
<add>
<add> TestSubscriber<Object> ts = Flowable.never()
<add> .test();
<add> assertFalse(ts.await(1, TimeUnit.MILLISECONDS));
<add>
<add> try {
<add> ts.assertResult(1);
<add> fail("Should have thrown!");
<add> } catch (AssertionError ex) {
<add> assertTrue(ex.toString(), ex.toString().contains("timeout!"));
<add> }
<add> }
<add>
<add> @Test
<add> public void timeoutIndicated2() throws InterruptedException {
<add> try {
<add> Flowable.never()
<add> .test()
<add> .awaitDone(1, TimeUnit.MILLISECONDS)
<add> .assertResult(1);
<add>
<add> fail("Should have thrown!");
<add> } catch (AssertionError ex) {
<add> assertTrue(ex.toString(), ex.toString().contains("timeout!"));
<add> }
<add> }
<add>
<add>
<add> @Test
<add> public void timeoutIndicated3() throws InterruptedException {
<add> TestSubscriber<Object> ts = Flowable.never()
<add> .test();
<add> assertFalse(ts.awaitTerminalEvent(1, TimeUnit.MILLISECONDS));
<add>
<add> try {
<add> ts.assertResult(1);
<add> fail("Should have thrown!");
<add> } catch (AssertionError ex) {
<add> assertTrue(ex.toString(), ex.toString().contains("timeout!"));
<add> }
<add> }
<add>
<add> @Test
<add> public void disposeIndicated() {
<add> TestSubscriber<Object> ts = new TestSubscriber<Object>();
<add> ts.cancel();
<add>
<add> try {
<add> ts.assertResult(1);
<add> fail("Should have thrown!");
<add> } catch (Throwable ex) {
<add> assertTrue(ex.toString(), ex.toString().contains("disposed!"));
<add> }
<add> }
<add>
<add> @Test
<add> public void checkTestWaitStrategyEnum() {
<add> TestHelper.checkEnum(BaseTestConsumer.TestWaitStrategy.class);
<add> }
<add>
<add> @Test
<add> public void awaitCount() {
<add> Flowable.range(1, 10).delay(100, TimeUnit.MILLISECONDS)
<add> .test(5)
<add> .awaitCount(5)
<add> .assertValues(1, 2, 3, 4, 5)
<add> .requestMore(5)
<add> .awaitDone(5, TimeUnit.SECONDS)
<add> .assertResult(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
<add> }
<add>
<add> @Test
<add> public void awaitCountLess() {
<add> Flowable.range(1, 4)
<add> .test()
<add> .awaitCount(5)
<add> .assertResult(1, 2, 3, 4);
<add> }
<add>
<add> @Test
<add> public void awaitCountLess2() {
<add> Flowable.range(1, 4)
<add> .test()
<add> .awaitCount(5, TestWaitStrategy.YIELD)
<add> .assertResult(1, 2, 3, 4);
<add> }
<add>
<add> @Test
<add> public void awaitCountLess3() {
<add> Flowable.range(1, 4).delay(50, TimeUnit.MILLISECONDS)
<add> .test()
<add> .awaitCount(5, TestWaitStrategy.SLEEP_1MS)
<add> .assertResult(1, 2, 3, 4);
<add> }
<add>
<add> @Test
<add> public void interruptTestWaitStrategy() {
<add> try {
<add> Thread.currentThread().interrupt();
<add> TestWaitStrategy.SLEEP_1000MS.run();
<add> } catch (RuntimeException ex) {
<add> assertTrue(ex.toString(), ex.getCause() instanceof InterruptedException);
<add> }
<add> }
<add>
<add> @Test
<add> public void awaitCountTimeout() {
<add> TestSubscriber<Object> ts = Flowable.never()
<add> .test()
<add> .awaitCount(1, TestWaitStrategy.SLEEP_1MS, 50);
<add>
<add> assertTrue(ts.isTimeout());
<add> ts.clearTimeout();
<add> assertFalse(ts.isTimeout());
<add> }
<add>
<add> @Test
<add> public void assertTimeout() {
<add> Flowable.never()
<add> .test()
<add> .awaitCount(1, TestWaitStrategy.SLEEP_1MS, 50)
<add> .assertTimeout();
<add> }
<add>
<add> @Test
<add> public void assertTimeout2() {
<add> try {
<add> Flowable.empty()
<add> .test()
<add> .awaitCount(1, TestWaitStrategy.SLEEP_1MS, 50)
<add> .assertTimeout();
<add> fail("Should have thrown!");
<add> } catch (AssertionError ex) {
<add> assertTrue(ex.toString(), ex.getMessage().contains("No timeout?!"));
<add> }
<add> }
<add>
<add> @Test
<add> public void assertNoTimeout() {
<add> Flowable.just(1)
<add> .test()
<add> .awaitCount(1, TestWaitStrategy.SLEEP_1MS, 50)
<add> .assertNoTimeout();
<add> }
<add>
<add> @Test
<add> public void assertNoTimeout2() {
<add> try {
<add> Flowable.never()
<add> .test()
<add> .awaitCount(1, TestWaitStrategy.SLEEP_1MS, 50)
<add> .assertNoTimeout();
<add> fail("Should have thrown!");
<add> } catch (AssertionError ex) {
<add> assertTrue(ex.toString(), ex.getMessage().contains("Timeout?!"));
<add> }
<add> }
<add>
<add> @Test
<add> public void assertNeverPredicateThrows() {
<add> try {
<add> Flowable.just(1)
<add> .test()
<add> .assertNever(new Predicate<Integer>() {
<add> @Override
<add> public boolean test(Integer t) throws Exception {
<add> throw new IllegalArgumentException();
<add> }
<add> });
<add> fail("Should have thrown!");
<add> } catch (IllegalArgumentException ex) {
<add> // expected
<add> }
<add> }
<add>
<add> @Test
<add> public void assertValueAtPredicateThrows() {
<add> try {
<add> Flowable.just(1)
<add> .test()
<add> .assertValueAt(0, new Predicate<Integer>() {
<add> @Override
<add> public boolean test(Integer t) throws Exception {
<add> throw new IllegalArgumentException();
<add> }
<add> });
<add> fail("Should have thrown!");
<add> } catch (IllegalArgumentException ex) {
<add> // expected
<add> }
<add> }
<add>
<add> @Test
<add> public void waitStrategyRuns() {
<add> for (TestWaitStrategy ws : TestWaitStrategy.values()) {
<add> ws.run();
<add> }
<add> }
<ide> } | 4 |
Ruby | Ruby | move contenttype inline for now. | d58b57a3caf4ad434c2be4f63eecd9a1921c7c4a | <ide><path>actionpack/lib/action_controller/new_base.rb
<ide> module ActionController
<del> autoload :ContentType, "action_controller/new_base/content_type"
<ide> autoload :HideActions, "action_controller/new_base/hide_actions"
<ide> autoload :Http, "action_controller/new_base/base"
<ide> autoload :Layouts, "action_controller/new_base/layouts"
<ide><path>actionpack/lib/action_controller/new_base/content_type.rb
<del>module ActionController
<del> module ContentType
<del>
<del> def render_to_body(options = {})
<del> if content_type = options[:content_type]
<del> response.content_type = content_type
<del> end
<del>
<del> ret = super
<del> response.content_type ||= options[:_template].mime_type
<del> ret
<del> end
<del>
<del> end
<del>end
<ide>\ No newline at end of file
<ide><path>actionpack/lib/action_controller/new_base/renderer.rb
<ide> def render_to_body(options)
<ide> options[:_prefix] = _prefix
<ide> end
<ide>
<del> super(options)
<add> ret = super(options)
<add> response.content_type ||= options[:_template].mime_type
<add> ret
<ide> end
<ide>
<ide> private
<ide> def _text(options)
<ide> end
<ide>
<ide> def _process_options(options)
<del> if status = options[:status]
<del> response.status = status.to_i
<del> end
<add> status, content_type = options.values_at(:status, :content_type)
<add> response.status = status.to_i if status
<add> response.content_type = content_type if content_type
<ide> end
<ide> end
<ide> end
<ide><path>actionpack/test/new_base/test_helper.rb
<ide> class Base2 < Http
<ide> use ActionController::UrlFor
<ide> use ActionController::Renderer
<ide> use ActionController::Layouts
<del> use ActionController::ContentType
<ide>
<ide> def self.inherited(klass)
<ide> ::ActionController::Base2.subclasses << klass.to_s | 4 |
Python | Python | prevent modification of coefficients | 0ea21d1092c9154d72d50b04ba2c8025704c679f | <ide><path>numpy/lib/polynomial.py
<ide> class poly1d(object):
<ide> @property
<ide> def coeffs(self):
<ide> """ The polynomial coefficients """
<del> return self._coeffs
<add> return self._coeffs.copy()
<ide>
<ide> @property
<ide> def variable(self): | 1 |
Text | Text | create changing columns on migrations guides | 9477f4372fac1a18aad6fbce249a1bfc03e0b61f | <ide><path>guides/source/migrations.md
<ide> end
<ide> removes the `description` and `name` columns, creates a `part_number` string
<ide> column and adds an index on it. Finally it renames the `upccode` column.
<ide>
<add>### Changing Columns
<add>
<add>Like `remove_column` and `add_column`, Rails provides the `change_column` method.
<add>
<add>```ruby
<add>change_column :products, :part_number, :text
<add>```
<add>
<add>This changes the column `part_number` on the products table to be a text field.
<add>
<add>Besides `change_column`, the `change_column_null` and
<add>`change_column_default` methods are used specifically to change the null
<add>constraint and the default value of a column.
<add>
<add>```ruby
<add>change_column_null :products, :name, false
<add>change_column_default :products, :approved, false
<add>```
<add>
<add>This sets the name column on products to be a NOT NULL column and the default
<add>value of the approved column to false.
<add>
<ide> ### When Helpers aren't Enough
<ide>
<ide> If the helpers provided by Active Record aren't enough you can use the `execute` | 1 |
PHP | PHP | formalize current deprecations | 7667c927c9b6caee10dcc8d58b2ba1db6540406f | <ide><path>src/Cache/Cache.php
<ide> protected static function _buildEngine(string $name): void
<ide> */
<ide> public static function engine(string $config)
<ide> {
<add> deprecationWarning('Cache::engine() is deprecated. Use Cache::pool() instead.');
<add>
<ide> return static::pool($config);
<ide> }
<ide>
<ide><path>src/Database/Type/DateTimeType.php
<ide> public function toDatabase($value, DriverInterface $driver): ?string
<ide> */
<ide> public function setTimezone($timezone)
<ide> {
<add> deprecationWarning('DateTimeType::setTimezone() is deprecated. Use setDatabaseTimezone() instead.');
<add>
<ide> return $this->setDatabaseTimezone($timezone);
<ide> }
<ide>
<ide><path>src/Datasource/EntityTrait.php
<ide> public function unset($field)
<ide> */
<ide> public function unsetProperty($field)
<ide> {
<add> deprecationWarning('EntityTrait::unsetProperty() is deprecated. Use unset() instead.');
<add>
<ide> return $this->unset($field);
<ide> }
<ide>
<ide><path>src/Form/Form.php
<ide> public function getSchema(): Schema
<ide> */
<ide> public function schema(?Schema $schema = null): Schema
<ide> {
<add> deprecationWarning('Form::schema() is deprecated. Use setSchema() and getSchema() instead.');
<ide> if ($schema !== null) {
<ide> $this->setSchema($schema);
<ide> }
<ide><path>src/Http/Cookie/Cookie.php
<ide> public function getValue()
<ide> */
<ide> public function getStringValue()
<ide> {
<add> deprecationWarning('Cookie::getStringValue() is deprecated. Use getScalarValue() instead.');
<add>
<ide> return $this->getScalarValue();
<ide> }
<ide>
<ide><path>src/Http/Middleware/CsrfProtectionMiddleware.php
<ide> protected function _unsetTokenField(ServerRequestInterface $request): ServerRequ
<ide> */
<ide> protected function _createToken(): string
<ide> {
<add> deprecationWarning('_createToken() is deprecated. Use createToken() instead.');
<add>
<ide> return $this->createToken();
<ide> }
<ide>
<ide><path>src/Mailer/Mailer.php
<ide> public function __call(string $method, array $args)
<ide> */
<ide> public function set($key, $value = null)
<ide> {
<add> deprecationWarning('Mailer::set() is deprecated. Use setViewVars() instead.');
<ide> return $this->setViewVars($key, $value);
<ide> }
<ide>
<ide><path>src/ORM/Behavior.php
<ide> public function initialize(array $config): void
<ide> */
<ide> public function getTable(): Table
<ide> {
<add> deprecationWarning('Behavior::getTable() is deprecated. Use table() instead.');
<add>
<ide> return $this->table();
<ide> }
<ide>
<ide><path>src/Validation/Validation.php
<ide> public static function compareFields($check, string $field, string $operator, ar
<ide> */
<ide> public static function containsNonAlphaNumeric($check, int $count = 1): bool
<ide> {
<add> deprecationWarning('Validation::containsNonAlphaNumeric() is deprecated. Use notAlphaNumeric() instead.');
<ide> if (!is_string($check)) {
<ide> return false;
<ide> }
<ide><path>src/Validation/Validator.php
<ide> public function lessThanOrEqualToField(string $field, string $secondField, ?stri
<ide> */
<ide> public function containsNonAlphaNumeric(string $field, int $limit = 1, ?string $message = null, $when = null)
<ide> {
<add> deprecationWarning('Validator::containsNonAlphaNumeric() is deprecated. Use notAlphaNumeric() instead.');
<ide> $extra = array_filter(['on' => $when, 'message' => $message]);
<ide>
<ide> return $this->add($field, 'containsNonAlphaNumeric', $extra + [
<ide><path>src/View/Helper/NumberHelper.php
<ide> public function formatDelta($value, array $options = []): string
<ide> */
<ide> public function defaultCurrency($currency): ?string
<ide> {
<add> deprecationWarning('NumberHelper::defaultCurrency() is deprecated. Use setDefaultCurrency() and getDefaultCurrency() instead.');
<add>
<ide> return $this->_engine->defaultCurrency($currency);
<ide> }
<ide>
<ide><path>tests/TestCase/Database/Type/DateTimeFractionalTypeTest.php
<ide> public function testToPHPString()
<ide> $this->assertInstanceOf(FrozenTime::class, $result);
<ide> $this->assertSame('123456', $result->format('u'));
<ide>
<del> $this->type->setTimezone('Asia/Kolkata'); // UTC+5:30
<add> $this->type->setDatabaseTimezone('Asia/Kolkata'); // UTC+5:30
<ide> $result = $this->type->toPHP('2001-01-04 12:00:00.123456', $this->driver);
<ide> $this->assertInstanceOf(FrozenTime::class, $result);
<ide> $this->assertSame('2001', $result->format('Y'));
<ide> public function testManyToPHP()
<ide> $this->type->manyToPHP($values, array_keys($values), $this->driver)
<ide> );
<ide>
<del> $this->type->setTimezone('Asia/Kolkata'); // UTC+5:30
<add> $this->type->setDatabaseTimezone('Asia/Kolkata'); // UTC+5:30
<ide> $values = [
<ide> 'a' => null,
<ide> 'b' => '2001-01-04 12:13:14',
<ide> public function testToDatabase()
<ide> $this->assertSame('2013-08-12 15:16:17.123456', $result);
<ide>
<ide> $tz = $date->getTimezone();
<del> $this->type->setTimezone('Asia/Kolkata'); // UTC+5:30
<add> $this->type->setDatabaseTimezone('Asia/Kolkata'); // UTC+5:30
<ide> $result = $this->type->toDatabase($date, $this->driver);
<ide> $this->assertSame('2013-08-12 20:46:17.123456', $result);
<ide> $this->assertEquals($tz, $date->getTimezone());
<ide>
<del> $this->type->setTimezone(new DateTimeZone('Asia/Kolkata'));
<add> $this->type->setDatabaseTimezone(new DateTimeZone('Asia/Kolkata'));
<ide> $result = $this->type->toDatabase($date, $this->driver);
<ide> $this->assertSame('2013-08-12 20:46:17.123456', $result);
<del> $this->type->setTimezone(null);
<add> $this->type->setDatabaseTimezone(null);
<ide>
<ide> $date = new FrozenTime('2013-08-12 15:16:17.123456');
<ide> $result = $this->type->toDatabase($date, $this->driver);
<ide> $this->assertSame('2013-08-12 15:16:17.123456', $result);
<ide>
<del> $this->type->setTimezone('Asia/Kolkata'); // UTC+5:30
<add> $this->type->setDatabaseTimezone('Asia/Kolkata'); // UTC+5:30
<ide> $result = $this->type->toDatabase($date, $this->driver);
<ide> $this->assertSame('2013-08-12 20:46:17.123456', $result);
<del> $this->type->setTimezone(null);
<add> $this->type->setDatabaseTimezone(null);
<ide> }
<ide>
<ide> /**
<ide> public function testToDatabaseNoMicroseconds()
<ide> $this->assertSame('2013-08-12 15:16:17.000000', $result);
<ide>
<ide> $tz = $date->getTimezone();
<del> $this->type->setTimezone('Asia/Kolkata'); // UTC+5:30
<add> $this->type->setDatabaseTimezone('Asia/Kolkata'); // UTC+5:30
<ide> $result = $this->type->toDatabase($date, $this->driver);
<ide> $this->assertSame('2013-08-12 20:46:17.000000', $result);
<ide> $this->assertEquals($tz, $date->getTimezone());
<ide>
<del> $this->type->setTimezone(new DateTimeZone('Asia/Kolkata'));
<add> $this->type->setDatabaseTimezone(new DateTimeZone('Asia/Kolkata'));
<ide> $result = $this->type->toDatabase($date, $this->driver);
<ide> $this->assertSame('2013-08-12 20:46:17.000000', $result);
<del> $this->type->setTimezone(null);
<add> $this->type->setDatabaseTimezone(null);
<ide>
<ide> $date = new FrozenTime('2013-08-12 15:16:17');
<ide> $result = $this->type->toDatabase($date, $this->driver);
<ide> $this->assertSame('2013-08-12 15:16:17.000000', $result);
<ide>
<del> $this->type->setTimezone('Asia/Kolkata'); // UTC+5:30
<add> $this->type->setDatabaseTimezone('Asia/Kolkata'); // UTC+5:30
<ide> $result = $this->type->toDatabase($date, $this->driver);
<ide> $this->assertSame('2013-08-12 20:46:17.000000', $result);
<del> $this->type->setTimezone(null);
<add> $this->type->setDatabaseTimezone(null);
<ide>
<ide> $date = 1401906995;
<ide> $result = $this->type->toDatabase($date, $this->driver);
<ide><path>tests/TestCase/Database/Type/DateTimeTypeTest.php
<ide> public function testToPHPString()
<ide> $this->assertSame('13', $result->format('i'));
<ide> $this->assertSame('14', $result->format('s'));
<ide>
<del> $this->type->setTimezone('Asia/Kolkata'); // UTC+5:30
<add> $this->type->setDatabaseTimezone('Asia/Kolkata'); // UTC+5:30
<ide> $result = $this->type->toPHP('2001-01-04 12:00:00', $this->driver);
<ide> $this->assertInstanceOf(FrozenTime::class, $result);
<ide> $this->assertSame('2001', $result->format('Y'));
<ide> public function testManyToPHP()
<ide> $this->type->manyToPHP($values, array_keys($values), $this->driver)
<ide> );
<ide>
<del> $this->type->setTimezone('Asia/Kolkata'); // UTC+5:30
<add> $this->type->setDatabaseTimezone('Asia/Kolkata'); // UTC+5:30
<ide> $values = [
<ide> 'a' => null,
<ide> 'b' => '2001-01-04 12:13:14',
<ide> public function testToDatabase()
<ide> $this->assertSame('2013-08-12 15:16:17', $result);
<ide>
<ide> $tz = $date->getTimezone();
<del> $this->type->setTimezone('Asia/Kolkata'); // UTC+5:30
<add> $this->type->setDatabaseTimezone('Asia/Kolkata'); // UTC+5:30
<ide> $result = $this->type->toDatabase($date, $this->driver);
<ide> $this->assertSame('2013-08-12 20:46:17', $result);
<ide> $this->assertEquals($tz, $date->getTimezone());
<ide>
<del> $this->type->setTimezone(new DateTimeZone('Asia/Kolkata'));
<add> $this->type->setDatabaseTimezone(new DateTimeZone('Asia/Kolkata'));
<ide> $result = $this->type->toDatabase($date, $this->driver);
<ide> $this->assertSame('2013-08-12 20:46:17', $result);
<del> $this->type->setTimezone(null);
<add> $this->type->setDatabaseTimezone(null);
<ide>
<ide> $date = new FrozenTime('2013-08-12 15:16:17');
<ide> $result = $this->type->toDatabase($date, $this->driver);
<ide> $this->assertSame('2013-08-12 15:16:17', $result);
<ide>
<del> $this->type->setTimezone('Asia/Kolkata'); // UTC+5:30
<add> $this->type->setDatabaseTimezone('Asia/Kolkata'); // UTC+5:30
<ide> $result = $this->type->toDatabase($date, $this->driver);
<ide> $this->assertSame('2013-08-12 20:46:17', $result);
<del> $this->type->setTimezone(null);
<add> $this->type->setDatabaseTimezone(null);
<ide>
<ide> $date = 1401906995;
<ide> $result = $this->type->toDatabase($date, $this->driver);
<ide><path>tests/TestCase/Form/FormTest.php
<ide> class FormTest extends TestCase
<ide> */
<ide> public function testSchema()
<ide> {
<del> $form = new Form();
<del> $schema = $form->schema();
<add> $this->deprecated(function () {
<add> $form = new Form();
<add> $schema = $form->schema();
<ide>
<del> $this->assertInstanceOf('Cake\Form\Schema', $schema);
<del> $this->assertSame($schema, $form->schema(), 'Same instance each time');
<add> $this->assertInstanceOf('Cake\Form\Schema', $schema);
<add> $this->assertSame($schema, $form->schema(), 'Same instance each time');
<ide>
<del> $schema = new Schema();
<del> $this->assertSame($schema, $form->schema($schema));
<del> $this->assertSame($schema, $form->schema());
<add> $schema = new Schema();
<add> $this->assertSame($schema, $form->schema($schema));
<add> $this->assertSame($schema, $form->schema());
<ide>
<del> $form = new AppForm();
<del> $this->assertInstanceOf(FormSchema::class, $form->schema());
<add> $form = new AppForm();
<add> $this->assertInstanceOf(FormSchema::class, $form->schema());
<add> });
<ide> }
<ide>
<ide> /**
<ide><path>tests/TestCase/Http/Cookie/CookieTest.php
<ide> public function testWithValue()
<ide> */
<ide> public function testGetStringValue()
<ide> {
<del> $cookie = new Cookie('cakephp', 'thing');
<del> $this->assertSame('thing', $cookie->getStringValue());
<add> $this->deprecated(function () {
<add> $cookie = new Cookie('cakephp', 'thing');
<add> $this->assertSame('thing', $cookie->getStringValue());
<ide>
<del> $value = ['user_id' => 1, 'token' => 'abc123'];
<del> $cookie = new Cookie('cakephp', $value);
<add> $value = ['user_id' => 1, 'token' => 'abc123'];
<add> $cookie = new Cookie('cakephp', $value);
<ide>
<del> $this->assertSame($value, $cookie->getValue());
<del> $this->assertSame(json_encode($value), $cookie->getStringValue());
<add> $this->assertSame($value, $cookie->getValue());
<add> $this->assertSame(json_encode($value), $cookie->getStringValue());
<add> });
<ide> }
<ide>
<ide> /**
<ide><path>tests/TestCase/ORM/EntityTest.php
<ide> public function testMagicUnset()
<ide> */
<ide> public function testUnsetDeprecated()
<ide> {
<del> $entity = new Entity();
<del> $entity->foo = 'foo';
<add> $this->deprecated(function () {
<add> $entity = new Entity();
<add> $entity->foo = 'foo';
<ide>
<del> $entity->unsetProperty('foo');
<del> $this->assertNull($entity->foo);
<add> $entity->unsetProperty('foo');
<add> $this->assertNull($entity->foo);
<add> });
<ide> }
<ide>
<ide> /**
<ide><path>tests/TestCase/Validation/ValidatorTest.php
<ide> public function testLessThanOrEqualToField()
<ide> */
<ide> public function testContainsNonAlphaNumeric()
<ide> {
<del> $validator = new Validator();
<del> $this->assertProxyMethod($validator, 'containsNonAlphaNumeric', 2, [2]);
<del> $this->assertNotEmpty($validator->validate(['username' => '$']));
<add> $this->deprecated(function () {
<add> $validator = new Validator();
<add> $this->assertProxyMethod($validator, 'containsNonAlphaNumeric', 2, [2]);
<add> $this->assertNotEmpty($validator->validate(['username' => '$']));
<add> });
<ide> }
<ide>
<ide> /**
<ide><path>tests/TestCase/View/Helper/NumberHelperTest.php
<ide> public function methodProvider()
<ide> ['currency'],
<ide> ['format'],
<ide> ['formatDelta'],
<del> ['defaultCurrency'],
<ide> ['ordinal'],
<ide> ];
<ide> } | 18 |
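The CakePHP patch above applies a soft-deprecation pattern throughout: each old method survives as a thin wrapper that emits a runtime warning and then delegates to its replacement, while the affected tests are wrapped so the expected warning does not fail the suite. A minimal JavaScript sketch of the same wrapper idea — the `Example`, `oldName` and `newName` identifiers are invented for illustration and are not part of the patch:

```javascript
// Illustrative sketch of a soft-deprecation wrapper; all names are hypothetical.
const seenWarnings = new Set();

function deprecationWarning(message) {
  // Warn once per message so repeated calls do not flood the console.
  if (!seenWarnings.has(message)) {
    seenWarnings.add(message);
    console.warn(`Deprecated: ${message}`);
  }
}

class Example {
  newName(value) {
    return `handled:${value}`;
  }

  // Old API kept for backwards compatibility: warn, then delegate.
  oldName(value) {
    deprecationWarning('Example.oldName() is deprecated. Use newName() instead.');
    return this.newName(value);
  }
}

const example = new Example();
console.log(example.oldName('x')); // warns once, then logs "handled:x"
```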
Text | Text | translate 03 to korean | f0afc7809a602cbc27af3b0676e31c89f1cdf671 | <ide><path>docs/docs/03-interactivity-and-dynamic-uis.ko-KR.md
<add>---
<add>id: interactivity-and-dynamic-uis
<add>title: Interactivity and Dynamic UIs
<add>permalink: interactivity-and-dynamic-uis.ko-KR.html
<add>prev: jsx-gotchas.ko-KR.html
<add>next: multiple-components.ko-KR.html
<add>---
<add>
<add>이미 React에서 [어떻게 데이터를 표시](/react/docs/displaying-data.ko-KR.html)하는지를 배웠습니다. 이제 UI와의 상호작용을 어떻게 만드는지 살펴보죠.
<add>
<add>
<add>## 간단한 예제
<add>
<add>```javascript
<add>var LikeButton = React.createClass({
<add> getInitialState: function() {
<add> return {liked: false};
<add> },
<add> handleClick: function(event) {
<add> this.setState({liked: !this.state.liked});
<add> },
<add> render: function() {
<add> var text = this.state.liked ? 'like' : 'haven\'t liked';
<add> return (
<add> <p onClick={this.handleClick}>
<add> You {text} this. Click to toggle.
<add> </p>
<add> );
<add> }
<add>});
<add>
<add>React.render(
<add> <LikeButton />,
<add> document.getElementById('example')
<add>);
<add>```
<add>
<add>
<add>## 이벤트 핸들링과 통합적인(Synthetic) 이벤트
<add>
<add>React에서의 이벤트 핸들러는 HTML에서 그러던 것처럼 간단히 카멜케이스 프로퍼티(camelCased prop)로 넘기면 됩니다. React의 모든 이벤트는 통합적인 이벤트 시스템의 구현으로 IE8 이상에서는 같은 행동이 보장됩니다. 즉, React는 사양에 따라 어떻게 이벤트를 일으키고(bubble) 잡는지 알고 있고, 당신이 사용하는 브라우저와 관계없이 이벤트 핸들러에 전달되는 이벤트는 [W3C 사양](http://www.w3.org/TR/DOM-Level-3-Events/)과 같도록 보장됩니다.
<add>
<add>React를 폰이나 테블릿같은 터치 디바이스에서 사용하려 한다면, 간단히 `React.initializeTouchEvents(true);`로 터치 이벤트 핸들링을 켜면 됩니다.
<add>
<add>
<add>## 기본 구현: 오토바인딩과 이벤트 딜리게이션
<add>
<add>코드를 고성능으로 유지하고 이해하기 쉽게 하기 위해 React는 안 보이는 곳에서 몇 가지 일을 합니다.
<add>
<add>**오토바인딩:** 자바스크립트에서 콜백을 만들 때, 보통은 `this`의 값이 정확하도록 명시적으로 메서드를 인스턴스에 바인드해야 합니다. React에서는 모든 메서드가 자동으로 React의 컴포넌트 인스턴스에 바인드됩니다. React가 바인드 메서드를 캐시하기 때문에 매우 CPU와 메모리에 효율적입니다. 타이핑해야 할 것도 적죠!
<add>
<add>**이벤트 딜리게이션:** React는 실제로는 노드자신에게 이벤트 핸들러를 붙이지 않습니다. React가 시작되면 React는 탑 레벨의 단일 이벤트 리스너로 모든 이벤트를 리스닝하기 시작합니다. 컴포넌트가 마운트되거나 언마운트 될 때, 이벤트 핸들러는 그냥 내부 매핑에서 넣거나 뺄 뿐입니다. 이벤트가 발생하면, React는 이 매핑을 사용해서 어떻게 디스패치할 지를 알게 됩니다. 매핑에 이벤트 핸들러가 남아있지 않으면, React의 이벤트 핸들러는 그냥 아무것도 하지 않습니다. 왜 이 방식이 빠른지 더 알고 싶으시면, [David Walsh의 멋진 블로그 글](http://davidwalsh.name/event-delegate)을 읽어 보세요.
<add>
<add>
<add>## 컴포넌트는 그냥 상태 머신일 뿐
<add>
<add>React는 UI를 간단한 상태머신이라 생각합니다. UI를 다양한 상태와 그 상태의 렌더링으로 생각함으로써 UI를 일관성 있게 관리하기 쉬워집니다.
<add>
<add>React에서는, 간단히 컴포넌트의 상태를 업데이트하고, 이 새로운 상태의 UI를 렌더링합니다. React는 DOM의 변경을 가장 효율적인 방법으로 관리해줍니다.
<add>
<add>
<add>## 상태의 동작 원리
<add>
<add>React에게 데이터의 변경을 알리는 일반적인 방법은 `setState(data, callback)`을 호출하는 것입니다. 이 메서드는 `this.state`에 `data`를 머지하고 컴포넌트를 재 렌더링 합니다. 컴포넌트의 재 렌더링이 끝나면, 생략가능한 `callback`이 호출됩니다. 대부분의 경우 React가 UI를 최신상태로 유지해주기 때문에 `callback`을 사용할 필요가 없습니다.
<add>
<add>
<add>## 어떤 컴포넌트가 상태를 가져야 할까요?
<add>
<add>대부분의 컴포넌트는 `props`로부터 데이터를 받아 렌더할 뿐입니다만, 가끔 유저 인풋, 서버 리퀘스트, 시간의 경과에 반응해야 할 필요가 있습니다. 이럴 때 상태를 사용합니다.
<add>
<add>**가능한 한 컴포넌트가 상태가 가지지 않도록(stateless) 하세요.** 이렇게 함으로써 가장 논리적인 장소로 상태를 격리하게 되고 쉽게 애플리케이션을 추론할 수 있도록 중복을 최소화할 수 있습니다.
<add>
<add>일반적인 패턴은 데이터만 렌더하는 여러 상태를 가지지 않은 컴포넌트를 만들고, 그 위에 상태기반 컴포넌트를 만들어 계층 안의 자식 컴포넌트에게 `props`를 통해 상태를 전달하는 것입니다. 상태를 가지지 않은 컴포넌트가 선언적인 방법으로 데이터를 렌더링 하는 동안, 상태기반 컴포넌트는 모든 상호작용 로직을 캡슐화합니다.
<add>
<add>
<add>## 상태를 어떻게 *써야* 할까요?
<add>
<add>**상태는 컴포넌트의 이벤트 핸들러에 의해 UI 업데이트를 트리거할때 변경될 가능성이 있어, 그때 사용할 데이터를 가져야 합니다.** 실제 엡에서는 이 데이터는 매우 작고 JSON 직렬화 가능한 경향이 있습니다. 상태기반 컴포넌트를 만들때, 가능한 작게 상태를 서술하고 `this.state`에만 저장하도록 해보세요. 그냥 `render()` 안에서 이 상태를 기반으로 다른 모든 정보를 계산합니다. 이 방식으로 애플리케이션을 작성하고 생각하면 가장 최적의 애플리케이션으로 발전해가는 경향이 있다는 것을 발견하게 될것입니다. 장황하거나 계산된 값을 상태에 추가하는 것은 렌더가 그것을 계산하는 대신에 명시적으로 그것들을 싱크해야 하는 것을 의미하기 때문이죠.
<add>
<add>## 상태를 어떻게 *쓰지 말아야* 할까요?
<add>
<add>`this.state`는 UI의 상태를 표현할 최소한의 데이터만 가져야 합니다. 그래서 이런것을들 가지지 말아야 합니다.
<add>
<add>* **계산된 데이터:** 상태에 따라 값을 미리 계산하는 것을 염려하지 마세요. 계산은 모두 `render()`에서 하는 것이 UI의 일관성을 유지하기 쉽습니다. 예를 들어, 상태에서 list items 배열을 가지고 있고 문자열으로 카운트를 렌더링 할 경우, 상태에 저장하기보다는 그냥 `render()` 메서드안에서 `this.state.listItems.length + ' list items'`를 렌더하세요.
<add>* **React 컴포넌트:** 가지고 있는 props와 상태로 `render()`안에서 만드세요.
<add>* **props에서 복사한 데이터:** 가능한한 원래의 소스로 props를 사용하도록 해보세요. props를 상태에 저장하는 단하나의 올바른 사용법은 이전 값을 알고 싶을 때입니다. props는 시간이 지나면 변경될 수도 있기 때문이죠. | 1 |
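The translated guide above recommends keeping `this.state` minimal and computing derived values inside `render()` instead of storing them. A small sketch of that point, written in the same `React.createClass` style as the guide's own example and using the `listItems` count the text mentions (illustrative only, not part of the patch):

```javascript
// Illustrative only: derive display values in render() instead of storing them in state.
var ListSummary = React.createClass({
  getInitialState: function() {
    return {listItems: ['a', 'b', 'c']}; // the minimal source of truth
  },
  render: function() {
    // Derived value computed on the fly; no separate count is kept in state.
    var summary = this.state.listItems.length + ' list items';
    return <p>{summary}</p>;
  }
});

React.render(<ListSummary />, document.getElementById('example'));
```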
Javascript | Javascript | update code to pass lint | d5a7cf39bb431cf4f75bc1ec8aa7c7c1cb8c81f8 | <ide><path>bin/webpack.js
<ide> function processOptions(options) {
<ide> console.error("\u001b[1m\u001b[31m" + e.message + "\u001b[39m\u001b[22m");
<ide> else
<ide> console.error(e.message);
<del> process.exit(1);
<add> process.exit(1); // eslint-disable-line no-process-exit
<ide> }
<ide> throw e;
<ide> }
<ide><path>lib/AsyncDependenciesBlock.js
<ide> AsyncDependenciesBlock.prototype.sortItems = function() {
<ide> if(this.chunks) {
<ide> this.chunks.sort(function(a, b) {
<ide> var i = 0;
<del> while(true) {
<add> while(true) { // eslint-disable-line no-constant-condition
<ide> if(!a.modules[i] && !b.modules[i]) return 0;
<ide> if(!a.modules[i]) return -1;
<ide> if(!b.modules[i]) return 1;
<ide><path>lib/ContextReplacementPlugin.js
<ide> function createResolveDependenciesFromContextMap(createContextMap) {
<ide> callback(null, dependencies);
<ide> });
<ide> }
<del>};
<add>}
<ide><path>lib/FlagDependencyExportsPlugin.js
<ide> FlagDependencyExportsPlugin.prototype.apply = function(compiler) {
<ide> module.providedExports = exports.slice();
<ide> changed = true;
<ide> }
<del> };
<add> }
<ide> }
<ide> if(changed) {
<ide> notifyDependencies();
<ide><path>lib/WebpackOptionsValidationError.js
<ide> WebpackOptionsValidationError.formatValidationError = function formatValidationE
<ide> return dataPath + " should not be empty.";
<ide> else
<ide> return dataPath + " " + err.message;
<del> default:
<add> default: // eslint-disable-line no-fallthrough
<ide> return dataPath + " " + err.message + " (" + JSON.stringify(err, 0, 2) + ").\n" +
<ide> getSchemaPartText(err.parentSchema);
<ide> }
<ide><path>lib/dependencies/HarmonyExportImportedSpecifierDependency.js
<ide> HarmonyExportImportedSpecifierDependency.prototype.getExports = function() {
<ide> return {
<ide> exports: [this.name]
<ide> }
<del> };
<add> }
<ide> if(this.importDependency.module && Array.isArray(this.importDependency.module.providedExports)) {
<ide> return {
<ide> exports: this.importDependency.module.providedExports.filter(function(id) { | 6 |
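The webpack patch above satisfies the linter by suppressing specific ESLint rules on specific lines rather than disabling them project-wide. A short illustrative sketch of how such inline directives behave — the functions below are invented; only the rule names (`no-constant-condition`, `no-process-exit`) come from the patch:

```javascript
// Illustrative only: `// eslint-disable-line <rule>` silences one named rule
// for the single line it sits on, leaving the rule active everywhere else.
function drainQueue(queue) {
  while (true) { // eslint-disable-line no-constant-condition
    var item = queue.pop();
    if (item === undefined) return;
    console.log(item);
  }
}

function fail(message) {
  console.error(message);
  process.exit(1); // eslint-disable-line no-process-exit
}

drainQueue(["a", "b"]);
```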
Text | Text | add abouthiroppy to collaborators | 04da44afd0e93f73fb702e7ef7b77cdc943917a1 | <ide><path>README.md
<ide> more information about the governance of the Node.js project, see
<ide>
<ide> ### Collaborators
<ide>
<add>* [abouthiroppy](https://github.com/abouthiroppy) -
<add>**Yuta Hiroto** <[email protected]>
<ide> * [ak239](https://github.com/ak239) -
<ide> **Aleksei Koziatinskii** <[email protected]>
<ide> * [andrasq](https://github.com/andrasq) - | 1 |
Javascript | Javascript | convert 4 remaining languages to locale | 1d916a375b54d4645a280332c88c18f61a93f68a | <add><path>locale/af.js
<del><path>lang/af.js
<del>// moment.js language configuration
<del>// language : afrikaans (af)
<add>// moment.js locale configuration
<add>// locale : afrikaans (af)
<ide> // author : Werner Mollentze : https://github.com/wernerm
<ide>
<ide> (function (factory) {
<ide> factory(window.moment); // Browser global
<ide> }
<ide> }(function (moment) {
<del> return moment.lang('af', {
<add> return moment.defineLocale('af', {
<ide> months : "Januarie_Februarie_Maart_April_Mei_Junie_Julie_Augustus_September_Oktober_November_Desember".split("_"),
<ide> monthsShort : "Jan_Feb_Mar_Apr_Mei_Jun_Jul_Aug_Sep_Okt_Nov_Des".split("_"),
<ide> weekdays : "Sondag_Maandag_Dinsdag_Woensdag_Donderdag_Vrydag_Saterdag".split("_"),
<add><path>locale/be.js
<del><path>lang/be.js
<del>// moment.js language configuration
<del>// language : belarusian (be)
<add>// moment.js locale configuration
<add>// locale : belarusian (be)
<ide> // author : Dmitry Demidov : https://github.com/demidov91
<ide> // author: Praleska: http://praleska.pro/
<ide> // Author : Menelion Elensúle : https://github.com/Oire
<ide> return weekdays[nounCase][m.day()];
<ide> }
<ide>
<del> return moment.lang('be', {
<add> return moment.defineLocale('be', {
<ide> months : monthsCaseReplace,
<ide> monthsShort : 'студ_лют_сак_крас_трав_чэрв_ліп_жнів_вер_каст_ліст_снеж'.split('_'),
<ide> weekdays : weekdaysCaseReplace,
<add><path>locale/my.js
<del><path>lang/my.js
<del>// moment.js language configuration
<del>// language : Burmese (mm)
<add>// moment.js locale configuration
<add>// locale : Burmese (mm)
<ide> // author : Squar team, mysquar.com
<ide>
<ide> (function (factory) {
<ide> '၉': '9',
<ide> '၀': '0'
<ide> };
<del> return moment.lang('my', {
<add> return moment.defineLocale('my', {
<ide> months: "ဇန်နဝါရီ_ဖေဖော်ဝါရီ_မတ်_ဧပြီ_မေ_ဇွန်_ဇူလိုင်_သြဂုတ်_စက်တင်ဘာ_အောက်တိုဘာ_နိုဝင်ဘာ_ဒီဇင်ဘာ".split("_"),
<ide> monthsShort: "ဇန်_ဖေ_မတ်_ပြီ_မေ_ဇွန်_လိုင်_သြ_စက်_အောက်_နို_ဒီ".split("_"),
<ide> weekdays: "တနင်္ဂနွေ_တနင်္လာ_အင်္ဂါ_ဗုဒ္ဓဟူး_ကြာသပတေး_သောကြာ_စနေ".split("_"),
<add><path>locale/ti.js
<del><path>lang/ti.js
<del>// moment.js language configuration
<del>// language : tibetan (ti)
<add>// moment.js locale configuration
<add>// locale : tibetan (ti)
<ide> // author : Thupten N. Chakrishar : https://github.com/vajradog
<ide>
<ide> (function (factory) {
<ide> '༠': '0'
<ide> };
<ide>
<del> return moment.lang('ti', {
<add> return moment.defineLocale('ti', {
<ide> months : 'ཟླ་བ་དང་པོ_ཟླ་བ་གཉིས་པ_ཟླ་བ་གསུམ་པ_ཟླ་བ་བཞི་པ_ཟླ་བ་ལྔ་པ_ཟླ་བ་དྲུག་པ_ཟླ་བ་བདུན་པ_ཟླ་བ་བརྒྱད་པ_ཟླ་བ་དགུ་པ_ཟླ་བ་བཅུ་པ_ཟླ་བ་བཅུ་གཅིག་པ_ཟླ་བ་བཅུ་གཉིས་པ'.split("_"),
<ide> monthsShort : 'ཟླ་བ་དང་པོ_ཟླ་བ་གཉིས་པ_ཟླ་བ་གསུམ་པ_ཟླ་བ་བཞི་པ_ཟླ་བ་ལྔ་པ_ཟླ་བ་དྲུག་པ_ཟླ་བ་བདུན་པ_ཟླ་བ་བརྒྱད་པ_ཟླ་བ་དགུ་པ_ཟླ་བ་བཅུ་པ_ཟླ་བ་བཅུ་གཅིག་པ_ཟླ་བ་བཅུ་གཉིས་པ'.split("_"),
<ide> weekdays : 'གཟའ་ཉི་མ་_གཟའ་ཟླ་བ་_གཟའ་མིག་དམར་_གཟའ་ལྷག་པ་_གཟའ་ཕུར་བུ_གཟའ་པ་སངས་_གཟའ་སྤེན་པ་'.split("_"),
<add><path>test/locale/af.js
<del><path>test/lang/af.js
<ide> var moment = require("../../moment");
<ide> Afrikaans
<ide> *************************************************/
<ide>
<del>exports["lang:af"] = {
<add>exports["locale:af"] = {
<ide> setUp : function (cb) {
<del> moment.lang('af');
<add> moment.locale('af');
<ide> moment.createFromInputFallback = function () {
<ide> throw new Error("input not handled by moment");
<ide> };
<ide> cb();
<ide> },
<ide>
<ide> tearDown : function (cb) {
<del> moment.lang('en');
<add> moment.locale('en');
<ide> cb();
<ide> },
<ide>
<ide> exports["lang:af"] = {
<ide> test.equal(moment([2012, 0, 9]).format('w ww wo'), '2 02 2de', "Jan 9 2012 should be week 2");
<ide> test.equal(moment([2012, 0, 15]).format('w ww wo'), '2 02 2de', "Jan 15 2012 should be week 2");
<ide>
<del> test.done();
<del> },
<del>
<del> "returns the name of the language" : function (test) {
<del> if (typeof module !== 'undefined' && module.exports) {
<del> test.equal(require('../../lang/af'), 'af', "module should export af");
<del> }
<del>
<ide> test.done();
<ide> }
<ide> };
<add><path>test/locale/be.js
<del><path>test/lang/be.js
<ide> var moment = require("../../moment");
<ide> Belarusian
<ide> *************************************************/
<ide>
<del>exports["lang:be"] = {
<add>exports["locale:be"] = {
<ide> setUp : function (cb) {
<del> moment.lang('be');
<add> moment.locale('be');
<ide> moment.createFromInputFallback = function () {
<ide> throw new Error("input not handled by moment");
<ide> };
<ide> cb();
<ide> },
<ide>
<ide> tearDown : function (cb) {
<del> moment.lang('en');
<add> moment.locale('en');
<ide> cb();
<ide> },
<ide>
<ide> exports["lang:be"] = {
<ide> test.equal(moment([2012, 0, 8]).format('w ww wo'), '2 02 2-і', "Jan 8 2012 should be week 2");
<ide> test.equal(moment([2012, 0, 9]).format('w ww wo'), '3 03 3-і', "Jan 9 2012 should be week 3");
<ide>
<del> test.done();
<del> },
<del>
<del> "returns the name of the language" : function (test) {
<del> if (typeof module !== 'undefined' && module.exports) {
<del> test.equal(require('../../lang/be'), 'be', "module should export be");
<del> }
<del>
<ide> test.done();
<ide> }
<add>
<ide> };
<add><path>test/locale/my.js
<del><path>test/lang/my.js
<ide> var moment = require("../../moment");
<ide> Myanmar Burmese
<ide> *************************************************/
<ide>
<del>exports["lang:my"] = {
<add>exports["locale:my"] = {
<ide> setUp: function (cb) {
<del> moment.lang('my');
<add> moment.locale('my');
<ide> moment.createFromInputFallback = function () {
<ide> throw new Error("input not handled by moment");
<ide> };
<ide> cb();
<ide> },
<ide>
<ide> tearDown: function (cb) {
<del> moment.lang('en');
<add> moment.locale('en');
<ide> cb();
<ide> },
<ide>
<ide> exports["lang:my"] = {
<ide> test.equal(moment([2012, 0, 9]).format('w ww wo'), '၂ ၀၂ ၂', "Jan 9 2012 should be week 2");
<ide> test.equal(moment([2012, 0, 15]).format('w ww wo'), '၂ ၀၂ ၂', "Jan 15 2012 should be week 2");
<ide>
<del> test.done();
<del> },
<del>
<del> "returns the name of the language": function (test) {
<del> if (typeof module !== 'undefined' && module.exports) {
<del> test.equal(require('../../lang/my'), 'my', "module should export my");
<del> }
<del>
<ide> test.done();
<ide> }
<add>
<ide> };
<add><path>test/locale/ti.js
<del><path>test/lang/ti.js
<ide> var moment = require("../../moment");
<ide> Tibetan
<ide> *************************************************/
<ide>
<del>exports["lang:ti"] = {
<add>exports["locale:ti"] = {
<ide> setUp : function (cb) {
<del> moment.lang('ti');
<add> moment.locale('ti');
<ide> moment.createFromInputFallback = function () {
<ide> throw new Error("input not handled by moment");
<ide> };
<ide> cb();
<ide> },
<ide>
<ide> tearDown : function (cb) {
<del> moment.lang('en');
<add> moment.locale('en');
<ide> cb();
<ide> },
<ide>
<ide> exports["lang:ti"] = {
<ide> test.equal(moment([2012, 0, 14]).format('w ww wo'), '༢ ༠༢ ༢', "Jan 14 2012 should be week 2");
<ide> test.equal(moment([2012, 0, 15]).format('w ww wo'), '༣ ༠༣ ༣', "Jan 15 2012 should be week 3");
<ide>
<del> test.done();
<del> },
<del>
<del> "returns the name of the language" : function (test) {
<del> if (typeof module !== 'undefined' && module.exports) {
<del> test.equal(require('../../lang/ti'), 'ti', "module should export ti");
<del> }
<del>
<ide> test.done();
<ide> }
<ide> }; | 8 |
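The moment.js patch above moves the remaining language definitions from the deprecated `moment.lang(...)` entry point to `moment.defineLocale(...)`, with the tests switching the active locale via `moment.locale(...)`. A minimal sketch of how a locale defined this way is registered and used — the `x-demo` locale data below is invented for illustration:

```javascript
// Illustrative only: registering and activating a moment.js locale.
var moment = require('moment');

moment.defineLocale('x-demo', {
    months : 'M01_M02_M03_M04_M05_M06_M07_M08_M09_M10_M11_M12'.split('_'),
    monthsShort : 'm01_m02_m03_m04_m05_m06_m07_m08_m09_m10_m11_m12'.split('_'),
    weekdays : 'D0_D1_D2_D3_D4_D5_D6'.split('_'),
    weekdaysShort : 'd0_d1_d2_d3_d4_d5_d6'.split('_'),
    weekdaysMin : 'd0_d1_d2_d3_d4_d5_d6'.split('_')
});

moment.locale('x-demo');                                  // make it the active locale
console.log(moment([2012, 0, 9]).format('dddd, MMMM'));   // "D1, M01"
moment.locale('en');                                      // switch back when done
```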
PHP | PHP | allow listener for core event | 2f158e113506130561c484bcd0ea5a8f959393cb | <ide><path>src/Illuminate/Foundation/Console/ListenerMakeCommand.php
<ide> protected function buildClass($name)
<ide>
<ide> $event = $this->option('event');
<ide>
<del> if (! Str::startsWith($event, $this->laravel->getNamespace())) {
<add> if (! Str::startsWith($event, $this->laravel->getNamespace()) && ! Str::startsWith($event, 'Illuminate')) {
<ide> $event = $this->laravel->getNamespace().'Events\\'.$event;
<ide> }
<ide> | 1 |
PHP | PHP | correct doc block | 783c783391e40a476fb08a9218f91a5b82d0e639 | <ide><path>src/Routing/Router.php
<ide> public static function parseNamedParams(Request $request, array $options = []) {
<ide> *
<ide> * @param string $path The path prefix for the scope. This path will be prepended
<ide> * to all routes connected in the scoped collection.
<del> * @param array $params An array of routing defaults to add to each connected route.
<add> * @param array|callable $params An array of routing defaults to add to each connected route.
<ide> * If you have no parameters, this argument can be a callable.
<ide> * @param callable $callback The callback to invoke with the scoped collection.
<ide> * @throws \InvalidArgumentException When an invalid callable is provided.
<ide> * @return null|\Cake\Routing\RouteBuilder The route builder
<ide> * was created/used.
<ide> */
<del> public static function scope($path, array $params = [], $callback = null) {
<add> public static function scope($path, $params = [], $callback = null) {
<ide> $builder = new RouteBuilder(static::$_collection, '/', [], [
<ide> 'routeClass' => static::defaultRouteClass(),
<ide> 'extensions' => static::$_collection->extensions() | 1 |
Python | Python | fix resnet breakage and add keras end-to-end tests | 8367cf6dabe11adf7628541706b660821f397dce | <ide><path>official/recommendation/ncf_test.py
<ide> from official.recommendation import neumf_model
<ide> from official.recommendation import ncf_common
<ide> from official.recommendation import ncf_estimator_main
<add>from official.recommendation import ncf_keras_main
<add>from official.utils.testing import integration
<ide>
<ide>
<ide> NUM_TRAIN_NEG = 4
<ide> def test_hit_rate_and_ndcg(self):
<ide> self.assertAlmostEqual(ndcg, (1 + math.log(2) / math.log(3) +
<ide> 2 * math.log(2) / math.log(4)) / 4)
<ide>
<add> _BASE_END_TO_END_FLAGS = ['-batch_size', '1024', '-train_epochs', '1']
<ide>
<del> _BASE_END_TO_END_FLAGS = {
<del> "batch_size": 1024,
<del> "train_epochs": 1,
<del> "use_synthetic_data": True
<del> }
<add> @mock.patch.object(rconst, "SYNTHETIC_BATCHES_PER_EPOCH", 100)
<add> def test_end_to_end_estimator(self):
<add> integration.run_synthetic(
<add> ncf_estimator_main.main, tmp_root=self.get_temp_dir(), max_train=None,
<add> extra_flags=self._BASE_END_TO_END_FLAGS)
<add>
<add> @mock.patch.object(rconst, "SYNTHETIC_BATCHES_PER_EPOCH", 100)
<add> def test_end_to_end_estimator_mlperf(self):
<add> integration.run_synthetic(
<add> ncf_estimator_main.main, tmp_root=self.get_temp_dir(), max_train=None,
<add> extra_flags=self._BASE_END_TO_END_FLAGS + ['-ml_perf', 'True'])
<ide>
<del> @flagsaver.flagsaver(**_BASE_END_TO_END_FLAGS)
<ide> @mock.patch.object(rconst, "SYNTHETIC_BATCHES_PER_EPOCH", 100)
<del> def test_end_to_end(self):
<del> ncf_estimator_main.main(None)
<add> def test_end_to_end_keras(self):
<add> self.skipTest("TODO: fix synthetic data with keras")
<add> integration.run_synthetic(
<add> ncf_keras_main.main, tmp_root=self.get_temp_dir(), max_train=None,
<add> extra_flags=self._BASE_END_TO_END_FLAGS +
<add> ['-distribution_strategy', 'off'])
<ide>
<del> @flagsaver.flagsaver(ml_perf=True, **_BASE_END_TO_END_FLAGS)
<ide> @mock.patch.object(rconst, "SYNTHETIC_BATCHES_PER_EPOCH", 100)
<del> def test_end_to_end_mlperf(self):
<del> ncf_estimator_main.main(None)
<add> def test_end_to_end_keras_mlperf(self):
<add> self.skipTest("TODO: fix synthetic data with keras")
<add> integration.run_synthetic(
<add> ncf_keras_main.main, tmp_root=self.get_temp_dir(), max_train=None,
<add> extra_flags=self._BASE_END_TO_END_FLAGS +
<add> ['-ml_perf', 'True', '-distribution_strategy', 'off'])
<ide>
<ide>
<ide> if __name__ == "__main__":
<ide><path>official/resnet/cifar10_test.py
<ide> import tensorflow as tf # pylint: disable=g-bad-import-order
<ide>
<ide> from official.resnet import cifar10_main
<add>from official.resnet.keras import keras_cifar_main
<add>from official.resnet.keras import keras_common
<ide> from official.utils.testing import integration
<ide>
<ide> tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.ERROR)
<ide> class BaseTest(tf.test.TestCase):
<ide> """Tests for the Cifar10 version of Resnet.
<ide> """
<ide>
<add> _num_validation_images = None
<add>
<ide> @classmethod
<ide> def setUpClass(cls): # pylint: disable=invalid-name
<ide> super(BaseTest, cls).setUpClass()
<ide> cifar10_main.define_cifar_flags()
<add> keras_common.define_keras_flags()
<add>
<add> def setUp(self):
<add> super(BaseTest, self).setUp()
<add> self._num_validation_images = cifar10_main.NUM_IMAGES['validation']
<add> cifar10_main.NUM_IMAGES['validation'] = 4
<ide>
<ide> def tearDown(self):
<ide> super(BaseTest, self).tearDown()
<ide> tf.io.gfile.rmtree(self.get_temp_dir())
<add> cifar10_main.NUM_IMAGES['validation'] = self._num_validation_images
<ide>
<ide> def test_dataset_input_fn(self):
<ide> fake_data = bytearray()
<ide> def test_cifar10model_shape_v2(self):
<ide> def test_cifar10_end_to_end_synthetic_v1(self):
<ide> integration.run_synthetic(
<ide> main=cifar10_main.run_cifar, tmp_root=self.get_temp_dir(),
<del> extra_flags=['-resnet_version', '1']
<add> extra_flags=['-resnet_version', '1', '-batch_size', '4']
<ide> )
<ide>
<ide> def test_cifar10_end_to_end_synthetic_v2(self):
<ide> integration.run_synthetic(
<ide> main=cifar10_main.run_cifar, tmp_root=self.get_temp_dir(),
<del> extra_flags=['-resnet_version', '2']
<add> extra_flags=['-resnet_version', '2', '-batch_size', '4']
<add> )
<add>
<add> def test_cifar10_end_to_end_keras_synthetic_v1(self):
<add> integration.run_synthetic(
<add> main=keras_cifar_main.main, tmp_root=self.get_temp_dir(),
<add> extra_flags=['-resnet_version', '1', '-batch_size', '4',
<add> '-train_steps', '1']
<ide> )
<ide>
<ide>
<ide><path>official/resnet/imagenet_test.py
<ide> import tensorflow as tf # pylint: disable=g-bad-import-order
<ide>
<ide> from official.resnet import imagenet_main
<add>from official.resnet.keras import keras_common
<add>from official.resnet.keras import keras_imagenet_main
<ide> from official.utils.testing import integration
<ide>
<ide> tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.ERROR)
<ide>
<ide> class BaseTest(tf.test.TestCase):
<ide>
<add> _num_validation_images = None
<add>
<ide> @classmethod
<ide> def setUpClass(cls): # pylint: disable=invalid-name
<ide> super(BaseTest, cls).setUpClass()
<ide> imagenet_main.define_imagenet_flags()
<add> keras_common.define_keras_flags()
<add>
<add> def setUp(self):
<add> super(BaseTest, self).setUp()
<add> self._num_validation_images = imagenet_main.NUM_IMAGES['validation']
<add> imagenet_main.NUM_IMAGES['validation'] = 4
<ide>
<ide> def tearDown(self):
<ide> super(BaseTest, self).tearDown()
<ide> tf.io.gfile.rmtree(self.get_temp_dir())
<add> imagenet_main.NUM_IMAGES['validation'] = self._num_validation_images
<ide>
<ide> def _tensor_shapes_helper(self, resnet_size, resnet_version, dtype, with_gpu):
<ide> """Checks the tensor shapes after each phase of the ResNet model."""
<ide> def test_imagenetmodel_shape_v2(self):
<ide> def test_imagenet_end_to_end_synthetic_v1(self):
<ide> integration.run_synthetic(
<ide> main=imagenet_main.run_imagenet, tmp_root=self.get_temp_dir(),
<del> extra_flags=['-v', '1']
<add> extra_flags=['-resnet_version', '1', '-batch_size', '4']
<ide> )
<ide>
<ide> def test_imagenet_end_to_end_synthetic_v2(self):
<ide> integration.run_synthetic(
<ide> main=imagenet_main.run_imagenet, tmp_root=self.get_temp_dir(),
<del> extra_flags=['-v', '2']
<add> extra_flags=['-resnet_version', '2', '-batch_size', '4']
<ide> )
<ide>
<ide> def test_imagenet_end_to_end_synthetic_v1_tiny(self):
<ide> integration.run_synthetic(
<ide> main=imagenet_main.run_imagenet, tmp_root=self.get_temp_dir(),
<del> extra_flags=['-resnet_version', '1', '-resnet_size', '18']
<add> extra_flags=['-resnet_version', '1', '-batch_size', '4',
<add> '-resnet_size', '18']
<ide> )
<ide>
<ide> def test_imagenet_end_to_end_synthetic_v2_tiny(self):
<ide> integration.run_synthetic(
<ide> main=imagenet_main.run_imagenet, tmp_root=self.get_temp_dir(),
<del> extra_flags=['-resnet_version', '2', '-resnet_size', '18']
<add> extra_flags=['-resnet_version', '2', '-batch_size', '4',
<add> '-resnet_size', '18']
<ide> )
<ide>
<ide> def test_imagenet_end_to_end_synthetic_v1_huge(self):
<ide> integration.run_synthetic(
<ide> main=imagenet_main.run_imagenet, tmp_root=self.get_temp_dir(),
<del> extra_flags=['-resnet_version', '1', '-resnet_size', '200']
<add> extra_flags=['-resnet_version', '1', '-batch_size', '4',
<add> '-resnet_size', '200']
<ide> )
<ide>
<ide> def test_imagenet_end_to_end_synthetic_v2_huge(self):
<ide> integration.run_synthetic(
<ide> main=imagenet_main.run_imagenet, tmp_root=self.get_temp_dir(),
<del> extra_flags=['-resnet_version', '2', '-resnet_size', '200']
<add> extra_flags=['-resnet_version', '2', '-batch_size', '4',
<add> '-resnet_size', '200']
<add> )
<add>
<add> def test_imagenet_end_to_end_keras_synthetic_v1(self):
<add> integration.run_synthetic(
<add> main=keras_imagenet_main.main, tmp_root=self.get_temp_dir(),
<add> extra_flags=['-resnet_version', '1', '-batch_size', '4',
<add> '-train_steps', '1']
<ide> )
<ide>
<ide>
<ide><path>official/resnet/keras/keras_cifar_main.py
<ide> def run(flags_obj):
<ide> distribution_strategy=flags_obj.distribution_strategy,
<ide> num_gpus=flags_obj.num_gpus)
<ide>
<del> strategy_scope = distribution_utils.MaybeDistributionScope(strategy)
<add> strategy_scope = keras_common.get_strategy_scope(strategy)
<ide>
<ide> with strategy_scope:
<ide> optimizer = keras_common.get_optimizer()
<ide><path>official/resnet/keras/keras_imagenet_main.py
<ide> def run(flags_obj):
<ide> distribution_strategy=flags_obj.distribution_strategy,
<ide> num_gpus=flags_obj.num_gpus)
<ide>
<del> strategy_scope = distribution_utils.MaybeDistributionScope(strategy)
<add> strategy_scope = keras_common.get_strategy_scope(strategy)
<ide>
<ide> with strategy_scope:
<ide> optimizer = keras_common.get_optimizer()
<ide><path>official/utils/misc/distribution_utils.py
<ide> def get_distribution_strategy(distribution_strategy="default",
<ide> if distribution_strategy == "parameter_server":
<ide> return tf.distribute.experimental.ParameterServerStrategy()
<ide>
<del> if distribution_strategy == "collective":
<del> return tf.contrib.distribute.CollectiveAllReduceStrategy(
<del> num_gpus_per_worker=num_gpus)
<del>
<ide> raise ValueError(
<ide> "Unrecognized Distribution Strategy: %r" % distribution_strategy)
<ide> | 6 |
Text | Text | add changelog entry for e1ceb10 | 876865aba7c0ca2c069ff4be88e9916f5f0f28bd | <ide><path>activerecord/CHANGELOG.md
<add>* Undefine attribute methods on descendants when resetting column
<add> information.
<add>
<add> *Chris Salzberg*
<add>
<ide> * Log database query callers
<ide>
<ide> Add `verbose_query_logs` configuration option to display the caller | 1 |
Python | Python | update taskgroup typing | 33af2b19a210e1041ac93b563123d41a29a4d637 | <ide><path>airflow/utils/task_group.py
<ide> def set_upstream(self, task_or_task_list: Union[TaskMixin, Sequence[TaskMixin]])
<ide> """Set a TaskGroup/task/list of task upstream of this TaskGroup."""
<ide> self._set_relative(task_or_task_list, upstream=True)
<ide>
<del> def __enter__(self):
<add> def __enter__(self) -> "TaskGroup":
<ide> TaskGroupContext.push_context_managed_task_group(self)
<ide> return self
<ide> | 1 |
Javascript | Javascript | set state to loading on reload() | b2c5584fceb17f09b53ffd75345c39f65694174c | <ide><path>Libraries/Components/WebView/WebView.ios.js
<ide> var WebView = React.createClass({
<ide> * Reloads the current page.
<ide> */
<ide> reload: function() {
<add> this.setState({viewState: WebViewState.LOADING});
<ide> UIManager.dispatchViewManagerCommand(
<ide> this.getWebViewHandle(),
<ide> UIManager.RCTWebView.Commands.reload, | 1 |
Go | Go | add loading message | 20b1e19641d94b5e71ecee7a9703eb392a81cf9a | <ide><path>runtime.go
<ide> func (runtime *Runtime) Destroy(container *Container) error {
<ide> }
<ide>
<ide> func (runtime *Runtime) restore() error {
<add> wheel := "-\\|/"
<add> if os.Getenv("DEBUG") == "" {
<add> fmt.Printf("Loading containers: ")
<add> }
<ide> dir, err := ioutil.ReadDir(runtime.repository)
<ide> if err != nil {
<ide> return err
<ide> }
<del> for _, v := range dir {
<add> for i, v := range dir {
<ide> id := v.Name()
<ide> container, err := runtime.Load(id)
<add> if i%21 == 0 && os.Getenv("DEBUG") == "" {
<add> fmt.Printf("\b%c", wheel[i%4])
<add> }
<ide> if err != nil {
<ide> utils.Debugf("Failed to load container %v: %v", id, err)
<ide> continue
<ide> }
<ide> utils.Debugf("Loaded container %v", container.ID)
<ide> }
<add> if os.Getenv("DEBUG") == "" {
<add> fmt.Printf("\bdone.\n")
<add> }
<ide> return nil
<ide> }
<ide> | 1 |
Javascript | Javascript | add another test case | 1da5c0fc83807661cedbdf610887a3291cda6b0c | <ide><path>test/cases/side-effects/dynamic-reexports/index.js
<ide> import {
<ide> value as valueMultipleSources,
<ide> value2 as value2MultipleSources
<ide> } from "./multiple-sources";
<add>import { a, b } from "./swapped";
<ide>
<ide> it("should dedupe static reexport target", () => {
<ide> expect(valueStatic).toBe(42);
<ide> it("should handle multiple dynamic sources correctly", () => {
<ide> expect(valueMultipleSources).toBe(42);
<ide> expect(value2MultipleSources).toBe(42);
<ide> });
<add>
<add>it("should handle renamed dynamic reexports", () => {
<add> expect(a).toBe(43);
<add> expect(b).toBe(42);
<add>});
<ide><path>test/cases/side-effects/dynamic-reexports/swapped/a.js
<add>export const a = 42;
<ide><path>test/cases/side-effects/dynamic-reexports/swapped/b.js
<add>Object(exports).b = 43;
<ide><path>test/cases/side-effects/dynamic-reexports/swapped/index.js
<add>export * from "./module2";
<ide><path>test/cases/side-effects/dynamic-reexports/swapped/module.js
<add>export * from "./a";
<add>export * from "./b";
<ide><path>test/cases/side-effects/dynamic-reexports/swapped/module2.js
<add>export { a as b } from "./module";
<add>export { b as a } from "./module";
<add>export * from "./module"; | 6 |
Javascript | Javascript | remove unused import | 37c0e0c5e7291800cddf57ff2c6b782661016b8d | <ide><path>packages/ember-runtime/lib/mixins/registry_proxy.js
<ide> import {
<ide> Mixin
<ide> } from 'ember-metal';
<del>import { deprecate } from 'ember-debug';
<ide>
<ide> /**
<ide> RegistryProxyMixin is used to provide public access to specific | 1 |
Ruby | Ruby | give better python advice | e40d302deeb8e1011d4f0c9dc3aa1ceafefb654c | <ide><path>Library/Homebrew/formula_cellar_checks.rb
<ide> def check_easy_install_pth lib
<ide> <<-EOS.undent
<ide> easy-install.pth files were found
<ide> These .pth files are likely to cause link conflicts. Please invoke
<del> setup.py with options
<del> --single-version-externally-managed --record=install.txt
<add> setup.py using Language::Python.setup_install_args.
<ide> The offending files are
<ide> #{pth_found * "\n "}
<ide> EOS | 1 |
Go | Go | fix tests on fedora | eb4578daee98561b16d11d2978b5f5e297d903e8 | <ide><path>graphdriver/devmapper/driver_test.go
<ide> type Set map[string]bool
<ide>
<ide> func (r Set) Assert(t *testing.T, names ...string) {
<ide> for _, key := range names {
<del> if _, exists := r[key]; !exists {
<add> required := true
<add> if strings.HasPrefix(key, "?") {
<add> key = key[1:]
<add> required = false
<add> }
<add> if _, exists := r[key]; !exists && required {
<ide> t.Fatalf("Key not set: %s", key)
<ide> }
<ide> delete(r, key)
<ide> func TestDriverCreate(t *testing.T) {
<ide> "ioctl.blkgetsize",
<ide> "ioctl.loopsetfd",
<ide> "ioctl.loopsetstatus",
<add> "?ioctl.loopctlgetfree",
<ide> )
<ide>
<ide> if err := d.Create("1", ""); err != nil {
<ide> func TestDriverRemove(t *testing.T) {
<ide> "ioctl.blkgetsize",
<ide> "ioctl.loopsetfd",
<ide> "ioctl.loopsetstatus",
<add> "?ioctl.loopctlgetfree",
<ide> )
<ide>
<ide> if err := d.Create("1", ""); err != nil { | 1 |
Text | Text | add datatonic to inthewild | cb0b8951766a750df8cedc92a6b9254386f80639 | <ide><path>INTHEWILD.md
<ide> Currently, **officially** using Airflow:
<ide> 1. [DataFox](https://www.datafox.com/) [[@sudowork](https://github.com/sudowork)]
<ide> 1. [Datamaran](https://www.datamaran.com) [[@valexharo](https://github.com/valexharo)]
<ide> 1. [DataSprints](https://datasprints.com/) [[@lopesdiego12](https://github.com/lopesdiego12) & [@rafaelsantanaep](https://github.com/rafaelsantanaep)]
<add>1. [Datatonic](https://datatonic.com/) [[@teamdatatonic](https://github.com/teamdatatonic)]
<ide> 1. [Datumo](https://datumo.io) [[@michalmisiewicz](https://github.com/michalmisiewicz)]
<ide> 1. [Dentsu Inc.](http://www.dentsu.com/) [[@bryan831](https://github.com/bryan831) & [@loozhengyuan](https://github.com/loozhengyuan)]
<ide> 1. [Deseret Digital Media](http://deseretdigital.com/) [[@formigone](https://github.com/formigone) | 1 |
Ruby | Ruby | add option to force no bottle revision | b20335b314f45d25e9c9f96a20575d7336cef1cb | <ide><path>Library/Homebrew/cmd/bottle.rb
<ide> def bottle_formula f
<ide> begin
<ide> bottle_revision += 1
<ide> filename = bottle_filename(f, bottle_revision)
<del> end while master_bottle_filenames.include? filename
<add> end while not ARGV.include? '--no-revision' \
<add> and master_bottle_filenames.include? filename
<ide>
<ide> if bottle_filename_formula_name(filename).empty?
<ide> return ofail "Add a new regex to bottle_version.rb to parse the bottle filename." | 1 |
Java | Java | fix bugs in equals and hashcode | 77e27713eae8decb8369c4f2e27df4144f205257 | <ide><path>rxjava-core/src/main/java/rx/schedulers/Timestamped.java
<ide> public boolean equals(Object obj) {
<ide> if (this == obj) {
<ide> return true;
<ide> }
<add> if (obj == null) {
<add> return false;
<add> }
<ide> if (!(obj instanceof Timestamped)) {
<ide> return false;
<ide> }
<ide> public boolean equals(Object obj) {
<ide> public int hashCode() {
<ide> final int prime = 31;
<ide> int result = 1;
<del> result = prime * result + (int) (timestampMillis ^ (timestampMillis));
<add> result = prime * result + (int) (timestampMillis ^ (timestampMillis >>> 32));
<ide> result = prime * result + ((value == null) ? 0 : value.hashCode());
<ide> return result;
<ide> } | 1 |
Ruby | Ruby | remove warnings from actioncable | e1459c7c86363d941191551505ad083d4335b815 | <ide><path>actioncable/lib/action_cable/channel/base.rb
<ide> def initialize(connection, identifier, params = {})
<ide> # transmission until redis pubsub subscription is confirmed.
<ide> @defer_subscription_confirmation = false
<ide>
<add> @reject_subscription = nil
<add> @subscription_confirmation_sent = nil
<add>
<ide> delegate_connection_identifiers
<ide> subscribe_to_channel
<ide> end
<ide><path>actioncable/lib/action_cable/connection/base.rb
<ide> def initialize(server, env)
<ide> @subscriptions = ActionCable::Connection::Subscriptions.new(self)
<ide> @message_buffer = ActionCable::Connection::MessageBuffer.new(self)
<ide>
<add> @_internal_redis_subscriptions = nil
<ide> @started_at = Time.now
<ide> end
<ide>
<ide> def cookies
<ide> end
<ide>
<ide>
<del> private
<add> protected
<ide> attr_reader :websocket
<ide> attr_reader :message_buffer
<ide>
<add> private
<ide> def on_open
<ide> connect if respond_to?(:connect)
<ide> subscribe_to_internal_channel
<ide><path>actioncable/lib/action_cable/connection/message_buffer.rb
<ide> def process!
<ide> receive_buffered_messages
<ide> end
<ide>
<del> private
<add> protected
<ide> attr_reader :connection
<ide> attr_accessor :buffered_messages
<ide>
<add> private
<ide> def valid?(message)
<ide> message.is_a?(String)
<ide> end
<ide><path>actioncable/lib/action_cable/connection/subscriptions.rb
<ide> def unsubscribe_from_all
<ide> subscriptions.each { |id, channel| channel.unsubscribe_from_channel }
<ide> end
<ide>
<add> protected
<add> attr_reader :connection, :subscriptions
<ide>
<ide> private
<del> attr_reader :connection, :subscriptions
<ide> delegate :logger, to: :connection
<ide>
<ide> def find(data)
<ide><path>actioncable/lib/action_cable/connection/web_socket.rb
<ide> def transmit(data)
<ide> websocket.send data
<ide> end
<ide>
<del> private
<add> protected
<ide> attr_reader :websocket
<ide> end
<ide> end
<ide><path>actioncable/test/channel/stream_test.rb
<ide> def send_confirmation
<ide> connection = TestConnection.new
<ide> connection.expects(:pubsub).returns EM::Hiredis.connect.pubsub
<ide>
<del> channel = ChatChannel.new connection, "{id: 1}", { id: 1 }
<add> ChatChannel.new connection, "{id: 1}", { id: 1 }
<ide> assert_nil connection.last_transmission
<ide>
<ide> EM::Timer.new(0.1) do
<ide><path>actioncable/test/connection/cross_site_forgery_test.rb
<ide> def send_async(method, *args)
<ide> private
<ide> def assert_origin_allowed(origin)
<ide> response = connect_with_origin origin
<del> assert_equal -1, response[0]
<add> assert_equal(-1, response[0])
<ide> end
<ide>
<ide> def assert_origin_not_allowed(origin) | 7 |