column        type            range / distinct values
content_type  stringclasses   8 values
main_lang     stringclasses   7 values
message       stringlengths   1 to 50 characters
sha           stringlengths   40 characters (fixed length)
patch         stringlengths   52 to 962k characters
file_count    int64           1 to 300
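
Each row pairs one commit's metadata (content type, primary language, commit message, 40-character SHA, file count) with its full patch text. Below is a minimal sketch of loading and filtering such a dataset with the Hugging Face `datasets` library, assuming the columns above; the repository id `org/commit-patches` is a placeholder, not the dataset's real path.

```python
from datasets import load_dataset

# Placeholder repository id; substitute the dataset's actual path.
ds = load_dataset("org/commit-patches", split="train")

# Keep only single-file JavaScript commits, mirroring several of the rows below.
js_single = ds.filter(
    lambda row: row["main_lang"] == "Javascript" and row["file_count"] == 1
)

# Print a short summary of the first few matching commits.
for row in js_single.select(range(min(3, len(js_single)))):
    print(row["sha"][:8], row["file_count"], row["message"])
```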
content_type: Javascript
main_lang: Javascript
message: add linkto test
sha: 479d6bb6ec9d3dfc202a1576ff1e6d71009ede2c
patch:
<ide><path>packages/ember-routing/lib/handlebars_ext.js <ide> Ember.onLoad('Ember.Handlebars', function(Handlebars) { <ide> }); <ide> <ide> }); <add> <add>function args(linkView) { <add> var ret = [ linkView.namedRoute ], <add> params = linkView.parameters, <add> contexts = params.contexts, <add> roots = params.roots, <add> data = params.data; <add> <add> for (var i=0, l=contexts.length; i<l; i++) { <add> ret.push( Ember.Handlebars.get(roots[i], contexts[i], { data: data }) ); <add> } <add> <add> return ret; <add>} <add> <add>var LinkView = Ember.View.extend({ <add> tagName: 'a', <add> attributeBindings: 'href', <add> <add> click: function() { <add> this.router.transitionTo.apply(this.router, args(this)); <add> return false; <add> }, <add> <add> href: Ember.computed(function() { <add> // TODO: Fix this in route-recognizer <add> return this.router.generate.apply(this.router, args(this)) || "/"; <add> }) <add>}); <add> <add>LinkView.toString = function() { return "LinkView"; }; <add> <add>Ember.Handlebars.registerHelper('linkTo', function(name) { <add> var options = [].slice.call(arguments, -1)[0]; <add> var contexts = [].slice.call(arguments, 1, -1); <add> <add> var hash = options.hash; <add> <add> var controller = options.data.keywords.controller; <add> Ember.assert("You cannot use the {{linkTo}} helper because your current template does not have a controller", controller.target); <add> <add> hash.namedRoute = name; <add> hash.router = controller.target; <add> hash.parameters = { <add> data: options.data, <add> contexts: contexts, <add> roots: options.contexts <add> }; <add> <add> return Ember.Handlebars.helpers.view.call(this, LinkView, options); <add>}); <ide><path>packages/ember-routing/lib/system/route.js <ide> Ember.Route = Ember.Object.extend({ <ide> setup: function(context) { <ide> var templateName = this.templateName, <ide> controller = this.lookup('controller', templateName, function() { <del> if (context) { <add> if (context && context.isSCArray) { <add> return Ember.ArrayController.create({ content: context }); <add> } else if (context) { <ide> return Ember.ObjectController.create({ content: context }); <ide> } else { <ide> return Ember.Controller.create(); <ide><path>packages/ember-routing/tests/helpers/link_to_test.js <add>var Router, App, AppView, templates, router, eventDispatcher; <add>var get = Ember.get, set = Ember.set; <add> <add>function bootApplication() { <add> router = Router.create({ <add> location: 'none' <add> }); <add> <add> Ember.run(function() { <add> router._activeViews.application = AppView.create().appendTo('#qunit-fixture'); <add> router.startRouting(); <add> }); <add>} <add> <add>module("The {{linkTo}} helper", { <add> setup: function() { <add> Ember.run(function() { <add> App = Ember.Namespace.create(); <add> App.toString = function() { return "App"; }; <add> <add> Ember.TEMPLATES.app = Ember.Handlebars.compile("{{outlet}}"); <add> Ember.TEMPLATES.home = Ember.Handlebars.compile("<h3>Home</h3>{{#linkTo about id='about-link'}}About{{/linkTo}}"); <add> Ember.TEMPLATES.about = Ember.Handlebars.compile("<h3>About</h3>{{#linkTo home id='home-link'}}Home{{/linkTo}}"); <add> Ember.TEMPLATES.item = Ember.Handlebars.compile("<h3>Item</h3><p>{{name}}</p>{{#linkTo home id='home-link'}}Home{{/linkTo}}"); <add> <add> AppView = Ember.View.extend({ <add> template: Ember.TEMPLATES.app <add> }); <add> <add> Router = Ember.Router.extend({ <add> namespace: App, <add> templates: Ember.TEMPLATES <add> }); <add> <add> eventDispatcher = Ember.EventDispatcher.create(); <add> 
eventDispatcher.setup(); <add> }); <add> }, <add> <add> teardown: function() { <add> Ember.run(function() { eventDispatcher.destroy(); }); <add> } <add>}); <add> <add>test("The {{linkTo}} helper moves into the named route", function() { <add> Router.map(function(match) { <add> match("/").to("home"); <add> match("/about").to("about"); <add> }); <add> <add> bootApplication(); <add> <add> Ember.run(function() { <add> router.handleURL("/"); <add> }); <add> <add> equal(Ember.$('h3:contains(Home)', '#qunit-fixture').length, 1, "The home template was rendered"); <add> <add> console.log(Ember.$('#qunit-fixture')[0]); <add> <add> Ember.run(function() { <add> Ember.$('a', '#qunit-fixture').click(); <add> }); <add> <add> equal(Ember.$('h3:contains(About)', '#qunit-fixture').length, 1, "The about template was rendered"); <add>}); <add> <add>test("The {{linkTo}} helper moves into the named route with context", function() { <add> Router.map(function(match) { <add> match("/").to("home"); <add> match("/about").to("about"); <add> match("/item/:id").to("item"); <add> }); <add> <add> Ember.TEMPLATES.about = Ember.Handlebars.compile("<h3>List</h3><ul>{{#each controller}}<li>{{#linkTo item this}}{{name}}{{/linkTo}}<li>{{/each}}</ul>{{#linkTo home id='home-link'}}Home{{/linkTo}}"); <add> <add> var people = { <add> yehuda: "Yehuda Katz", <add> tom: "Tom Dale", <add> erik: "Erik Brynroflsson" <add> }; <add> <add> App.AboutRoute = Ember.Route.extend({ <add> model: function() { <add> return Ember.A([ <add> { id: "yehuda", name: "Yehuda Katz" }, <add> { id: "tom", name: "Tom Dale" }, <add> { id: "erik", name: "Erik Brynroflsson" } <add> ]); <add> } <add> }); <add> <add> App.ItemRoute = Ember.Route.extend({ <add> serialize: function(object) { <add> return { id: object.id }; <add> }, <add> <add> deserialize: function(params) { <add> return { id: params.id, name: people[params.id] }; <add> } <add> }); <add> <add> bootApplication(); <add> <add> Ember.run(function() { <add> router.handleURL("/about"); <add> }); <add> <add> equal(Ember.$('h3:contains(List)', '#qunit-fixture').length, 1, "The home template was rendered"); <add> equal(Ember.$('#home-link').attr('href'), '/', "The home link points back at /"); <add> <add> Ember.run(function() { <add> Ember.$('li a:contains(Yehuda)', '#qunit-fixture').click(); <add> }); <add> <add> equal(Ember.$('h3:contains(Item)', '#qunit-fixture').length, 1, "The item template was rendered"); <add> equal(Ember.$('p', '#qunit-fixture').text(), "Yehuda Katz", "The name is correct"); <add> <add> Ember.run(function() { Ember.$('#home-link').click(); }); <add> Ember.run(function() { Ember.$('#about-link').click(); }); <add> <add> equal(Ember.$('li a:contains(Yehuda)').attr('href'), "/item/yehuda"); <add> equal(Ember.$('li a:contains(Tom)').attr('href'), "/item/tom"); <add> equal(Ember.$('li a:contains(Erik)').attr('href'), "/item/erik"); <add> <add> Ember.run(function() { <add> Ember.$('li a:contains(Erik)', '#qunit-fixture').click(); <add> }); <add> <add> equal(Ember.$('h3:contains(Item)', '#qunit-fixture').length, 1, "The item template was rendered"); <add> equal(Ember.$('p', '#qunit-fixture').text(), "Erik Brynroflsson", "The name is correct"); <add>}); <add> <ide><path>packages/ember-views/lib/views/view.js <ide> Ember.View = Ember.CoreView.extend( <ide> @type Object <ide> */ <ide> controller: Ember.computed(function(key) { <del> var parentView = get(this, 'parentView'); <add> var parentView = get(this, '_parentView'); <ide> return parentView ? 
get(parentView, 'controller') : null; <del> }).property(), <add> }).property('_parentView'), <ide> <ide> /** <ide> A view may contain a layout. A layout is a regular template but
file_count: 4
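
The patch column uses an inline line-prefix markup rather than standard unified-diff syntax: `<ide><path>` opens a file section, `<ide>` marks an unchanged context line, `<add>` an added line, and `<del>` a deleted line. The sketch below tallies files and changed lines from that markup; the marker semantics are inferred from the rows shown here rather than from an official spec. For the row above it should report 4 files, matching the row's file_count.

```python
import re
from collections import Counter

# Markers observed in the patch column (inferred, not an official spec):
#   <ide><path>... opens a file section, <ide> is an unchanged context line,
#   <add> is an added line, <del> is a deleted line.
MARKER = re.compile(r"<(ide|add|del)>(<path>)?")

def patch_stats(patch: str) -> dict:
    """Best-effort counts of files, added lines and deleted lines in one patch."""
    stats = Counter(files=0, added=0, deleted=0)
    for m in MARKER.finditer(patch):
        kind, is_path = m.group(1), m.group(2)
        if is_path:
            stats["files"] += 1   # a <path> marker starts a new file section
        elif kind == "add":
            stats["added"] += 1
        elif kind == "del":
            stats["deleted"] += 1
    return dict(stats)

# Example: patch_stats(row["patch"]) -> {"files": 4, "added": ..., "deleted": ...}
```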
content_type: Python
main_lang: Python
message: reuse repositories when possible
sha: fb7c4214ced3b0533316e3eebd90ac07fe7b2933
patch:
<ide><path>contrib/brew/brew/brew.py <ide> level='INFO') <ide> client = docker.Client() <ide> processed = {} <add>processed_folders = [] <ide> <ide> <ide> def build_library(repository=None, branch=None, namespace=None, push=False, <ide> def build_library(repository=None, branch=None, namespace=None, push=False, <ide> f.close() <ide> if dst_folder != repository: <ide> rmtree(dst_folder, True) <add> for d in processed_folders: <add> rmtree(d, True) <ide> summary.print_summary(logger) <ide> <ide> <ide> def build_repo(repository, ref, docker_repo, docker_tag, namespace, push, registry): <ide> docker_repo = '{0}/{1}'.format(namespace or 'library', docker_repo) <ide> img_id = None <add> dst_folder = None <ide> if '{0}@{1}'.format(repository, ref) not in processed.keys(): <ide> logger.info('Cloning {0} (ref: {1})'.format(repository, ref)) <del> dst_folder = git.clone(repository, ref) <add> if repository not in processed: <add> rep, dst_folder = git.clone(repository, ref) <add> processed[repository] = rep <add> processed_folders.append(dst_folder) <add> else: <add> dst_folder = git.checkout(processed[repository], ref) <ide> if not 'Dockerfile' in os.listdir(dst_folder): <ide> raise RuntimeError('Dockerfile not found in cloned repository') <ide> logger.info('Building using dockerfile...') <ide> img_id, logs = client.build(path=dst_folder, quiet=True) <del> rmtree(dst_folder, True) <ide> else: <ide> img_id = processed['{0}@{1}'.format(repository, ref)] <ide> logger.info('Committing to {0}:{1}'.format(docker_repo, <ide><path>contrib/brew/brew/git.py <ide> def clone_tag(repo_url, tag, folder=None): <ide> return clone(repo_url, 'refs/tags/' + tag, folder) <ide> <ide> <add>def checkout(rep, ref=None): <add> is_commit = False <add> if ref is None: <add> ref = 'refs/heads/master' <add> elif not ref.startswith('refs/'): <add> is_commit = True <add> if is_commit: <add> rep['HEAD'] = rep.commit(ref) <add> else: <add> rep['HEAD'] = rep.refs[ref] <add> indexfile = rep.index_path() <add> tree = rep["HEAD"].tree <add> index.build_index_from_tree(rep.path, indexfile, rep.object_store, tree) <add> return rep.path <add> <ide> def clone(repo_url, ref=None, folder=None): <ide> is_commit = False <ide> if ref is None: <ide> def clone(repo_url, ref=None, folder=None): <ide> tree = rep["HEAD"].tree <ide> index.build_index_from_tree(rep.path, indexfile, rep.object_store, tree) <ide> logger.debug("done") <del> return folder <add> return rep, folder
file_count: 2
content_type: Javascript
main_lang: Javascript
message: add en_gb locale for d3.time.format
sha: 7a2553dccf2c057ddc80fd76edd0161ec84ee2ae
patch:
<ide><path>src/time/format-en_GB.js <add>// The date and time format (%c), date format (%x) and time format (%X). <add>var d3_time_formatDateTime = "%a %b %e %H:%M:%S %Y", <add> d3_time_formatDate = "%d/%m/%y", <add> d3_time_formatTime = "%H:%M:%S"; <add> <add>// The weekday and month names. <add>var d3_time_days = d3_time_daySymbols, <add> d3_time_dayAbbreviations = d3_time_days.map(d3_time_formatAbbreviate), <add> d3_time_months = ["January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December"], <add> d3_time_monthAbbreviations = d3_time_months.map(d3_time_formatAbbreviate); <add> <add>function d3_time_formatAbbreviate(name) { <add> return name.substring(0, 3); <add>}
file_count: 1
content_type: Python
main_lang: Python
message: improve coverage reporting
sha: 39d9ac9d1ba8635629891f3fb7d4ed7a134c984c
patch:
<ide><path>djangorestframework/runtests/runcoverage.py <ide> import sys <ide> os.environ['DJANGO_SETTINGS_MODULE'] = 'djangorestframework.runtests.settings' <ide> <del>from django.conf import settings <del>from django.test.utils import get_runner <ide> from coverage import coverage <ide> from itertools import chain <del>import djangorestframework <ide> <ide> def main(): <ide> """Run the tests for djangorestframework and generate a coverage report.""" <del> <del> # Discover the list of all modules that we should test coverage for <del> project_dir = os.path.dirname(djangorestframework.__file__) <del> cov_files = [] <del> for (path, dirs, files) in os.walk(project_dir): <del> # Drop tests and runtests directories from the test coverage report <del> if os.path.basename(path) == 'tests' or os.path.basename(path) == 'runtests': <del> continue <del> cov_files.extend([os.path.join(path, file) for file in files if file.endswith('.py')]) <ide> <ide> cov = coverage() <ide> cov.erase() <ide> cov.start() <add> <add> from django.conf import settings <add> from django.test.utils import get_runner <ide> TestRunner = get_runner(settings) <ide> <ide> if hasattr(TestRunner, 'func_name'): <ide> def main(): <ide> failures = test_runner.run_tests(['djangorestframework']) <ide> <ide> cov.stop() <add> <add> # Discover the list of all modules that we should test coverage for <add> import djangorestframework <add> <add> project_dir = os.path.dirname(djangorestframework.__file__) <add> cov_files = [] <add> <add> for (path, dirs, files) in os.walk(project_dir): <add> # Drop tests and runtests directories from the test coverage report <add> if os.path.basename(path) == 'tests' or os.path.basename(path) == 'runtests': <add> continue <add> <add> # Drop the compat module from coverage, since we're not interested in the coverage <add> # of a module which is specifically for resolving environment dependant imports. <add> # (Because we'll end up getting different coverage reports for it for each environment) <add> if 'compat.py' in files: <add> files.remove('compat.py') <add> <add> cov_files.extend([os.path.join(path, file) for file in files if file.endswith('.py')]) <add> <ide> cov.report(cov_files) <ide> cov.xml_report(cov_files) <ide> sys.exit(failures)
file_count: 1
content_type: Javascript
main_lang: Javascript
message: remove all overrides and overridables related code
sha: d403b8cf2eff23c9c4996ceafe00722589331233
patch:
<ide><path>test/configCases/container/overridables/cjs/test1.js <del>module.exports = "original1-cjs"; <ide><path>test/configCases/container/overridables/cjs/test2.js <del>module.exports = "original2-cjs"; <ide><path>test/configCases/container/overridables/cjs/test3.js <del>module.exports = "original3-cjs"; <ide><path>test/configCases/container/overridables/index.js <del>__webpack_override__({ <del> test1: () => <del> new Promise(resolve => { <del> setTimeout(() => { <del> resolve(() => ({ <del> __esModule: true, <del> default: "overriden1" <del> })); <del> }, 100); <del> }), <del> test3: () => () => "overriden3", <del> package: () => <del> new Promise(resolve => { <del> setTimeout(() => { <del> resolve(() => "overriden-package"); <del> }, 100); <del> }), <del> "././options/test1": () => () => "1", <del> "nested1/options/test2": () => () => "2", <del> "nested2/deep/deep": () => () => "3" <del>}); <del> <del>it("should be able to override a esm overridable", () => { <del> return import("./modules/test1").then(m => { <del> expect(m.default).toBe("overriden1"); <del> }); <del>}); <del> <del>it("should be able to not override a esm overridable", () => { <del> return import("./modules/test2").then(m => { <del> expect(m.default).toBe("original2"); <del> }); <del>}); <del> <del>import test3 from "./modules/test3"; <del>it("should be able to use an overridable module in the initial chunk, but it's not overriden", () => { <del> expect(test3).toBe("original3"); <del>}); <del> <del>it("should be able to override a cjs overridable", () => { <del> return import("./cjs/test1").then(m => { <del> expect(m.default).toBe("overriden1"); <del> }); <del>}); <del> <del>it("should be able to not override a cjs overridable", () => { <del> return import("./cjs/test2").then(m => { <del> expect(m.default).toBe("original2-cjs"); <del> }); <del>}); <del> <del>it("should be able to use an overridable module in the initial chunk, and it's overriden", () => { <del> expect(require("./cjs/test3")).toBe("overriden3"); <del>}); <del> <del>it("should be able to override with a package name shortcut", () => { <del> return import("package").then(m => { <del> expect(m.default).toBe("overriden-package"); <del> }); <del>}); <del> <del>it("should be able to override a relative request via shortcut", () => { <del> return import("./options/test1").then(m => { <del> expect(m.default).toBe("1"); <del> }); <del>}); <del> <del>it("should be able to override a nested relative request via shortcut", () => { <del> return import("./options/test2").then(m => { <del> expect(m.default).toBe("2"); <del> }); <del>}); <del> <del>it("should be able to override a deep nested request", () => { <del> return import("./options/test3").then(m => { <del> expect(m.default).toBe("3"); <del> }); <del>}); <del> <del>it("should be able to override when fallback module has multiple chunks", () => { <del> return import("./splitChunks").then(m => { <del> expect(m.default).toBe( <del> "index+app+vendor+shared+shared-separate+shared+shared-separate" <del> ); <del> }); <del>}); <ide><path>test/configCases/container/overridables/modules/test1.js <del>export default "original1"; <ide><path>test/configCases/container/overridables/modules/test2.js <del>export default "original2"; <ide><path>test/configCases/container/overridables/modules/test3.js <del>export default "original3"; <ide><path>test/configCases/container/overridables/node_modules/package/index.js <del>module.exports = "original-package"; <ide><path>test/configCases/container/overridables/options/test1.js 
<del>module.exports = "test1"; <ide><path>test/configCases/container/overridables/options/test2.js <del>module.exports = "test2"; <ide><path>test/configCases/container/overridables/options/test3.js <del>module.exports = "test3"; <ide><path>test/configCases/container/overridables/splitChunks/app.js <del>import vendor from "./vendor"; <del>import shared from "./shared"; <del>import shared2 from "./shared-separate"; <del>export default "app+" + vendor + "+" + shared + "+" + shared2; <ide><path>test/configCases/container/overridables/splitChunks/index.js <del>import app from "./app"; <del>import shared from "./shared"; <del>import shared2 from "./shared-separate"; <del>export default "index+" + app + "+" + shared + "+" + shared2; <ide><path>test/configCases/container/overridables/splitChunks/shared-separate.js <del>export default "shared-separate"; <ide><path>test/configCases/container/overridables/splitChunks/shared.js <del>export default "shared"; <ide><path>test/configCases/container/overridables/splitChunks/vendor.js <del>export default "vendor"; <ide><path>test/configCases/container/overridables/webpack.config.js <del>const { OverridablesPlugin, scope } = require("../../../../").container; <del> <del>/** @type {import("../../../../").Configuration} */ <del>module.exports = { <del> plugins: [ <del> new OverridablesPlugin({ <del> overridables: [ <del> { <del> test1: ["./modules/test1.js", "./cjs/test1"], <del> test2: "./modules/test2", <del> test3: { <del> import: "./modules/test3" <del> } <del> }, <del> { <del> test2: "./cjs/test2.js", <del> test3: "./cjs/../cjs/test3.js", <del> ...scope("nested1", ["./options/test2"]), <del> ...scope("nested2", { <del> ...scope("deep", { <del> deep: "./options/test3" <del> }) <del> }) <del> }, <del> "package", <del> "././options/test1", <del> "./splitChunks/app" <del> ] <del> }) <del> ], <del> optimization: { <del> splitChunks: { <del> cacheGroups: { <del> vendorTest: { <del> test: /splitChunks.vendor/, <del> enforce: true <del> }, <del> sharedTest: { <del> test: /splitChunks.shared-separate/, <del> enforce: true <del> } <del> } <del> } <del> } <del>};
file_count: 17
content_type: Text
main_lang: Text
message: clarify automatic updates in deployment.
sha: 14fb7df7f14b30b58a37b0bbb2a2ee8b669c52cb
patch:
<ide><path>docs/deployment.md <ide> When you deploy your Next.js application, you want to see the latest version wit <ide> <ide> Next.js will automatically load the latest version of your application in the background when routing. For client-side navigation, `next/link` will temporarily function as a normal `<a>` tag. <ide> <del>If a new page (with an old version) has already been prefetched by `next/link`, Next.js will use the old version. Then, after either a full page refresh or multiple client-side transitions, Next.js will show the latest version. <add>**Note:** If a new page (with an old version) has already been prefetched by `next/link`, Next.js will use the old version. Then, after either a full page refresh or multiple client-side page transitions, Next.js will show the latest version. <ide> <ide> ## Other hosting options <ide>
file_count: 1
content_type: Mixed
main_lang: Go
message: implement optional ring buffer for container logs
sha: 054abff3b67bb5d66323e5418a43c845a3eac8a1
patch:
<ide><path>api/types/container/host_config.go <ide> func (rp *RestartPolicy) IsSame(tp *RestartPolicy) bool { <ide> return rp.Name == tp.Name && rp.MaximumRetryCount == tp.MaximumRetryCount <ide> } <ide> <add>// LogMode is a type to define the available modes for logging <add>// These modes affect how logs are handled when log messages start piling up. <add>type LogMode string <add> <add>// Available logging modes <add>const ( <add> LogModeUnset = "" <add> LogModeBlocking LogMode = "blocking" <add> LogModeNonBlock LogMode = "non-blocking" <add>) <add> <ide> // LogConfig represents the logging configuration of the container. <ide> type LogConfig struct { <ide> Type string <ide><path>container/container.go <ide> import ( <ide> "github.com/docker/docker/runconfig" <ide> "github.com/docker/docker/volume" <ide> "github.com/docker/go-connections/nat" <add> "github.com/docker/go-units" <ide> "github.com/docker/libnetwork" <ide> "github.com/docker/libnetwork/netlabel" <ide> "github.com/docker/libnetwork/options" <ide> func (container *Container) CheckpointDir() string { <ide> // StartLogger starts a new logger driver for the container. <ide> func (container *Container) StartLogger() (logger.Logger, error) { <ide> cfg := container.HostConfig.LogConfig <del> c, err := logger.GetLogDriver(cfg.Type) <add> initDriver, err := logger.GetLogDriver(cfg.Type) <ide> if err != nil { <ide> return nil, fmt.Errorf("failed to get logging factory: %v", err) <ide> } <ide> func (container *Container) StartLogger() (logger.Logger, error) { <ide> return nil, err <ide> } <ide> } <del> return c(info) <add> <add> l, err := initDriver(info) <add> if err != nil { <add> return nil, err <add> } <add> <add> if containertypes.LogMode(cfg.Config["mode"]) == containertypes.LogModeNonBlock { <add> bufferSize := int64(-1) <add> if s, exists := cfg.Config["max-buffer-size"]; exists { <add> bufferSize, err = units.RAMInBytes(s) <add> if err != nil { <add> return nil, err <add> } <add> } <add> l = logger.NewRingLogger(l, info, bufferSize) <add> } <add> return l, nil <ide> } <ide> <ide> // GetProcessLabel returns the process label for the container. <ide><path>daemon/logger/awslogs/cloudwatchlogs.go <ide> func (l *logStream) Log(msg *logger.Message) error { <ide> defer l.lock.RUnlock() <ide> if !l.closed { <ide> // buffer up the data, making sure to copy the Line data <del> l.messages <- logger.CopyMessage(msg) <add> l.messages <- msg <ide> } <ide> return nil <ide> } <ide><path>daemon/logger/copier.go <ide> func (c *Copier) copySrc(name string, src io.Reader) { <ide> buf := make([]byte, bufSize) <ide> n := 0 <ide> eof := false <del> msg := &Message{Source: name} <ide> <ide> for { <ide> select { <ide> func (c *Copier) copySrc(name string, src io.Reader) { <ide> // Break up the data that we've buffered up into lines, and log each in turn. <ide> p := 0 <ide> for q := bytes.Index(buf[p:n], []byte{'\n'}); q >= 0; q = bytes.Index(buf[p:n], []byte{'\n'}) { <del> msg.Line = buf[p : p+q] <del> msg.Timestamp = time.Now().UTC() <del> msg.Partial = false <ide> select { <ide> case <-c.closed: <ide> return <ide> default: <add> msg := &Message{ <add> Source: name, <add> Timestamp: time.Now().UTC(), <add> } <add> msg.Line = append(msg.Line, buf[p:p+q]...) <add> <ide> if logErr := c.dst.Log(msg); logErr != nil { <ide> logrus.Errorf("Failed to log msg %q for logger %s: %s", msg.Line, c.dst.Name(), logErr) <ide> } <ide> func (c *Copier) copySrc(name string, src io.Reader) { <ide> // noting that it's a partial log line. 
<ide> if eof || (p == 0 && n == len(buf)) { <ide> if p < n { <del> msg.Line = buf[p:n] <del> msg.Timestamp = time.Now().UTC() <add> msg := &Message{ <add> Source: name, <add> Timestamp: time.Now().UTC(), <add> Partial: true, <add> } <add> msg.Line = append(msg.Line, buf[p:n]...) <ide> msg.Partial = true <add> <ide> if logErr := c.dst.Log(msg); logErr != nil { <ide> logrus.Errorf("Failed to log msg %q for logger %s: %s", msg.Line, c.dst.Name(), logErr) <ide> } <ide><path>daemon/logger/factory.go <ide> package logger <ide> import ( <ide> "fmt" <ide> "sync" <add> <add> containertypes "github.com/docker/docker/api/types/container" <add> units "github.com/docker/go-units" <add> "github.com/pkg/errors" <ide> ) <ide> <ide> // Creator builds a logging driver instance with given context. <ide> func GetLogDriver(name string) (Creator, error) { <ide> return factory.get(name) <ide> } <ide> <add>var builtInLogOpts = map[string]bool{ <add> "mode": true, <add> "max-buffer-size": true, <add>} <add> <ide> // ValidateLogOpts checks the options for the given log driver. The <ide> // options supported are specific to the LogDriver implementation. <ide> func ValidateLogOpts(name string, cfg map[string]string) error { <ide> if name == "none" { <ide> return nil <ide> } <ide> <add> switch containertypes.LogMode(cfg["mode"]) { <add> case containertypes.LogModeBlocking, containertypes.LogModeNonBlock, containertypes.LogModeUnset: <add> default: <add> return fmt.Errorf("logger: logging mode not supported: %s", cfg["mode"]) <add> } <add> <add> if s, ok := cfg["max-buffer-size"]; ok { <add> if containertypes.LogMode(cfg["mode"]) != containertypes.LogModeNonBlock { <add> return fmt.Errorf("logger: max-buffer-size option is only supported with 'mode=%s'", containertypes.LogModeNonBlock) <add> } <add> if _, err := units.RAMInBytes(s); err != nil { <add> return errors.Wrap(err, "error parsing option max-buffer-size") <add> } <add> } <add> <ide> if !factory.driverRegistered(name) { <ide> return fmt.Errorf("logger: no log driver named '%s' is registered", name) <ide> } <ide> <add> filteredOpts := make(map[string]string, len(builtInLogOpts)) <add> for k, v := range cfg { <add> if !builtInLogOpts[k] { <add> filteredOpts[k] = v <add> } <add> } <add> <ide> validator := factory.getLogOptValidator(name) <ide> if validator != nil { <del> return validator(cfg) <add> return validator(filteredOpts) <ide> } <ide> return nil <ide> } <ide><path>daemon/logger/logger.go <ide> type Message struct { <ide> Partial bool <ide> } <ide> <del>// CopyMessage creates a copy of the passed-in Message which will remain <del>// unchanged if the original is changed. Log drivers which buffer Messages <del>// rather than dispatching them during their Log() method should use this <del>// function to obtain a Message whose Line member's contents won't change. <del>func CopyMessage(msg *Message) *Message { <del> m := new(Message) <del> m.Line = make([]byte, len(msg.Line)) <del> copy(m.Line, msg.Line) <del> m.Source = msg.Source <del> m.Timestamp = msg.Timestamp <del> m.Partial = msg.Partial <del> m.Attrs = make(LogAttributes) <del> for k, v := range msg.Attrs { <del> m.Attrs[k] = v <del> } <del> return m <del>} <del> <ide> // LogAttributes is used to hold the extra attributes available in the log message <ide> // Primarily used for converting the map type to string and sorting. 
<ide> type LogAttributes map[string]string <ide><path>daemon/logger/logger_test.go <del>package logger <del> <del>import ( <del> "reflect" <del> "testing" <del> "time" <del>) <del> <del>func TestCopyMessage(t *testing.T) { <del> msg := &Message{ <del> Line: []byte("test line."), <del> Source: "stdout", <del> Timestamp: time.Now(), <del> Attrs: LogAttributes{ <del> "key1": "val1", <del> "key2": "val2", <del> "key3": "val3", <del> }, <del> Partial: true, <del> } <del> <del> m := CopyMessage(msg) <del> if !reflect.DeepEqual(m, msg) { <del> t.Fatalf("CopyMessage failed to copy message") <del> } <del>} <ide><path>daemon/logger/ring.go <add>package logger <add> <add>import ( <add> "errors" <add> "sync" <add> "sync/atomic" <add> <add> "github.com/Sirupsen/logrus" <add>) <add> <add>const ( <add> defaultRingMaxSize = 1e6 // 1MB <add>) <add> <add>// RingLogger is a ring buffer that implements the Logger interface. <add>// This is used when lossy logging is OK. <add>type RingLogger struct { <add> buffer *messageRing <add> l Logger <add> logInfo Info <add> closeFlag int32 <add>} <add> <add>type ringWithReader struct { <add> *RingLogger <add>} <add> <add>func (r *ringWithReader) ReadLogs(cfg ReadConfig) *LogWatcher { <add> reader, ok := r.l.(LogReader) <add> if !ok { <add> // something is wrong if we get here <add> panic("expected log reader") <add> } <add> return reader.ReadLogs(cfg) <add>} <add> <add>func newRingLogger(driver Logger, logInfo Info, maxSize int64) *RingLogger { <add> l := &RingLogger{ <add> buffer: newRing(maxSize), <add> l: driver, <add> logInfo: logInfo, <add> } <add> go l.run() <add> return l <add>} <add> <add>// NewRingLogger creates a new Logger that is implemented as a RingBuffer wrapping <add>// the passed in logger. <add>func NewRingLogger(driver Logger, logInfo Info, maxSize int64) Logger { <add> if maxSize < 0 { <add> maxSize = defaultRingMaxSize <add> } <add> l := newRingLogger(driver, logInfo, maxSize) <add> if _, ok := driver.(LogReader); ok { <add> return &ringWithReader{l} <add> } <add> return l <add>} <add> <add>// Log queues messages into the ring buffer <add>func (r *RingLogger) Log(msg *Message) error { <add> if r.closed() { <add> return errClosed <add> } <add> return r.buffer.Enqueue(msg) <add>} <add> <add>// Name returns the name of the underlying logger <add>func (r *RingLogger) Name() string { <add> return r.l.Name() <add>} <add> <add>func (r *RingLogger) closed() bool { <add> return atomic.LoadInt32(&r.closeFlag) == 1 <add>} <add> <add>func (r *RingLogger) setClosed() { <add> atomic.StoreInt32(&r.closeFlag, 1) <add>} <add> <add>// Close closes the logger <add>func (r *RingLogger) Close() error { <add> r.setClosed() <add> r.buffer.Close() <add> // empty out the queue <add> for _, msg := range r.buffer.Drain() { <add> if err := r.l.Log(msg); err != nil { <add> logrus.WithField("driver", r.l.Name()).WithField("container", r.logInfo.ContainerID).Errorf("Error writing log message: %v", r.l) <add> break <add> } <add> } <add> return r.l.Close() <add>} <add> <add>// run consumes messages from the ring buffer and forwards them to the underling <add>// logger. 
<add>// This is run in a goroutine when the RingLogger is created <add>func (r *RingLogger) run() { <add> for { <add> if r.closed() { <add> return <add> } <add> msg, err := r.buffer.Dequeue() <add> if err != nil { <add> // buffer is closed <add> return <add> } <add> if err := r.l.Log(msg); err != nil { <add> logrus.WithField("driver", r.l.Name()).WithField("container", r.logInfo.ContainerID).Errorf("Error writing log message: %v", r.l) <add> } <add> } <add>} <add> <add>type messageRing struct { <add> mu sync.Mutex <add> // singals callers of `Dequeue` to wake up either on `Close` or when a new `Message` is added <add> wait *sync.Cond <add> <add> sizeBytes int64 // current buffer size <add> maxBytes int64 // max buffer size size <add> queue []*Message <add> closed bool <add>} <add> <add>func newRing(maxBytes int64) *messageRing { <add> queueSize := 1000 <add> if maxBytes == 0 || maxBytes == 1 { <add> // With 0 or 1 max byte size, the maximum size of the queue would only ever be 1 <add> // message long. <add> queueSize = 1 <add> } <add> <add> r := &messageRing{queue: make([]*Message, 0, queueSize), maxBytes: maxBytes} <add> r.wait = sync.NewCond(&r.mu) <add> return r <add>} <add> <add>// Enqueue adds a message to the buffer queue <add>// If the message is too big for the buffer it drops the oldest messages to make room <add>// If there are no messages in the queue and the message is still too big, it adds the message anyway. <add>func (r *messageRing) Enqueue(m *Message) error { <add> mSize := int64(len(m.Line)) <add> <add> r.mu.Lock() <add> if r.closed { <add> r.mu.Unlock() <add> return errClosed <add> } <add> if mSize+r.sizeBytes > r.maxBytes && len(r.queue) > 0 { <add> r.wait.Signal() <add> r.mu.Unlock() <add> return nil <add> } <add> <add> r.queue = append(r.queue, m) <add> r.sizeBytes += mSize <add> r.wait.Signal() <add> r.mu.Unlock() <add> return nil <add>} <add> <add>// Dequeue pulls a message off the queue <add>// If there are no messages, it waits for one. <add>// If the buffer is closed, it will return immediately. <add>func (r *messageRing) Dequeue() (*Message, error) { <add> r.mu.Lock() <add> for len(r.queue) == 0 && !r.closed { <add> r.wait.Wait() <add> } <add> <add> if r.closed { <add> r.mu.Unlock() <add> return nil, errClosed <add> } <add> <add> msg := r.queue[0] <add> r.queue = r.queue[1:] <add> r.sizeBytes -= int64(len(msg.Line)) <add> r.mu.Unlock() <add> return msg, nil <add>} <add> <add>var errClosed = errors.New("closed") <add> <add>// Close closes the buffer ensuring no new messages can be added. <add>// Any callers waiting to dequeue a message will be woken up. <add>func (r *messageRing) Close() { <add> r.mu.Lock() <add> if r.closed { <add> r.mu.Unlock() <add> return <add> } <add> <add> r.closed = true <add> r.wait.Broadcast() <add> r.mu.Unlock() <add> return <add>} <add> <add>// Drain drains all messages from the queue. <add>// This can be used after `Close()` to get any remaining messages that were in queue. <add>func (r *messageRing) Drain() []*Message { <add> r.mu.Lock() <add> ls := make([]*Message, 0, len(r.queue)) <add> ls = append(ls, r.queue...) 
<add> r.sizeBytes = 0 <add> r.queue = r.queue[:0] <add> r.mu.Unlock() <add> return ls <add>} <ide><path>daemon/logger/ring_test.go <add>package logger <add> <add>import ( <add> "context" <add> "strconv" <add> "testing" <add> "time" <add>) <add> <add>type mockLogger struct{ c chan *Message } <add> <add>func (l *mockLogger) Log(msg *Message) error { <add> l.c <- msg <add> return nil <add>} <add> <add>func (l *mockLogger) Name() string { <add> return "mock" <add>} <add> <add>func (l *mockLogger) Close() error { <add> return nil <add>} <add> <add>func TestRingLogger(t *testing.T) { <add> mockLog := &mockLogger{make(chan *Message)} // no buffer on this channel <add> ring := newRingLogger(mockLog, Info{}, 1) <add> defer ring.setClosed() <add> <add> // this should never block <add> ring.Log(&Message{Line: []byte("1")}) <add> ring.Log(&Message{Line: []byte("2")}) <add> ring.Log(&Message{Line: []byte("3")}) <add> <add> select { <add> case msg := <-mockLog.c: <add> if string(msg.Line) != "1" { <add> t.Fatalf("got unexpected msg: %q", string(msg.Line)) <add> } <add> case <-time.After(100 * time.Millisecond): <add> t.Fatal("timeout reading log message") <add> } <add> <add> select { <add> case msg := <-mockLog.c: <add> t.Fatalf("expected no more messages in the queue, got: %q", string(msg.Line)) <add> default: <add> } <add>} <add> <add>func TestRingCap(t *testing.T) { <add> r := newRing(5) <add> for i := 0; i < 10; i++ { <add> // queue messages with "0" to "10" <add> // the "5" to "10" messages should be dropped since we only allow 5 bytes in the buffer <add> if err := r.Enqueue(&Message{Line: []byte(strconv.Itoa(i))}); err != nil { <add> t.Fatal(err) <add> } <add> } <add> <add> // should have messages in the queue for "5" to "10" <add> for i := 0; i < 5; i++ { <add> m, err := r.Dequeue() <add> if err != nil { <add> t.Fatal(err) <add> } <add> if string(m.Line) != strconv.Itoa(i) { <add> t.Fatalf("got unexpected message for iter %d: %s", i, string(m.Line)) <add> } <add> } <add> <add> // queue a message that's bigger than the buffer cap <add> if err := r.Enqueue(&Message{Line: []byte("hello world")}); err != nil { <add> t.Fatal(err) <add> } <add> <add> // queue another message that's bigger than the buffer cap <add> if err := r.Enqueue(&Message{Line: []byte("eat a banana")}); err != nil { <add> t.Fatal(err) <add> } <add> <add> m, err := r.Dequeue() <add> if err != nil { <add> t.Fatal(err) <add> } <add> if string(m.Line) != "hello world" { <add> t.Fatalf("got unexpected message: %s", string(m.Line)) <add> } <add> if len(r.queue) != 0 { <add> t.Fatalf("expected queue to be empty, got: %d", len(r.queue)) <add> } <add>} <add> <add>func TestRingClose(t *testing.T) { <add> r := newRing(1) <add> if err := r.Enqueue(&Message{Line: []byte("hello")}); err != nil { <add> t.Fatal(err) <add> } <add> r.Close() <add> if err := r.Enqueue(&Message{}); err != errClosed { <add> t.Fatalf("expected errClosed, got: %v", err) <add> } <add> if len(r.queue) != 1 { <add> t.Fatal("expected empty queue") <add> } <add> if m, err := r.Dequeue(); err == nil || m != nil { <add> t.Fatal("exepcted err on Dequeue after close") <add> } <add> <add> ls := r.Drain() <add> if len(ls) != 1 { <add> t.Fatalf("expected one message: %v", ls) <add> } <add> if string(ls[0].Line) != "hello" { <add> t.Fatalf("got unexpected message: %s", string(ls[0].Line)) <add> } <add>} <add> <add>func TestRingDrain(t *testing.T) { <add> r := newRing(5) <add> for i := 0; i < 5; i++ { <add> if err := r.Enqueue(&Message{Line: []byte(strconv.Itoa(i))}); err != nil { 
<add> t.Fatal(err) <add> } <add> } <add> <add> ls := r.Drain() <add> if len(ls) != 5 { <add> t.Fatal("got unexpected length after drain") <add> } <add> <add> for i := 0; i < 5; i++ { <add> if string(ls[i].Line) != strconv.Itoa(i) { <add> t.Fatalf("got unexpected message at position %d: %s", i, string(ls[i].Line)) <add> } <add> } <add> if r.sizeBytes != 0 { <add> t.Fatalf("expected buffer size to be 0 after drain, got: %d", r.sizeBytes) <add> } <add> <add> ls = r.Drain() <add> if len(ls) != 0 { <add> t.Fatalf("expected 0 messages on 2nd drain: %v", ls) <add> } <add> <add>} <add> <add>type nopLogger struct{} <add> <add>func (nopLogger) Name() string { return "nopLogger" } <add>func (nopLogger) Close() error { return nil } <add>func (nopLogger) Log(*Message) error { return nil } <add> <add>func BenchmarkRingLoggerThroughputNoReceiver(b *testing.B) { <add> mockLog := &mockLogger{make(chan *Message)} <add> defer mockLog.Close() <add> l := NewRingLogger(mockLog, Info{}, -1) <add> msg := &Message{Line: []byte("hello humans and everyone else!")} <add> b.SetBytes(int64(len(msg.Line))) <add> <add> for i := 0; i < b.N; i++ { <add> if err := l.Log(msg); err != nil { <add> b.Fatal(err) <add> } <add> } <add>} <add> <add>func BenchmarkRingLoggerThroughputWithReceiverDelay0(b *testing.B) { <add> l := NewRingLogger(nopLogger{}, Info{}, -1) <add> msg := &Message{Line: []byte("hello humans and everyone else!")} <add> b.SetBytes(int64(len(msg.Line))) <add> <add> for i := 0; i < b.N; i++ { <add> if err := l.Log(msg); err != nil { <add> b.Fatal(err) <add> } <add> } <add>} <add> <add>func consumeWithDelay(delay time.Duration, c <-chan *Message) (cancel func()) { <add> started := make(chan struct{}) <add> ctx, cancel := context.WithCancel(context.Background()) <add> go func() { <add> close(started) <add> ticker := time.NewTicker(delay) <add> for range ticker.C { <add> select { <add> case <-ctx.Done(): <add> ticker.Stop() <add> return <add> case <-c: <add> } <add> } <add> }() <add> <-started <add> return cancel <add>} <add> <add>func BenchmarkRingLoggerThroughputConsumeDelay1(b *testing.B) { <add> mockLog := &mockLogger{make(chan *Message)} <add> defer mockLog.Close() <add> l := NewRingLogger(mockLog, Info{}, -1) <add> msg := &Message{Line: []byte("hello humans and everyone else!")} <add> b.SetBytes(int64(len(msg.Line))) <add> <add> cancel := consumeWithDelay(1*time.Millisecond, mockLog.c) <add> defer cancel() <add> <add> for i := 0; i < b.N; i++ { <add> if err := l.Log(msg); err != nil { <add> b.Fatal(err) <add> } <add> } <add>} <add> <add>func BenchmarkRingLoggerThroughputConsumeDelay10(b *testing.B) { <add> mockLog := &mockLogger{make(chan *Message)} <add> defer mockLog.Close() <add> l := NewRingLogger(mockLog, Info{}, -1) <add> msg := &Message{Line: []byte("hello humans and everyone else!")} <add> b.SetBytes(int64(len(msg.Line))) <add> <add> cancel := consumeWithDelay(10*time.Millisecond, mockLog.c) <add> defer cancel() <add> <add> for i := 0; i < b.N; i++ { <add> if err := l.Log(msg); err != nil { <add> b.Fatal(err) <add> } <add> } <add>} <add> <add>func BenchmarkRingLoggerThroughputConsumeDelay50(b *testing.B) { <add> mockLog := &mockLogger{make(chan *Message)} <add> defer mockLog.Close() <add> l := NewRingLogger(mockLog, Info{}, -1) <add> msg := &Message{Line: []byte("hello humans and everyone else!")} <add> b.SetBytes(int64(len(msg.Line))) <add> <add> cancel := consumeWithDelay(50*time.Millisecond, mockLog.c) <add> defer cancel() <add> <add> for i := 0; i < b.N; i++ { <add> if err := l.Log(msg); err != nil 
{ <add> b.Fatal(err) <add> } <add> } <add>} <add> <add>func BenchmarkRingLoggerThroughputConsumeDelay100(b *testing.B) { <add> mockLog := &mockLogger{make(chan *Message)} <add> defer mockLog.Close() <add> l := NewRingLogger(mockLog, Info{}, -1) <add> msg := &Message{Line: []byte("hello humans and everyone else!")} <add> b.SetBytes(int64(len(msg.Line))) <add> <add> cancel := consumeWithDelay(100*time.Millisecond, mockLog.c) <add> defer cancel() <add> <add> for i := 0; i < b.N; i++ { <add> if err := l.Log(msg); err != nil { <add> b.Fatal(err) <add> } <add> } <add>} <add> <add>func BenchmarkRingLoggerThroughputConsumeDelay300(b *testing.B) { <add> mockLog := &mockLogger{make(chan *Message)} <add> defer mockLog.Close() <add> l := NewRingLogger(mockLog, Info{}, -1) <add> msg := &Message{Line: []byte("hello humans and everyone else!")} <add> b.SetBytes(int64(len(msg.Line))) <add> <add> cancel := consumeWithDelay(300*time.Millisecond, mockLog.c) <add> defer cancel() <add> <add> for i := 0; i < b.N; i++ { <add> if err := l.Log(msg); err != nil { <add> b.Fatal(err) <add> } <add> } <add>} <add> <add>func BenchmarkRingLoggerThroughputConsumeDelay500(b *testing.B) { <add> mockLog := &mockLogger{make(chan *Message)} <add> defer mockLog.Close() <add> l := NewRingLogger(mockLog, Info{}, -1) <add> msg := &Message{Line: []byte("hello humans and everyone else!")} <add> b.SetBytes(int64(len(msg.Line))) <add> <add> cancel := consumeWithDelay(500*time.Millisecond, mockLog.c) <add> defer cancel() <add> <add> for i := 0; i < b.N; i++ { <add> if err := l.Log(msg); err != nil { <add> b.Fatal(err) <add> } <add> } <add>} <ide><path>daemon/logger/splunk/splunk_test.go <ide> func TestDefault(t *testing.T) { <ide> } <ide> <ide> message1Time := time.Now() <del> if err := loggerDriver.Log(&logger.Message{[]byte("{\"a\":\"b\"}"), "stdout", message1Time, nil, false}); err != nil { <add> if err := loggerDriver.Log(&logger.Message{Line: []byte("{\"a\":\"b\"}"), Source: "stdout", Timestamp: message1Time}); err != nil { <ide> t.Fatal(err) <ide> } <ide> message2Time := time.Now() <del> if err := loggerDriver.Log(&logger.Message{[]byte("notajson"), "stdout", message2Time, nil, false}); err != nil { <add> if err := loggerDriver.Log(&logger.Message{Line: []byte("notajson"), Source: "stdout", Timestamp: message2Time}); err != nil { <ide> t.Fatal(err) <ide> } <ide> <ide> func TestInlineFormatWithNonDefaultOptions(t *testing.T) { <ide> } <ide> <ide> messageTime := time.Now() <del> if err := loggerDriver.Log(&logger.Message{[]byte("1"), "stdout", messageTime, nil, false}); err != nil { <add> if err := loggerDriver.Log(&logger.Message{Line: []byte("1"), Source: "stdout", Timestamp: messageTime}); err != nil { <ide> t.Fatal(err) <ide> } <ide> <ide> func TestJsonFormat(t *testing.T) { <ide> } <ide> <ide> message1Time := time.Now() <del> if err := loggerDriver.Log(&logger.Message{[]byte("{\"a\":\"b\"}"), "stdout", message1Time, nil, false}); err != nil { <add> if err := loggerDriver.Log(&logger.Message{Line: []byte("{\"a\":\"b\"}"), Source: "stdout", Timestamp: message1Time}); err != nil { <ide> t.Fatal(err) <ide> } <ide> message2Time := time.Now() <del> if err := loggerDriver.Log(&logger.Message{[]byte("notjson"), "stdout", message2Time, nil, false}); err != nil { <add> if err := loggerDriver.Log(&logger.Message{Line: []byte("notjson"), Source: "stdout", Timestamp: message2Time}); err != nil { <ide> t.Fatal(err) <ide> } <ide> <ide> func TestRawFormat(t *testing.T) { <ide> } <ide> <ide> message1Time := time.Now() <del> if err := 
loggerDriver.Log(&logger.Message{[]byte("{\"a\":\"b\"}"), "stdout", message1Time, nil, false}); err != nil { <add> if err := loggerDriver.Log(&logger.Message{Line: []byte("{\"a\":\"b\"}"), Source: "stdout", Timestamp: message1Time}); err != nil { <ide> t.Fatal(err) <ide> } <ide> message2Time := time.Now() <del> if err := loggerDriver.Log(&logger.Message{[]byte("notjson"), "stdout", message2Time, nil, false}); err != nil { <add> if err := loggerDriver.Log(&logger.Message{Line: []byte("notjson"), Source: "stdout", Timestamp: message2Time}); err != nil { <ide> t.Fatal(err) <ide> } <ide> <ide> func TestRawFormatWithLabels(t *testing.T) { <ide> } <ide> <ide> message1Time := time.Now() <del> if err := loggerDriver.Log(&logger.Message{[]byte("{\"a\":\"b\"}"), "stdout", message1Time, nil, false}); err != nil { <add> if err := loggerDriver.Log(&logger.Message{Line: []byte("{\"a\":\"b\"}"), Source: "stdout", Timestamp: message1Time}); err != nil { <ide> t.Fatal(err) <ide> } <ide> message2Time := time.Now() <del> if err := loggerDriver.Log(&logger.Message{[]byte("notjson"), "stdout", message2Time, nil, false}); err != nil { <add> if err := loggerDriver.Log(&logger.Message{Line: []byte("notjson"), Source: "stdout", Timestamp: message2Time}); err != nil { <ide> t.Fatal(err) <ide> } <ide> <ide> func TestRawFormatWithoutTag(t *testing.T) { <ide> } <ide> <ide> message1Time := time.Now() <del> if err := loggerDriver.Log(&logger.Message{[]byte("{\"a\":\"b\"}"), "stdout", message1Time, nil, false}); err != nil { <add> if err := loggerDriver.Log(&logger.Message{Line: []byte("{\"a\":\"b\"}"), Source: "stdout", Timestamp: message1Time}); err != nil { <ide> t.Fatal(err) <ide> } <ide> message2Time := time.Now() <del> if err := loggerDriver.Log(&logger.Message{[]byte("notjson"), "stdout", message2Time, nil, false}); err != nil { <add> if err := loggerDriver.Log(&logger.Message{Line: []byte("notjson"), Source: "stdout", Timestamp: message2Time}); err != nil { <ide> t.Fatal(err) <ide> } <ide> <ide> func TestBatching(t *testing.T) { <ide> } <ide> <ide> for i := 0; i < defaultStreamChannelSize*4; i++ { <del> if err := loggerDriver.Log(&logger.Message{[]byte(fmt.Sprintf("%d", i)), "stdout", time.Now(), nil, false}); err != nil { <add> if err := loggerDriver.Log(&logger.Message{Line: []byte(fmt.Sprintf("%d", i)), Source: "stdout", Timestamp: time.Now()}); err != nil { <ide> t.Fatal(err) <ide> } <ide> } <ide> func TestFrequency(t *testing.T) { <ide> } <ide> <ide> for i := 0; i < 10; i++ { <del> if err := loggerDriver.Log(&logger.Message{[]byte(fmt.Sprintf("%d", i)), "stdout", time.Now(), nil, false}); err != nil { <add> if err := loggerDriver.Log(&logger.Message{Line: []byte(fmt.Sprintf("%d", i)), Source: "stdout", Timestamp: time.Now()}); err != nil { <ide> t.Fatal(err) <ide> } <ide> time.Sleep(15 * time.Millisecond) <ide> func TestOneMessagePerRequest(t *testing.T) { <ide> } <ide> <ide> for i := 0; i < 10; i++ { <del> if err := loggerDriver.Log(&logger.Message{[]byte(fmt.Sprintf("%d", i)), "stdout", time.Now(), nil, false}); err != nil { <add> if err := loggerDriver.Log(&logger.Message{Line: []byte(fmt.Sprintf("%d", i)), Source: "stdout", Timestamp: time.Now()}); err != nil { <ide> t.Fatal(err) <ide> } <ide> } <ide> func TestSkipVerify(t *testing.T) { <ide> } <ide> <ide> for i := 0; i < defaultStreamChannelSize*2; i++ { <del> if err := loggerDriver.Log(&logger.Message{[]byte(fmt.Sprintf("%d", i)), "stdout", time.Now(), nil, false}); err != nil { <add> if err := loggerDriver.Log(&logger.Message{Line: 
[]byte(fmt.Sprintf("%d", i)), Source: "stdout", Timestamp: time.Now()}); err != nil { <ide> t.Fatal(err) <ide> } <ide> } <ide> func TestSkipVerify(t *testing.T) { <ide> hec.simulateServerError = false <ide> <ide> for i := defaultStreamChannelSize * 2; i < defaultStreamChannelSize*4; i++ { <del> if err := loggerDriver.Log(&logger.Message{[]byte(fmt.Sprintf("%d", i)), "stdout", time.Now(), nil, false}); err != nil { <add> if err := loggerDriver.Log(&logger.Message{Line: []byte(fmt.Sprintf("%d", i)), Source: "stdout", Timestamp: time.Now()}); err != nil { <ide> t.Fatal(err) <ide> } <ide> } <ide> func TestBufferMaximum(t *testing.T) { <ide> } <ide> <ide> for i := 0; i < 11; i++ { <del> if err := loggerDriver.Log(&logger.Message{[]byte(fmt.Sprintf("%d", i)), "stdout", time.Now(), nil, false}); err != nil { <add> if err := loggerDriver.Log(&logger.Message{Line: []byte(fmt.Sprintf("%d", i)), Source: "stdout", Timestamp: time.Now()}); err != nil { <ide> t.Fatal(err) <ide> } <ide> } <ide> func TestServerAlwaysDown(t *testing.T) { <ide> } <ide> <ide> for i := 0; i < 5; i++ { <del> if err := loggerDriver.Log(&logger.Message{[]byte(fmt.Sprintf("%d", i)), "stdout", time.Now(), nil, false}); err != nil { <add> if err := loggerDriver.Log(&logger.Message{Line: []byte(fmt.Sprintf("%d", i)), Source: "stdout", Timestamp: time.Now()}); err != nil { <ide> t.Fatal(err) <ide> } <ide> } <ide> func TestCannotSendAfterClose(t *testing.T) { <ide> t.Fatal(err) <ide> } <ide> <del> if err := loggerDriver.Log(&logger.Message{[]byte("message1"), "stdout", time.Now(), nil, false}); err != nil { <add> if err := loggerDriver.Log(&logger.Message{Line: []byte("message1"), Source: "stdout", Timestamp: time.Now()}); err != nil { <ide> t.Fatal(err) <ide> } <ide> <ide> func TestCannotSendAfterClose(t *testing.T) { <ide> t.Fatal(err) <ide> } <ide> <del> if err := loggerDriver.Log(&logger.Message{[]byte("message2"), "stdout", time.Now(), nil, false}); err == nil { <add> if err := loggerDriver.Log(&logger.Message{Line: []byte("message2"), Source: "stdout", Timestamp: time.Now()}); err == nil { <ide> t.Fatal("Driver should not allow to send messages after close") <ide> } <ide> <ide><path>docs/api/version-history.md <ide> keywords: "API, Docker, rcli, REST, documentation" <ide> * `GET /secrets/{id}` returns information on the secret `id`. <ide> * `POST /secrets/{id}/update` updates the secret `id`. <ide> * `POST /services/(id or name)/update` now accepts service name or prefix of service id as a parameter. <add>* `POST /containers/create` added 2 built-in log-opts that work on all logging drivers, <add>`mode` (`blocking`|`non-blocking`), and `max-buffer-size` (e.g. `2m`) which enables a non-blocking log buffer. <ide> <ide> ## v1.24 API changes <ide>
file_count: 11
content_type: Javascript
main_lang: Javascript
message: use sortableset in chunks
sha: 747efcadf90f6ba157a18daa509a6743cc9f59ca
patch:
<ide><path>lib/Chunk.js <ide> <ide> const util = require("util"); <ide> const compareLocations = require("./compareLocations"); <add>const SortableSet = require("./util/SortableSet"); <ide> let debugId = 1000; <ide> <del>const byId = (a, b) => { <del> if(a.id < b.id) return -1; <del> if(b.id < a.id) return 1; <del> return 0; <del>}; <del> <ide> class Chunk { <ide> <add> static sortById(a, b) { <add> if(a.id < b.id) return -1; <add> if(b.id < a.id) return 1; <add> return 0; <add> } <add> <add> static sortByIdentifier(a, b) { <add> if(a.identifier() > b.identifier()) return 1; <add> if(a.identifier() < b.identifier()) return -1; <add> return 0; <add> } <add> <ide> constructor(name, module, loc) { <ide> this.id = null; <ide> this.ids = null; <ide> this.debugId = debugId++; <ide> this.name = name; <del> this._modules = new Set(); <del> this._modulesIsSorted = true; <add> this._modules = new SortableSet(undefined, Chunk.sortByIdentifier); <ide> this.entrypoints = []; <ide> this.chunks = []; <ide> this.parents = []; <ide> class Chunk { <ide> addModule(module) { <ide> if(!this._modules.has(module)) { <ide> this._modules.add(module); <del> this._modulesIsSorted = false; <ide> return true; <ide> } <ide> return false; <ide> class Chunk { <ide> } <ide> <ide> setModules(modules) { <del> this._modules = new Set(modules); <del> this._modulesIsSorted = false; <add> this._modules = new SortableSet(modules, Chunk.sortByIdentifier); <ide> } <ide> <ide> getNumberOfModules() { <ide> class Chunk { <ide> return Array.from(this._modules, fn); <ide> } <ide> <del> _ensureModulesSorted() { <del> if(this._modulesIsSorted) return; <del> this._modules = new Set(Array.from(this._modules).sort((a, b) => { <del> if(a.identifier() > b.identifier()) return 1; <del> if(a.identifier() < b.identifier()) return -1; <del> return 0; <del> })); <del> this._modulesIsSorted = true; <del> } <del> <ide> compareTo(otherChunk) { <del> this._ensureModulesSorted(); <del> otherChunk._ensureModulesSorted(); <add> this._modules.sort(); <add> otherChunk._modules.sort(); <ide> if(this._modules.size > otherChunk._modules.size) return -1; <ide> if(this._modules.size < otherChunk._modules.size) return 1; <ide> const a = this._modules[Symbol.iterator](); <ide> class Chunk { <ide> } <ide> <ide> getModulesIdent() { <del> this._ensureModulesSorted(); <add> this._modules.sort(); <ide> let str = ""; <ide> this._modules.forEach(m => { <ide> str += m.identifier() + "#"; <ide> class Chunk { <ide> } <ide> <ide> sortItems() { <del> this._modules = new Set(Array.from(this._modules).sort(byId)); <add> this._modules.sortWith(Chunk.sortById); <ide> this.origins.sort((a, b) => { <ide> const aIdent = a.module.identifier(); <ide> const bIdent = b.module.identifier(); <ide> class Chunk { <ide> if(origin.reasons) <ide> origin.reasons.sort(); <ide> }); <del> this.parents.sort(byId); <del> this.chunks.sort(byId); <add> this.parents.sort(Chunk.sortById); <add> this.chunks.sort(Chunk.sortById); <ide> } <ide> <ide> toString() {
file_count: 1
content_type: Javascript
main_lang: Javascript
message: adjust comments for upcoming lint rule
sha: f593f9e9ef48f3b41b7f4f40691d42b9512f0f2d
patch:
<ide><path>test/async-hooks/test-enable-disable.js <del>/* <del> * Test Steps Explained <del> * ==================== <del> * <del> * Initializing hooks: <del> * <del> * We initialize 3 hooks. For hook2 and hook3 we register a callback for the <del> * "before" and in case of hook3 also for the "after" invocations. <del> * <del> * Enabling hooks initially: <del> * <del> * We only enable hook1 and hook3 initially. <del> * <del> * Enabling hook2: <del> * <del> * When hook3's "before" invocation occurs we enable hook2. Since this <del> * happens right before calling `onfirstImmediate` hook2 will miss all hook <del> * invocations until then, including the "init" and "before" of the first <del> * Immediate. <del> * However afterwards it collects all invocations that follow on the first <del> * Immediate as well as all invocations on the second Immediate. <del> * <del> * This shows that a hook can enable another hook inside a life time event <del> * callback. <del> * <del> * <del> * Disabling hook1 <del> * <del> * Since we registered the "before" callback for hook2 it will execute it <del> * right before `onsecondImmediate` is called. <del> * At that point we disable hook1 which is why it will miss all invocations <del> * afterwards and thus won't include the second "after" as well as the <del> * "destroy" invocations <del> * <del> * This shows that a hook can disable another hook inside a life time event <del> * callback. <del> * <del> * Disabling hook3 <del> * <del> * When the second "after" invocation occurs (after onsecondImmediate), hook3 <del> * disables itself. <del> * As a result it will not receive the "destroy" invocation. <del> * <del> * This shows that a hook can disable itself inside a life time event callback. <del> * <del> * Sample Test Log <del> * =============== <del> * <del> * - setting up first Immediate <del> * hook1.init.uid-5 <del> * hook3.init.uid-5 <del> * - finished setting first Immediate <del> <del> * hook1.before.uid-5 <del> * hook3.before.uid-5 <del> * - enabled hook2 <del> * - entering onfirstImmediate <del> <del> * - setting up second Immediate <del> * hook1.init.uid-6 <del> * hook3.init.uid-6 <del> * hook2.init.uid-6 <del> * - finished setting second Immediate <add>// Test Steps Explained <add>// ==================== <add>// <add>// Initializing hooks: <add>// <add>// We initialize 3 hooks. For hook2 and hook3 we register a callback for the <add>// "before" and in case of hook3 also for the "after" invocations. <add>// <add>// Enabling hooks initially: <add>// <add>// We only enable hook1 and hook3 initially. <add>// <add>// Enabling hook2: <add>// <add>// When hook3's "before" invocation occurs we enable hook2. Since this <add>// happens right before calling `onfirstImmediate` hook2 will miss all hook <add>// invocations until then, including the "init" and "before" of the first <add>// Immediate. <add>// However afterwards it collects all invocations that follow on the first <add>// Immediate as well as all invocations on the second Immediate. <add>// <add>// This shows that a hook can enable another hook inside a life time event <add>// callback. <add>// <add>// <add>// Disabling hook1 <add>// <add>// Since we registered the "before" callback for hook2 it will execute it <add>// right before `onsecondImmediate` is called. 
<add>// At that point we disable hook1 which is why it will miss all invocations <add>// afterwards and thus won't include the second "after" as well as the <add>// "destroy" invocations <add>// <add>// This shows that a hook can disable another hook inside a life time event <add>// callback. <add>// <add>// Disabling hook3 <add>// <add>// When the second "after" invocation occurs (after onsecondImmediate), hook3 <add>// disables itself. <add>// As a result it will not receive the "destroy" invocation. <add>// <add>// This shows that a hook can disable itself inside a life time event callback. <add>// <add>// Sample Test Log <add>// =============== <add>// <add>// - setting up first Immediate <add>// hook1.init.uid-5 <add>// hook3.init.uid-5 <add>// - finished setting first Immediate <add>// <add>// hook1.before.uid-5 <add>// hook3.before.uid-5 <add>// - enabled hook2 <add>// - entering onfirstImmediate <add>// <add>// - setting up second Immediate <add>// hook1.init.uid-6 <add>// hook3.init.uid-6 <add>// hook2.init.uid-6 <add>// - finished setting second Immediate <add>// <add>// - exiting onfirstImmediate <add>// hook1.after.uid-5 <add>// hook3.after.uid-5 <add>// hook2.after.uid-5 <add>// hook1.destroy.uid-5 <add>// hook3.destroy.uid-5 <add>// hook2.destroy.uid-5 <add>// hook1.before.uid-6 <add>// hook3.before.uid-6 <add>// hook2.before.uid-6 <add>// - disabled hook1 <add>// - entering onsecondImmediate <add>// - exiting onsecondImmediate <add>// hook3.after.uid-6 <add>// - disabled hook3 <add>// hook2.after.uid-6 <add>// hook2.destroy.uid-6 <ide> <del> * - exiting onfirstImmediate <del> * hook1.after.uid-5 <del> * hook3.after.uid-5 <del> * hook2.after.uid-5 <del> * hook1.destroy.uid-5 <del> * hook3.destroy.uid-5 <del> * hook2.destroy.uid-5 <del> * hook1.before.uid-6 <del> * hook3.before.uid-6 <del> * hook2.before.uid-6 <del> * - disabled hook1 <del> * - entering onsecondImmediate <del> * - exiting onsecondImmediate <del> * hook3.after.uid-6 <del> * - disabled hook3 <del> * hook2.after.uid-6 <del> * hook2.destroy.uid-6 <del> */ <ide> <ide> 'use strict'; <ide> <ide><path>test/common/index.js <ide> const PIPE = (() => { <ide> return path.join(pipePrefix, pipeName); <ide> })(); <ide> <del>/* <del> * Check that when running a test with <del> * `$node --abort-on-uncaught-exception $file child` <del> * the process aborts. <del> */ <add>// Check that when running a test with <add>// `$node --abort-on-uncaught-exception $file child` <add>// the process aborts. 
<ide> function childShouldThrowAndAbort() { <ide> let testCmd = ''; <ide> if (!isWindows) { <ide><path>test/internet/test-dns-ipv6.js <ide> TEST(async function test_lookup_ipv6_explicit(done) { <ide> checkWrap(req); <ide> }); <ide> <del>/* This ends up just being too problematic to test <del>TEST(function test_lookup_ipv6_implicit(done) { <del> var req = dns.lookup(addresses.INET6_HOST, function(err, ip, family) { <del> assert.ifError(err); <del> assert.ok(net.isIPv6(ip)); <del> assert.strictEqual(family, 6); <add>// This ends up just being too problematic to test <add>// TEST(function test_lookup_ipv6_implicit(done) { <add>// var req = dns.lookup(addresses.INET6_HOST, function(err, ip, family) { <add>// assert.ifError(err); <add>// assert.ok(net.isIPv6(ip)); <add>// assert.strictEqual(family, 6); <ide> <del> done(); <del> }); <add>// done(); <add>// }); <ide> <del> checkWrap(req); <del>}); <del>*/ <add>// checkWrap(req); <add>// }); <ide> <ide> TEST(async function test_lookup_ipv6_explicit_object(done) { <ide> function validateResult(res) { <ide> TEST(function test_lookupservice_ip_ipv6(done) { <ide> checkWrap(req); <ide> }); <ide> <del>/* Disabled because it appears to be not working on linux. */ <del>/* TEST(function test_lookup_localhost_ipv6(done) { <del> var req = dns.lookup('localhost', 6, function(err, ip, family) { <del> assert.ifError(err); <del> assert.ok(net.isIPv6(ip)); <del> assert.strictEqual(family, 6); <del> <del> done(); <del> }); <del> <del> checkWrap(req); <del>}); */ <add>// Disabled because it appears to be not working on Linux. <add>// TEST(function test_lookup_localhost_ipv6(done) { <add>// var req = dns.lookup('localhost', 6, function(err, ip, family) { <add>// assert.ifError(err); <add>// assert.ok(net.isIPv6(ip)); <add>// assert.strictEqual(family, 6); <add>// <add>// done(); <add>// }); <add>// <add>// checkWrap(req); <add>// }); <ide><path>test/internet/test-http-dns-fail.js <ide> // USE OR OTHER DEALINGS IN THE SOFTWARE. <ide> <ide> 'use strict'; <del>/* <del> * Repeated requests for a domain that fails to resolve <del> * should trigger the error event after each attempt. <del> */ <add> <add>// Repeated requests for a domain that fails to resolve <add>// should trigger the error event after each attempt. 
<ide> <ide> const common = require('../common'); <ide> const assert = require('assert'); <ide><path>test/node-api/test_make_callback/test.js <ide> assert.strictEqual(makeCallback(this, <ide> <ide> // TODO(node-api): napi_make_callback needs to support <ide> // strings passed for the func argument <del>/* <del>const recv = { <del> one: common.mustCall(function() { <del> assert.strictEqual(0, arguments.length); <del> assert.strictEqual(this, recv); <del> return 42; <del> }), <del> two: common.mustCall(function(x) { <del> assert.strictEqual(1, arguments.length); <del> assert.strictEqual(this, recv); <del> assert.strictEqual(x, 1337); <del> return 42; <del> }), <del>}; <add>// <add>// const recv = { <add>// one: common.mustCall(function() { <add>// assert.strictEqual(0, arguments.length); <add>// assert.strictEqual(this, recv); <add>// return 42; <add>// }), <add>// two: common.mustCall(function(x) { <add>// assert.strictEqual(1, arguments.length); <add>// assert.strictEqual(this, recv); <add>// assert.strictEqual(x, 1337); <add>// return 42; <add>// }), <add>// }; <add>// <add>// assert.strictEqual(makeCallback(recv, 'one'), 42); <add>// assert.strictEqual(makeCallback(recv, 'two', 1337), 42); <add>// <add>// // Check that callbacks on a receiver from a different context works. <add>// const foreignObject = vm.runInNewContext('({ fortytwo() { return 42; } })'); <add>// assert.strictEqual(makeCallback(foreignObject, 'fortytwo'), 42); <ide> <del>assert.strictEqual(makeCallback(recv, 'one'), 42); <del>assert.strictEqual(makeCallback(recv, 'two', 1337), 42); <del> <del>// Check that callbacks on a receiver from a different context works. <del>const foreignObject = vm.runInNewContext('({ fortytwo() { return 42; } })'); <del>assert.strictEqual(makeCallback(foreignObject, 'fortytwo'), 42); <del>*/ <ide> <ide> // Check that the callback is made in the context of the receiver. <ide> const target = vm.runInNewContext(` <ide><path>test/parallel/test-buffer-parent-property.js <ide> 'use strict'; <ide> <del>/* <del> * Fix for https://github.com/nodejs/node/issues/8266 <del> * <del> * Zero length Buffer objects should expose the `buffer` property of the <del> * TypedArrays, via the `parent` property. <del> */ <add>// Fix for https://github.com/nodejs/node/issues/8266 <add>// <add>// Zero length Buffer objects should expose the `buffer` property of the <add>// TypedArrays, via the `parent` property. 
<ide> require('../common'); <ide> const assert = require('assert'); <ide> <ide><path>test/parallel/test-buffer-writeuint.js <ide> require('../common'); <ide> const assert = require('assert'); <ide> <del>/* <del> * We need to check the following things: <del> * - We are correctly resolving big endian (doesn't mean anything for 8 bit) <del> * - Correctly resolving little endian (doesn't mean anything for 8 bit) <del> * - Correctly using the offsets <del> * - Correctly interpreting values that are beyond the signed range as unsigned <del> */ <add>// We need to check the following things: <add>// - We are correctly resolving big endian (doesn't mean anything for 8 bit) <add>// - Correctly resolving little endian (doesn't mean anything for 8 bit) <add>// - Correctly using the offsets <add>// - Correctly interpreting values that are beyond the signed range as unsigned <ide> <ide> { // OOB <ide> const data = Buffer.alloc(8); <ide><path>test/parallel/test-child-process-cwd.js <ide> tmpdir.refresh(); <ide> const assert = require('assert'); <ide> const { spawn } = require('child_process'); <ide> <del>/* <del> Spawns 'pwd' with given options, then test <del> - whether the exit code equals expectCode, <del> - optionally whether the trimmed stdout result matches expectData <del>*/ <add>// Spawns 'pwd' with given options, then test <add>// - whether the exit code equals expectCode, <add>// - optionally whether the trimmed stdout result matches expectData <ide> function testCwd(options, expectCode = 0, expectData) { <ide> const child = spawn(...common.pwdCommand, options); <ide> <ide><path>test/parallel/test-child-process-double-pipe.js <ide> if (isWindows) { <ide> echo = spawn('echo', ['hello\nnode\nand\nworld\n']); <ide> } <ide> <del>/* <del> * grep and sed hang if the spawn function leaks file descriptors to child <del> * processes. <del> * This happens when calling pipe(2) and then forgetting to set the <del> * FD_CLOEXEC flag on the resulting file descriptors. <del> * <del> * This test checks child processes exit, meaning they don't hang like <del> * explained above. <del> */ <add>// If the spawn function leaks file descriptors to subprocesses, grep and sed <add>// hang. <add>// This happens when calling pipe(2) and then forgetting to set the <add>// FD_CLOEXEC flag on the resulting file descriptors. <add>// <add>// This test checks child processes exit, meaning they don't hang like <add>// explained above. <ide> <ide> <ide> // pipe echo | grep <ide><path>test/parallel/test-child-process-fork-dgram.js <ide> // USE OR OTHER DEALINGS IN THE SOFTWARE. <ide> <ide> 'use strict'; <del>/* <del> * The purpose of this test is to make sure that when forking a process, <del> * sending a fd representing a UDP socket to the child and sending messages <del> * to this endpoint, these messages are distributed to the parent and the <del> * child process. <del> */ <add> <add>// The purpose of this test is to make sure that when forking a process, <add>// sending a fd representing a UDP socket to the child and sending messages <add>// to this endpoint, these messages are distributed to the parent and the <add>// child process. <add> <ide> <ide> const common = require('../common'); <ide> if (common.isWindows) <ide> if (process.argv[2] === 'child') { <ide> const serverPort = parentServer.address().port; <ide> <ide> const timer = setInterval(() => { <del> /* <del> * Both the parent and the child got at least one message, <del> * test passed, clean up everything. 
<del> */ <add> // Both the parent and the child got at least one message, <add> // test passed, clean up everything. <ide> if (parentGotMessage && childGotMessage) { <ide> clearInterval(timer); <ide> client.close(); <ide><path>test/parallel/test-cluster-fork-windowsHide.js <ide> const child_process = require('child_process'); <ide> const cluster = require('cluster'); <ide> <ide> if (!process.argv[2]) { <del> /* It seems Windows only allocate new console window for <del> * attaching processes spawned by detached processes. i.e. <del> * - If process D is spawned by process C with `detached: true`, <del> * and process W is spawned by process D with `detached: false`, <del> * W will get a new black console window popped up. <del> * - If D is spawned by C with `detached: false` or W is spawned <del> * by D with `detached: true`, no console window will pop up for W. <del> * <del> * So, we have to spawn a detached process first to run the actual test. <del> */ <add> // It seems Windows only allocate new console window for <add> // attaching processes spawned by detached processes. i.e. <add> // - If process D is spawned by process C with `detached: true`, <add> // and process W is spawned by process D with `detached: false`, <add> // W will get a new black console window popped up. <add> // - If D is spawned by C with `detached: false` or W is spawned <add> // by D with `detached: true`, no console window will pop up for W. <add> // <add> // So, we have to spawn a detached process first to run the actual test. <ide> const master = child_process.spawn( <ide> process.argv[0], <ide> [process.argv[1], '--cluster'], <ide><path>test/parallel/test-cluster-worker-destroy.js <ide> // USE OR OTHER DEALINGS IN THE SOFTWARE. <ide> <ide> 'use strict'; <del>/* <del> * The goal of this test is to cover the Workers' implementation of <del> * Worker.prototype.destroy. Worker.prototype.destroy is called within <del> * the worker's context: once when the worker is still connected to the <del> * master, and another time when it's not connected to it, so that we cover <del> * both code paths. <del> */ <add> <add>// The goal of this test is to cover the Workers' implementation of <add>// Worker.prototype.destroy. Worker.prototype.destroy is called within <add>// the worker's context: once when the worker is still connected to the <add>// master, and another time when it's not connected to it, so that we cover <add>// both code paths. <ide> <ide> const common = require('../common'); <ide> const assert = require('assert'); <ide><path>test/parallel/test-crypto-dh-padding.js <ide> if (!common.hasCrypto) <ide> const assert = require('assert'); <ide> const crypto = require('crypto'); <ide> <del>/* This test verifies padding with leading zeroes for shared <del> * secrets that are strictly smaller than the modulus (prime). 
<del> * See: <del> * RFC 4346: https://www.ietf.org/rfc/rfc4346.txt <del> * https://github.com/nodejs/node-v0.x-archive/issues/7906 <del> * https://github.com/nodejs/node-v0.x-archive/issues/5239 <del> * <del> * In FIPS mode OPENSSL_DH_FIPS_MIN_MODULUS_BITS = 1024, meaning we need <del> * a FIPS-friendly >= 1024 bit prime, we can use MODP 14 from RFC 3526: <del> * https://www.ietf.org/rfc/rfc3526.txt <del> * <del> * We can generate appropriate values with this code: <del> * <del> * crypto = require('crypto'); <del> * <del> * for (;;) { <del> * var a = crypto.getDiffieHellman('modp14'), <del> * var b = crypto.getDiffieHellman('modp14'); <del> * <del> * a.generateKeys(); <del> * b.generateKeys(); <del> * <del> * var aSecret = a.computeSecret(b.getPublicKey()).toString('hex'); <del> * console.log("A public: " + a.getPublicKey().toString('hex')); <del> * console.log("A private: " + a.getPrivateKey().toString('hex')); <del> * console.log("B public: " + b.getPublicKey().toString('hex')); <del> * console.log("B private: " + b.getPrivateKey().toString('hex')); <del> * console.log("A secret: " + aSecret); <del> * console.log('-------------------------------------------------'); <del> * if(aSecret.substring(0,2) === "00") { <del> * console.log("found short key!"); <del> * return; <del> * } <del> * } <del> */ <add>// This test verifies padding with leading zeroes for shared <add>// secrets that are strictly smaller than the modulus (prime). <add>// See: <add>// RFC 4346: https://www.ietf.org/rfc/rfc4346.txt <add>// https://github.com/nodejs/node-v0.x-archive/issues/7906 <add>// https://github.com/nodejs/node-v0.x-archive/issues/5239 <add>// <add>// In FIPS mode OPENSSL_DH_FIPS_MIN_MODULUS_BITS = 1024, meaning we need <add>// a FIPS-friendly >= 1024 bit prime, we can use MODP 14 from RFC 3526: <add>// https://www.ietf.org/rfc/rfc3526.txt <add>// <add>// We can generate appropriate values with this code: <add>// <add>// crypto = require('crypto'); <add>// <add>// for (;;) { <add>// var a = crypto.getDiffieHellman('modp14'), <add>// var b = crypto.getDiffieHellman('modp14'); <add>// <add>// a.generateKeys(); <add>// b.generateKeys(); <add>// <add>// var aSecret = a.computeSecret(b.getPublicKey()).toString('hex'); <add>// console.log("A public: " + a.getPublicKey().toString('hex')); <add>// console.log("A private: " + a.getPrivateKey().toString('hex')); <add>// console.log("B public: " + b.getPublicKey().toString('hex')); <add>// console.log("B private: " + b.getPrivateKey().toString('hex')); <add>// console.log("A secret: " + aSecret); <add>// console.log('-------------------------------------------------'); <add>// if(aSecret.substring(0,2) === "00") { <add>// console.log("found short key!"); <add>// return; <add>// } <add>// } <ide> <ide> const apub = <ide> '5484455905d3eff34c70980e871f27f05448e66f5a6efbb97cbcba4e927196c2bd9ea272cded91\ <ide><path>test/parallel/test-crypto-dh.js <ide> const aSecret = alice.computeSecret(bob.getPublicKey()).toString('hex'); <ide> const bSecret = bob.computeSecret(alice.getPublicKey()).toString('hex'); <ide> assert.strictEqual(aSecret, bSecret); <ide> <del>/* Ensure specific generator (buffer) works as expected. <del> * The values below (modp2/modp2buf) are for a 1024 bits long prime from <del> * RFC 2412 E.2, see https://tools.ietf.org/html/rfc2412. */ <add>// Ensure specific generator (buffer) works as expected. <add>// The values below (modp2/modp2buf) are for a 1024 bits long prime from <add>// RFC 2412 E.2, see https://tools.ietf.org/html/rfc2412. 
<ide> const modp2 = crypto.createDiffieHellmanGroup('modp2');
<ide> const modp2buf = Buffer.from([
<ide> 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xc9, 0x0f,
<ide><path>test/parallel/test-crypto-worker-thread.js
<ide> if (!common.hasCrypto)
<ide> common.skip('missing crypto');
<ide>
<ide> // Issue https://github.com/nodejs/node/issues/35263
<del>/* Description: test for checking keyobject passed to worker thread
<del> does not crash */
<add>// Description: Test that passing keyobject to worker thread does not crash.
<ide> const { createSecretKey } = require('crypto');
<ide>
<ide> const { Worker, isMainThread, workerData } = require('worker_threads');
<ide><path>test/parallel/test-dns.js
<ide> assert.deepStrictEqual(dns.getServers(), []);
<ide> }
<ide>
<ide> {
<del> /*
<del> * Make sure that dns.lookup throws if hints does not represent a valid flag.
<del> * (dns.V4MAPPED | dns.ADDRCONFIG | dns.ALL) + 1 is invalid because:
<del> * - it's different from dns.V4MAPPED and dns.ADDRCONFIG and dns.ALL.
<del> * - it's different from any subset of them bitwise ored.
<del> * - it's different from 0.
<del> * - it's an odd number different than 1, and thus is invalid, because
<del> * flags are either === 1 or even.
<del> */
<add> // Make sure that dns.lookup throws if hints does not represent a valid flag.
<add> // (dns.V4MAPPED | dns.ADDRCONFIG | dns.ALL) + 1 is invalid because:
<add> // - it's different from dns.V4MAPPED and dns.ADDRCONFIG and dns.ALL.
<add> // - it's different from any subset of them bitwise ored.
<add> // - it's different from 0.
<add> // - it's an odd number different than 1, and thus is invalid, because
<add> // flags are either === 1 or even.
<ide> const hints = (dns.V4MAPPED | dns.ADDRCONFIG | dns.ALL) + 1;
<ide> const err = {
<ide> code: 'ERR_INVALID_ARG_VALUE',
<ide><path>test/parallel/test-domain-emit-error-handler-stack.js
<ide> const assert = require('assert');
<ide> const domain = require('domain');
<ide> const EventEmitter = require('events');
<ide>
<del>/*
<del> * Make sure that the domains stack and the active domain is setup properly when
<del> * a domain's error handler is called due to an error event being emitted.
<del> * More specifically, we want to test that:
<del> * - the active domain in the domain's error handler is *not* that domain, *but*
<del> * the active domain is a any direct parent domain at the time the error was
<del> * emitted.
<del> * - the domains stack in the domain's error handler does *not* include that
<del> * domain, *but* it includes all parents of that domain when the error was
<del> * emitted.
<del> */
<add>// Make sure that the domains stack and the active domain is setup properly when
<add>// a domain's error handler is called due to an error event being emitted.
<add>// More specifically, we want to test that:
<add>// - the active domain in the domain's error handler is *not* that domain, *but*
<add>// the active domain is a any direct parent domain at the time the error was
<add>// emitted.
<add>// - the domains stack in the domain's error handler does *not* include that
<add>// domain, *but* it includes all parents of that domain when the error was
<add>// emitted.
<ide> const d1 = domain.create(); <ide> const d2 = domain.create(); <ide> const d3 = domain.create(); <ide><path>test/parallel/test-domain-thrown-error-handler-stack.js <ide> const common = require('../common'); <ide> const domain = require('domain'); <ide> <del>/* <del> * Make sure that when an erorr is thrown from a nested domain, its error <del> * handler runs outside of that domain, but within the context of any parent <del> * domain. <del> */ <add>// Make sure that when an erorr is thrown from a nested domain, its error <add>// handler runs outside of that domain, but within the context of any parent <add>// domain. <ide> <ide> const d = domain.create(); <ide> const d2 = domain.create(); <ide><path>test/parallel/test-domain-top-level-error-handler-clears-stack.js <ide> const common = require('../common'); <ide> const domain = require('domain'); <ide> <del>/* <del> * Make sure that the domains stack is cleared after a top-level domain <del> * error handler exited gracefully. <del> */ <add>// Make sure that the domains stack is cleared after a top-level domain <add>// error handler exited gracefully. <ide> const d = domain.create(); <ide> <ide> d.on('error', common.mustCall(() => { <ide><path>test/parallel/test-domain-top-level-error-handler-throw.js <ide> 'use strict'; <ide> <del>/* <del> * The goal of this test is to make sure that when a top-level error <del> * handler throws an error following the handling of a previous error, <del> * the process reports the error message from the error thrown in the <del> * top-level error handler, not the one from the previous error. <del> */ <add>// The goal of this test is to make sure that when a top-level error <add>// handler throws an error following the handling of a previous error, <add>// the process reports the error message from the error thrown in the <add>// top-level error handler, not the one from the previous error. <ide> <ide> require('../common'); <ide> <ide><path>test/parallel/test-domain-uncaught-exception.js <ide> 'use strict'; <ide> <del>/* <del> * The goal of this test is to make sure that errors thrown within domains <del> * are handled correctly. It checks that the process' 'uncaughtException' event <del> * is emitted when appropriate, and not emitted when it shouldn't. It also <del> * checks that the proper domain error handlers are called when they should <del> * be called, and not called when they shouldn't. <del> */ <add>// The goal of this test is to make sure that errors thrown within domains <add>// are handled correctly. It checks that the process' 'uncaughtException' event <add>// is emitted when appropriate, and not emitted when it shouldn't. It also <add>// checks that the proper domain error handlers are called when they should <add>// be called, and not called when they shouldn't. <ide> <ide> const common = require('../common'); <ide> const assert = require('assert'); <ide> const child_process = require('child_process'); <ide> const tests = []; <ide> <ide> function test1() { <del> /* <del> * Throwing from an async callback from within a domain that doesn't have <del> * an error handler must result in emitting the process' uncaughtException <del> * event. <del> */ <add> // Throwing from an async callback from within a domain that doesn't have <add> // an error handler must result in emitting the process' uncaughtException <add> // event. 
<ide> const d = domain.create(); <ide> d.run(function() { <ide> setTimeout(function onTimeout() { <ide> tests.push({ <ide> }); <ide> <ide> function test2() { <del> /* <del> * Throwing from from within a domain that doesn't have an error handler must <del> * result in emitting the process' uncaughtException event. <del> */ <add> // Throwing from from within a domain that doesn't have an error handler must <add> // result in emitting the process' uncaughtException event. <ide> const d2 = domain.create(); <ide> d2.run(function() { <ide> throw new Error('boom!'); <ide> tests.push({ <ide> }); <ide> <ide> function test3() { <del> /* <del> * This test creates two nested domains: d3 and d4. d4 doesn't register an <del> * error handler, but d3 does. The error is handled by the d3 domain and thus <del> * an 'uncaughtException' event should _not_ be emitted. <del> */ <add> // This test creates two nested domains: d3 and d4. d4 doesn't register an <add> // error handler, but d3 does. The error is handled by the d3 domain and thus <add> // an 'uncaughtException' event should _not_ be emitted. <ide> const d3 = domain.create(); <ide> const d4 = domain.create(); <ide> <ide> tests.push({ <ide> }); <ide> <ide> function test4() { <del> /* <del> * This test creates two nested domains: d5 and d6. d6 doesn't register an <del> * error handler. When the timer's callback is called, because async <del> * operations like timer callbacks are bound to the domain that was active <del> * at the time of their creation, and because both d5 and d6 domains have <del> * exited by the time the timer's callback is called, its callback runs with <del> * only d6 on the domains stack. Since d6 doesn't register an error handler, <del> * the process' uncaughtException event should be emitted. <del> */ <add> // This test creates two nested domains: d5 and d6. d6 doesn't register an <add> // error handler. When the timer's callback is called, because async <add> // operations like timer callbacks are bound to the domain that was active <add> // at the time of their creation, and because both d5 and d6 domains have <add> // exited by the time the timer's callback is called, its callback runs with <add> // only d6 on the domains stack. Since d6 doesn't register an error handler, <add> // the process' uncaughtException event should be emitted. <ide> const d5 = domain.create(); <ide> const d6 = domain.create(); <ide> <ide> tests.push({ <ide> }); <ide> <ide> function test5() { <del> /* <del> * This test creates two nested domains: d7 and d8. d8 _does_ register an <del> * error handler, so throwing within that domain should not emit an uncaught <del> * exception. <del> */ <add> // This test creates two nested domains: d7 and d8. d8 _does_ register an <add> // error handler, so throwing within that domain should not emit an uncaught <add> // exception. <ide> const d7 = domain.create(); <ide> const d8 = domain.create(); <ide> <ide> tests.push({ <ide> }); <ide> <ide> function test6() { <del> /* <del> * This test creates two nested domains: d9 and d10. d10 _does_ register an <del> * error handler, so throwing within that domain in an async callback should <del> * _not_ emit an uncaught exception. <del> */ <add> // This test creates two nested domains: d9 and d10. d10 _does_ register an <add> // error handler, so throwing within that domain in an async callback should <add> // _not_ emit an uncaught exception. 
<add> // <ide> const d9 = domain.create(); <ide> const d10 = domain.create(); <ide> <ide><path>test/parallel/test-domain-with-abort-on-uncaught-exception.js <ide> const common = require('../common'); <ide> const assert = require('assert'); <ide> const fs = require('fs'); <ide> <del>/* <del> * The goal of this test is to make sure that: <del> * <del> * - Even if --abort_on_uncaught_exception is passed on the command line, <del> * setting up a top-level domain error handler and throwing an error <del> * within this domain does *not* make the process abort. The process exits <del> * gracefully. <del> * <del> * - When passing --abort_on_uncaught_exception on the command line and <del> * setting up a top-level domain error handler, an error thrown <del> * within this domain's error handler *does* make the process abort. <del> * <del> * - When *not* passing --abort_on_uncaught_exception on the command line and <del> * setting up a top-level domain error handler, an error thrown within this <del> * domain's error handler does *not* make the process abort, but makes it exit <del> * with the proper failure exit code. <del> * <del> * - When throwing an error within the top-level domain's error handler <del> * within a try/catch block, the process should exit gracefully, whether or <del> * not --abort_on_uncaught_exception is passed on the command line. <del> */ <add>// The goal of this test is to make sure that: <add>// <add>// - Even if --abort_on_uncaught_exception is passed on the command line, <add>// setting up a top-level domain error handler and throwing an error <add>// within this domain does *not* make the process abort. The process exits <add>// gracefully. <add>// <add>// - When passing --abort_on_uncaught_exception on the command line and <add>// setting up a top-level domain error handler, an error thrown <add>// within this domain's error handler *does* make the process abort. <add>// <add>// - When *not* passing --abort_on_uncaught_exception on the command line and <add>// setting up a top-level domain error handler, an error thrown within this <add>// domain's error handler does *not* make the process abort, but makes it exit <add>// with the proper failure exit code. <add>// <add>// - When throwing an error within the top-level domain's error handler <add>// within a try/catch block, the process should exit gracefully, whether or <add>// not --abort_on_uncaught_exception is passed on the command line. <ide> <ide> const domainErrHandlerExMessage = 'exception from domain error handler'; <ide> <ide><path>test/parallel/test-fs-access.js <ide> tmpdir.refresh(); <ide> createFileWithPerms(readOnlyFile, 0o444); <ide> createFileWithPerms(readWriteFile, 0o666); <ide> <del>/* <del> * On non-Windows supported platforms, fs.access(readOnlyFile, W_OK, ...) <del> * always succeeds if node runs as the super user, which is sometimes the <del> * case for tests running on our continuous testing platform agents. <del> * <del> * In this case, this test tries to change its process user id to a <del> * non-superuser user so that the test that checks for write access to a <del> * read-only file can be more meaningful. <del> * <del> * The change of user id is done after creating the fixtures files for the same <del> * reason: the test may be run as the superuser within a directory in which <del> * only the superuser can create files, and thus it may need superuser <del> * privileges to create them. 
<del> * <del> * There's not really any point in resetting the process' user id to 0 after <del> * changing it to 'nobody', since in the case that the test runs without <del> * superuser privilege, it is not possible to change its process user id to <del> * superuser. <del> * <del> * It can prevent the test from removing files created before the change of user <del> * id, but that's fine. In this case, it is the responsibility of the <del> * continuous integration platform to take care of that. <del> */ <add>// On non-Windows supported platforms, fs.access(readOnlyFile, W_OK, ...) <add>// always succeeds if node runs as the super user, which is sometimes the <add>// case for tests running on our continuous testing platform agents. <add>// <add>// In this case, this test tries to change its process user id to a <add>// non-superuser user so that the test that checks for write access to a <add>// read-only file can be more meaningful. <add>// <add>// The change of user id is done after creating the fixtures files for the same <add>// reason: the test may be run as the superuser within a directory in which <add>// only the superuser can create files, and thus it may need superuser <add>// privileges to create them. <add>// <add>// There's not really any point in resetting the process' user id to 0 after <add>// changing it to 'nobody', since in the case that the test runs without <add>// superuser privilege, it is not possible to change its process user id to <add>// superuser. <add>// <add>// It can prevent the test from removing files created before the change of user <add>// id, but that's fine. In this case, it is the responsibility of the <add>// continuous integration platform to take care of that. <ide> let hasWriteAccessForReadonlyFile = false; <ide> if (!common.isWindows && process.getuid() === 0) { <ide> hasWriteAccessForReadonlyFile = true; <ide><path>test/parallel/test-fs-options-immutable.js <ide> 'use strict'; <ide> const common = require('../common'); <ide> <del>/* <del> * These tests make sure that the `options` object passed to these functions are <del> * never altered. <del> * <del> * Refer: https://github.com/nodejs/node/issues/7655 <del> */ <add>// These tests make sure that the `options` object passed to these functions are <add>// never altered. <add>// <add>// Refer: https://github.com/nodejs/node/issues/7655 <ide> <ide> const assert = require('assert'); <ide> const fs = require('fs'); <ide><path>test/parallel/test-fs-promises-readfile-with-fd.js <ide> 'use strict'; <ide> <del>/* <del> * This test makes sure that `readFile()` always reads from the current <del> * position of the file, instead of reading from the beginning of the file. <del> */ <add>// This test makes sure that `readFile()` always reads from the current <add>// position of the file, instead of reading from the beginning of the file. <ide> <ide> const common = require('../common'); <ide> const assert = require('assert'); <ide><path>test/parallel/test-fs-promises-writefile-with-fd.js <ide> 'use strict'; <ide> <del>/* <del> * This test makes sure that `writeFile()` always writes from the current <del> * position of the file, instead of truncating the file. <del> */ <add>// This test makes sure that `writeFile()` always writes from the current <add>// position of the file, instead of truncating the file. 
<ide> <ide> const common = require('../common'); <ide> const assert = require('assert'); <ide><path>test/parallel/test-fs-readfile-fd.js <ide> function tempFdSync(callback) { <ide> } <ide> <ide> { <del> /* <del> * This test makes sure that `readFile()` always reads from the current <del> * position of the file, instead of reading from the beginning of the file, <del> * when used with file descriptors. <del> */ <add> // This test makes sure that `readFile()` always reads from the current <add> // position of the file, instead of reading from the beginning of the file, <add> // when used with file descriptors. <ide> <ide> const filename = join(tmpdir.path, 'test.txt'); <ide> fs.writeFileSync(filename, 'Hello World'); <ide> <ide> { <del> /* Tests the fs.readFileSync(). */ <add> // Tests the fs.readFileSync(). <ide> const fd = fs.openSync(filename, 'r'); <ide> <del> /* Read only five bytes, so that the position moves to five. */ <add> // Read only five bytes, so that the position moves to five. <ide> const buf = Buffer.alloc(5); <ide> assert.deepStrictEqual(fs.readSync(fd, buf, 0, 5), 5); <ide> assert.deepStrictEqual(buf.toString(), 'Hello'); <ide> <del> /* readFileSync() should read from position five, instead of zero. */ <add> // readFileSync() should read from position five, instead of zero. <ide> assert.deepStrictEqual(fs.readFileSync(fd).toString(), ' World'); <ide> <ide> fs.closeSync(fd); <ide> } <ide> <ide> { <del> /* Tests the fs.readFile(). */ <add> // Tests the fs.readFile(). <ide> fs.open(filename, 'r', common.mustCall((err, fd) => { <ide> assert.ifError(err); <ide> const buf = Buffer.alloc(5); <ide> <del> /* Read only five bytes, so that the position moves to five. */ <add> // Read only five bytes, so that the position moves to five. <ide> fs.read(fd, buf, 0, 5, null, common.mustCall((err, bytes) => { <ide> assert.ifError(err); <ide> assert.strictEqual(bytes, 5); <ide> assert.deepStrictEqual(buf.toString(), 'Hello'); <ide> <ide> fs.readFile(fd, common.mustCall((err, data) => { <ide> assert.ifError(err); <del> /* readFile() should read from position five, instead of zero. */ <add> // readFile() should read from position five, instead of zero. 
<ide> assert.deepStrictEqual(data.toString(), ' World'); <ide> <ide> fs.closeSync(fd); <ide><path>test/parallel/test-fs-realpath.js <ide> function test_deep_symlink_mix(realpath, realpathSync, callback) { <ide> return callback(); <ide> } <ide> <del> /* <del> /tmp/node-test-realpath-f1 -> $tmpDir/node-test-realpath-d1/foo <del> /tmp/node-test-realpath-d1 -> $tmpDir/node-test-realpath-d2 <del> /tmp/node-test-realpath-d2/foo -> $tmpDir/node-test-realpath-f2 <del> /tmp/node-test-realpath-f2 <del> -> $tmpDir/targets/nested-index/one/realpath-c <del> $tmpDir/targets/nested-index/one/realpath-c <del> -> $tmpDir/targets/nested-index/two/realpath-c <del> $tmpDir/targets/nested-index/two/realpath-c -> $tmpDir/cycles/root.js <del> $tmpDir/targets/cycles/root.js (hard) <del> */ <add> // /tmp/node-test-realpath-f1 -> $tmpDir/node-test-realpath-d1/foo <add> // /tmp/node-test-realpath-d1 -> $tmpDir/node-test-realpath-d2 <add> // /tmp/node-test-realpath-d2/foo -> $tmpDir/node-test-realpath-f2 <add> // /tmp/node-test-realpath-f2 <add> // -> $tmpDir/targets/nested-index/one/realpath-c <add> // $tmpDir/targets/nested-index/one/realpath-c <add> // -> $tmpDir/targets/nested-index/two/realpath-c <add> // $tmpDir/targets/nested-index/two/realpath-c -> $tmpDir/cycles/root.js <add> // $tmpDir/targets/cycles/root.js (hard) <add> <ide> const entry = tmp('node-test-realpath-f1'); <ide> try { fs.unlinkSync(tmp('node-test-realpath-d2/foo')); } catch {} <ide> try { fs.rmdirSync(tmp('node-test-realpath-d2')); } catch {} <ide><path>test/parallel/test-fs-writefile-with-fd.js <ide> 'use strict'; <ide> <del>/* <del> * This test makes sure that `writeFile()` always writes from the current <del> * position of the file, instead of truncating the file, when used with file <del> * descriptors. <del> */ <add>// This test makes sure that `writeFile()` always writes from the current <add>// position of the file, instead of truncating the file, when used with file <add>// descriptors. <ide> <ide> const common = require('../common'); <ide> const assert = require('assert'); <ide><path>test/parallel/test-http-client-keep-alive-release-before-finish.js <ide> const server = http.createServer((req, res) => { <ide> res.resume(); <ide> })); <ide> <del> /* What happens here is that the server `end`s the response before we send <del> * `something`, and the client thought that this is a green light for sending <del> * next GET request <del> */ <add> // What happens here is that the server `end`s the response before we send <add> // `something`, and the client thought that this is a green light for sending <add> // next GET request <ide> post.write(Buffer.alloc(16 * 1024, 'X')); <ide> setTimeout(() => { <ide> post.end('something'); <ide><path>test/parallel/test-http-host-header-ipv6-fail.js <ide> 'use strict'; <del>/* <del> * When using the object form of http.request and using an IPv6 address <del> * as a hostname, and using a non-standard port, the Host header <del> * is improperly formatted. <del> * Issue: https://github.com/nodejs/node/issues/5308 <del> * As per https://tools.ietf.org/html/rfc7230#section-5.4 and <del> * https://tools.ietf.org/html/rfc3986#section-3.2.2 <del> * the IPv6 address should be enclosed in square brackets <del> */ <add> <add>// When using the object form of http.request and using an IPv6 address <add>// as a hostname, and using a non-standard port, the Host header <add>// is improperly formatted. 
<add>// Issue: https://github.com/nodejs/node/issues/5308 <add>// As per https://tools.ietf.org/html/rfc7230#section-5.4 and <add>// https://tools.ietf.org/html/rfc3986#section-3.2.2 <add>// the IPv6 address should be enclosed in square brackets <ide> <ide> const common = require('../common'); <ide> const assert = require('assert'); <ide><path>test/parallel/test-http-no-read-no-dump.js <ide> const server = http.createServer((req, res) => { <ide> }; <ide> })); <ide> <del> /* What happens here is that the server `end`s the response before we send <del> * `something`, and the client thought that this is a green light for sending <del> * next GET request <del> */ <add> // What happens here is that the server `end`s the response before we send <add> // `something`, and the client thought that this is a green light for sending <add> // next GET request <ide> post.write('initial'); <ide> <ide> http.request({ <ide><path>test/parallel/test-http-upgrade-server.js <ide> function writeReq(socket, data, encoding) { <ide> } <ide> <ide> <del>/*----------------------------------------------- <del> connection: Upgrade with listener <del>-----------------------------------------------*/ <add>// connection: Upgrade with listener <ide> function test_upgrade_with_listener() { <ide> const conn = net.createConnection(server.address().port); <ide> conn.setEncoding('utf8'); <ide> function test_upgrade_with_listener() { <ide> }); <ide> } <ide> <del>/*----------------------------------------------- <del> connection: Upgrade, no listener <del>-----------------------------------------------*/ <add>// connection: Upgrade, no listener <ide> function test_upgrade_no_listener() { <ide> const conn = net.createConnection(server.address().port); <ide> conn.setEncoding('utf8'); <ide> function test_upgrade_no_listener() { <ide> }); <ide> } <ide> <del>/*----------------------------------------------- <del> connection: normal <del>-----------------------------------------------*/ <add>// connection: normal <ide> function test_standard_http() { <ide> const conn = net.createConnection(server.address().port); <ide> conn.setEncoding('utf8'); <ide> server.listen(0, function() { <ide> }); <ide> <ide> <del>/*----------------------------------------------- <del> Fin. <del>-----------------------------------------------*/ <add>// Fin. <ide> process.on('exit', function() { <ide> assert.strictEqual(requests_recv, 3); <ide> assert.strictEqual(requests_sent, 3); <ide><path>test/parallel/test-next-tick-doesnt-hang.js <ide> // USE OR OTHER DEALINGS IN THE SOFTWARE. <ide> <ide> 'use strict'; <del>/* <del> * This test verifies that having a single nextTick statement and nothing else <del> * does not hang the event loop. If this test times out it has failed. <del> */ <add> <add>// This test verifies that having a single nextTick statement and nothing else <add>// does not hang the event loop. If this test times out it has failed. <ide> <ide> require('../common'); <ide> process.nextTick(function() { <ide><path>test/parallel/test-next-tick-ordering.js <ide> console.log('Running from main.'); <ide> <ide> process.on('exit', function() { <ide> assert.strictEqual(done[0], 'nextTick'); <del> /* Disabling this test. I don't think we can ensure the order <del> for (i = 0; i < N; i += 1) { <del> assert.strictEqual(i, done[i + 1]); <del> } <del> */ <add> // Disabling this test. 
I don't think we can ensure the order <add> // for (i = 0; i < N; i += 1) { <add> // assert.strictEqual(i, done[i + 1]); <add> // } <ide> }); <ide><path>test/parallel/test-process-env.js <ide> if (process.argv[2] === 'you-are-the-child') { <ide> delete process.env.NON_EXISTING_VARIABLE; <ide> assert(delete process.env.NON_EXISTING_VARIABLE); <ide> <del>/* For the moment we are not going to support setting the timezone via the <del> * environment variables. The problem is that various V8 platform backends <del> * deal with timezone in different ways. The windows platform backend caches <del> * the timezone value while the Linux one hits libc for every query. <del> <del>https://github.com/joyent/node/blob/08782931205bc4f6d28102ebc29fd806e8ccdf1f/deps/v8/src/platform-linux.cc#L339-345 <del>https://github.com/joyent/node/blob/08782931205bc4f6d28102ebc29fd806e8ccdf1f/deps/v8/src/platform-win32.cc#L590-596 <del> <del>// set the timezone; see tzset(3) <del>process.env.TZ = 'Europe/Amsterdam'; <del> <del>// time difference between Greenwich and Amsterdam is +2 hours in the summer <del>date = new Date('Fri, 10 Sep 1982 03:15:00 GMT'); <del>assert.strictEqual(3, date.getUTCHours()); <del>assert.strictEqual(5, date.getHours()); <del>*/ <add>// For the moment we are not going to support setting the timezone via the <add>// environment variables. The problem is that various V8 platform backends <add>// deal with timezone in different ways. The Windows platform backend caches <add>// the timezone value while the Linux one hits libc for every query. <add>// <add>// https://github.com/joyent/node/blob/08782931205bc4f6d28102ebc29fd806e8ccdf1f/deps/v8/src/platform-linux.cc#L339-345 <add>// https://github.com/joyent/node/blob/08782931205bc4f6d28102ebc29fd806e8ccdf1f/deps/v8/src/platform-win32.cc#L590-596 <add>// <add>// // set the timezone; see tzset(3) <add>// process.env.TZ = 'Europe/Amsterdam'; <add>// <add>// // time difference between Greenwich and Amsterdam is +2 hours in the summer <add>// date = new Date('Fri, 10 Sep 1982 03:15:00 GMT'); <add>// assert.strictEqual(3, date.getUTCHours()); <add>// assert.strictEqual(5, date.getHours()); <ide> <ide> // Environment variables should be case-insensitive on Windows, and <ide> // case-sensitive on other platforms. <ide><path>test/parallel/test-querystring-maxKeys-non-finite.js <ide> require('../common'); <ide> const assert = require('assert'); <ide> const parse = require('querystring').parse; <ide> <del>/* <del>taken from express-js/body-parser <del>https://github.com/expressjs/body-parser/ <del>blob/ed25264fb494cf0c8bc992b8257092cd4f694d5e/test/urlencoded.js#L636-L651 <del>*/ <add>// Taken from express-js/body-parser <add>// https://github.com/expressjs/body-parser/blob/ed25264fb494cf0c8bc992b8257092cd4f694d5e/test/urlencoded.js#L636-L651 <ide> function createManyParams(count) { <ide> let str = ''; <ide> <ide><path>test/parallel/test-repl-unexpected-token-recoverable.js <ide> 'use strict'; <del>/* <del> * This is a regression test for https://github.com/joyent/node/issues/8874. <del> */ <add> <add>// This is a regression test for https://github.com/joyent/node/issues/8874. 
<add> <ide> require('../common'); <ide> const assert = require('assert'); <ide> <ide><path>test/parallel/test-stream-transform-final-sync.js <ide> const assert = require('assert'); <ide> const stream = require('stream'); <ide> let state = 0; <ide> <del>/* <del>What you do <del>const stream = new stream.Transform({ <del> transform: function transformCallback(chunk, _, next) { <del> // part 1 <del> this.push(chunk); <del> //part 2 <del> next(); <del> }, <del> final: function endCallback(done) { <del> // part 1 <del> process.nextTick(function () { <del> // part 2 <del> done(); <del> }); <del> }, <del> flush: function flushCallback(done) { <del> // part 1 <del> process.nextTick(function () { <del> // part 2 <del> done(); <del> }); <del> } <del>}); <del>t.on('data', dataListener); <del>t.on('end', endListener); <del>t.on('finish', finishListener); <del>t.write(1); <del>t.write(4); <del>t.end(7, endMethodCallback); <del> <del>The order things are called <ide> <del>1. transformCallback part 1 <del>2. dataListener <del>3. transformCallback part 2 <del>4. transformCallback part 1 <del>5. dataListener <del>6. transformCallback part 2 <del>7. transformCallback part 1 <del>8. dataListener <del>9. transformCallback part 2 <del>10. finalCallback part 1 <del>11. finalCallback part 2 <del>12. flushCallback part 1 <del>13. finishListener <del>14. endMethodCallback <del>15. flushCallback part 2 <del>16. endListener <del>*/ <add>// What you do <add>// <add>// const stream = new stream.Transform({ <add>// transform: function transformCallback(chunk, _, next) { <add>// // part 1 <add>// this.push(chunk); <add>// //part 2 <add>// next(); <add>// }, <add>// final: function endCallback(done) { <add>// // part 1 <add>// process.nextTick(function () { <add>// // part 2 <add>// done(); <add>// }); <add>// }, <add>// flush: function flushCallback(done) { <add>// // part 1 <add>// process.nextTick(function () { <add>// // part 2 <add>// done(); <add>// }); <add>// } <add>// }); <add>// t.on('data', dataListener); <add>// t.on('end', endListener); <add>// t.on('finish', finishListener); <add>// t.write(1); <add>// t.write(4); <add>// t.end(7, endMethodCallback); <add>// <add>// The order things are called <add>// <add>// 1. transformCallback part 1 <add>// 2. dataListener <add>// 3. transformCallback part 2 <add>// 4. transformCallback part 1 <add>// 5. dataListener <add>// 6. transformCallback part 2 <add>// 7. transformCallback part 1 <add>// 8. dataListener <add>// 9. transformCallback part 2 <add>// 10. finalCallback part 1 <add>// 11. finalCallback part 2 <add>// 12. flushCallback part 1 <add>// 13. finishListener <add>// 14. endMethodCallback <add>// 15. flushCallback part 2 <add>// 16. 
endListener <ide> <ide> const t = new stream.Transform({ <ide> objectMode: true, <ide><path>test/parallel/test-stream-transform-final.js <ide> const assert = require('assert'); <ide> const stream = require('stream'); <ide> let state = 0; <ide> <del>/* <del>What you do <del>const stream = new stream.Transform({ <del> transform: function transformCallback(chunk, _, next) { <del> // part 1 <del> this.push(chunk); <del> //part 2 <del> next(); <del> }, <del> final: function endCallback(done) { <del> // part 1 <del> process.nextTick(function () { <del> // part 2 <del> done(); <del> }); <del> }, <del> flush: function flushCallback(done) { <del> // part 1 <del> process.nextTick(function () { <del> // part 2 <del> done(); <del> }); <del> } <del>}); <del>t.on('data', dataListener); <del>t.on('end', endListener); <del>t.on('finish', finishListener); <del>t.write(1); <del>t.write(4); <del>t.end(7, endMethodCallback); <ide> <del>The order things are called <add>// What you do: <add>// <add>// const stream = new stream.Transform({ <add>// transform: function transformCallback(chunk, _, next) { <add>// // part 1 <add>// this.push(chunk); <add>// //part 2 <add>// next(); <add>// }, <add>// final: function endCallback(done) { <add>// // part 1 <add>// process.nextTick(function () { <add>// // part 2 <add>// done(); <add>// }); <add>// }, <add>// flush: function flushCallback(done) { <add>// // part 1 <add>// process.nextTick(function () { <add>// // part 2 <add>// done(); <add>// }); <add>// } <add>// }); <add>// t.on('data', dataListener); <add>// t.on('end', endListener); <add>// t.on('finish', finishListener); <add>// t.write(1); <add>// t.write(4); <add>// t.end(7, endMethodCallback); <add>// <add>// The order things are called <ide> <del>1. transformCallback part 1 <del>2. dataListener <del>3. transformCallback part 2 <del>4. transformCallback part 1 <del>5. dataListener <del>6. transformCallback part 2 <del>7. transformCallback part 1 <del>8. dataListener <del>9. transformCallback part 2 <del>10. finalCallback part 1 <del>11. finalCallback part 2 <del>12. flushCallback part 1 <del>13. finishListener <del>14. endMethodCallback <del>15. flushCallback part 2 <del>16. endListener <del>*/ <add>// 1. transformCallback part 1 <add>// 2. dataListener <add>// 3. transformCallback part 2 <add>// 4. transformCallback part 1 <add>// 5. dataListener <add>// 6. transformCallback part 2 <add>// 7. transformCallback part 1 <add>// 8. dataListener <add>// 9. transformCallback part 2 <add>// 10. finalCallback part 1 <add>// 11. finalCallback part 2 <add>// 12. flushCallback part 1 <add>// 13. finishListener <add>// 14. endMethodCallback <add>// 15. flushCallback part 2 <add>// 16. endListener <ide> <ide> const t = new stream.Transform({ <ide> objectMode: true, <ide><path>test/parallel/test-timers-non-integer-delay.js <ide> const common = require('../common'); <ide> const assert = require('assert'); <ide> <del>/* <del> * This test makes sure that non-integer timer delays do not make the process <del> * hang. See https://github.com/joyent/node/issues/8065 and <del> * https://github.com/joyent/node/issues/8068 which have been fixed by <del> * https://github.com/joyent/node/pull/8073. <del> * <del> * If the process hangs, this test will make the tests suite timeout, <del> * otherwise it will exit very quickly (after 50 timers with a short delay <del> * fire). <del> * <del> * We have to set at least several timers with a non-integer delay to <del> * reproduce the issue. 
Sometimes, a timer with a non-integer delay will <del> * expire correctly. 50 timers has always been more than enough to reproduce <del> * it 100%. <del> */ <add>// This test makes sure that non-integer timer delays do not make the process <add>// hang. See https://github.com/joyent/node/issues/8065 and <add>// https://github.com/joyent/node/issues/8068 which have been fixed by <add>// https://github.com/joyent/node/pull/8073. <add>// <add>// If the process hangs, this test will make the tests suite timeout, <add>// otherwise it will exit very quickly (after 50 timers with a short delay <add>// fire). <add>// <add>// We have to set at least several timers with a non-integer delay to <add>// reproduce the issue. Sometimes, a timer with a non-integer delay will <add>// expire correctly. 50 timers has always been more than enough to reproduce <add>// it 100%. <ide> <ide> const TIMEOUT_DELAY = 1.1; <ide> let N = 50; <ide><path>test/parallel/test-timers-same-timeout-wrong-list-deleted.js <ide> 'use strict'; <ide> <del>/* <del> * This is a regression test for https://github.com/nodejs/node/issues/7722. <del> * <del> * When nested timers have the same timeout, calling clearTimeout on the <del> * older timer after it has fired causes the list the newer timer is in <del> * to be deleted. Since the newer timer was not cleared, it still blocks <del> * the event loop completing for the duration of its timeout, however, since <del> * no reference exists to it in its list, it cannot be canceled and its <del> * callback is not called when the timeout elapses. <del> */ <add>// This is a regression test for https://github.com/nodejs/node/issues/7722. <add>// <add>// When nested timers have the same timeout, calling clearTimeout on the <add>// older timer after it has fired causes the list the newer timer is in <add>// to be deleted. Since the newer timer was not cleared, it still blocks <add>// the event loop completing for the duration of its timeout, however, since <add>// no reference exists to it in its list, it cannot be canceled and its <add>// callback is not called when the timeout elapses. <ide> <ide> const common = require('../common'); <ide> <ide><path>test/parallel/test-timers-socket-timeout-removes-other-socket-unref-timer.js <ide> 'use strict'; <ide> <del>/* <del> * This test is a regression test for joyent/node#8897. <del> */ <add>// Regression test for https://github.com/nodejs/node-v0.x-archive/issues/8897. <ide> <ide> const common = require('../common'); <ide> const net = require('net'); <ide> const server = net.createServer(function onClient(client) { <ide> clients.push(client); <ide> <ide> if (clients.length === 2) { <del> /* <del> * Enroll two timers, and make the one supposed to fire first <del> * unenroll the other one supposed to fire later. This mutates <del> * the list of unref timers when traversing it, and exposes the <del> * original issue in joyent/node#8897. <del> */ <add> // Enroll two timers, and make the one supposed to fire first <add> // unenroll the other one supposed to fire later. This mutates <add> // the list of unref timers when traversing it, and exposes the <add> // original issue in joyent/node#8897. <ide> clients[0].setTimeout(1, () => { <ide> clients[1].setTimeout(0); <ide> clients[0].end(); <ide><path>test/parallel/test-timers-unref-active.js <ide> 'use strict'; <ide> <del>/* <del> * This test is aimed at making sure that unref timers queued with <del> * timers._unrefActive work correctly. 
<del> * <del> * Basically, it queues one timer in the unref queue, and then queues <del> * it again each time its timeout callback is fired until the callback <del> * has been called ten times. <del> * <del> * At that point, it unenrolls the unref timer so that its timeout callback <del> * is not fired ever again. <del> * <del> * Finally, a ref timeout is used with a delay large enough to make sure that <del> * all 10 timeouts had the time to expire. <del> */ <add>// This test is aimed at making sure that unref timers queued with <add>// timers._unrefActive work correctly. <add>// <add>// Basically, it queues one timer in the unref queue, and then queues <add>// it again each time its timeout callback is fired until the callback <add>// has been called ten times. <add>// <add>// At that point, it unenrolls the unref timer so that its timeout callback <add>// is not fired ever again. <add>// <add>// Finally, a ref timeout is used with a delay large enough to make sure that <add>// all 10 timeouts had the time to expire. <ide> <ide> require('../common'); <ide> const timers = require('timers'); <ide> const assert = require('assert'); <ide> const someObject = {}; <ide> let nbTimeouts = 0; <ide> <del>/* <del> * libuv 0.10.x uses GetTickCount on Windows to implement timers, which uses <del> * system's timers whose resolution is between 10 and 16ms. See <del> * http://msdn.microsoft.com/en-us/library/windows/desktop/ms724408.aspx <del> * for more information. That's the lowest resolution for timers across all <del> * supported platforms. We're using it as the lowest common denominator, <del> * and thus expect 5 timers to be able to fire in under 100 ms. <del> */ <add>// libuv 0.10.x uses GetTickCount on Windows to implement timers, which uses <add>// system's timers whose resolution is between 10 and 16ms. See <add>// http://msdn.microsoft.com/en-us/library/windows/desktop/ms724408.aspx <add>// for more information. That's the lowest resolution for timers across all <add>// supported platforms. We're using it as the lowest common denominator, <add>// and thus expect 5 timers to be able to fire in under 100 ms. <ide> const N = 5; <ide> const TEST_DURATION = 1000; <ide> <ide><path>test/parallel/test-timers-unref-remove-other-unref-timers-only-one-fires.js <ide> 'use strict'; <ide> <del>/* <del> * The goal of this test is to make sure that, after the regression introduced <del> * by 934bfe23a16556d05bfb1844ef4d53e8c9887c3d, the fix preserves the following <del> * behavior of unref timers: if two timers are scheduled to fire at the same <del> * time, if one unenrolls the other one in its _onTimeout callback, the other <del> * one will *not* fire. <del> * <del> * This behavior is a private implementation detail and should not be <del> * considered public interface. <del> */ <add> <add>// The goal of this test is to make sure that, after the regression introduced <add>// by 934bfe23a16556d05bfb1844ef4d53e8c9887c3d, the fix preserves the following <add>// behavior of unref timers: if two timers are scheduled to fire at the same <add>// time, if one unenrolls the other one in its _onTimeout callback, the other <add>// one will *not* fire. <add> <add>// This behavior is a private implementation detail and should not be <add>// considered public interface. 
<add> <ide> require('../common'); <ide> const timers = require('timers'); <ide> const assert = require('assert'); <ide><path>test/parallel/test-timers-unref-remove-other-unref-timers.js <ide> 'use strict'; <ide> <del>/* <del> * This test is a regression test for joyent/node#8897. <del> * <del> * It tests some private implementation details that should not be <del> * considered public interface. <del> */ <add>// Regression test for https://github.com/nodejs/node-v0.x-archive/issues/8897. <add> <add>// Test some private implementation details that should not be <add>// considered public interface. <ide> const common = require('../common'); <ide> const timers = require('timers'); <ide> <ide><path>test/parallel/test-timers-unrefd-interval-still-fires.js <ide> 'use strict'; <del>/* <del> * This test is a regression test for joyent/node#8900. <del> */ <add>// Regression test for https://github.com/nodejs/node-v0.x-archive/issues/8900. <ide> const common = require('../common'); <ide> <ide> const TEST_DURATION = common.platformTimeout(1000); <ide><path>test/parallel/test-tls-server-verify.js <ide> function runTest(port, testIndex) { <ide> rejectUnauthorized: tcase.rejectUnauthorized <ide> }; <ide> <del> /* <del> * If renegotiating - session might be resumed and openssl won't request <del> * client's certificate (probably because of bug in the openssl) <del> */ <add> // If renegotiating - session might be resumed and openssl won't request <add> // client's certificate (probably because of bug in the openssl) <ide> if (tcase.renegotiate) { <ide> serverOptions.secureOptions = <ide> SSL_OP_NO_SESSION_RESUMPTION_ON_RENEGOTIATION; <ide><path>test/parallel/test-tls-wrap-event-emmiter.js <ide> 'use strict'; <ide> <del>/* <del> * Issue: https://github.com/nodejs/node/issues/3655 <del> * Test checks if we get exception instead of runtime error <del> */ <add>// Issue: https://github.com/nodejs/node/issues/3655 <add>// Test checks if we get exception instead of runtime error <ide> <ide> const common = require('../common'); <ide> if (!common.hasCrypto) <ide><path>test/parallel/test-url-relative.js <ide> const url = require('url'); <ide> // When source is false <ide> assert.strictEqual(url.resolveObject('', 'foo'), 'foo'); <ide> <del>/* <del> [from, path, expected] <del>*/ <add>// [from, path, expected] <ide> const relativeTests = [ <ide> ['/foo/bar/baz', 'quux', '/foo/bar/quux'], <ide> ['/foo/bar/baz', 'quux/asdf', '/foo/bar/quux/asdf'], <ide><path>test/parallel/test-whatwg-url-constructor.js <ide> const request = { <ide> ) <ide> }; <ide> <del>/* The following tests are copied from WPT. Modifications to them should be <del> upstreamed first. Refs: <del> https://github.com/w3c/web-platform-tests/blob/8791bed/url/url-constructor.html <del> License: http://www.w3.org/Consortium/Legal/2008/04-testsuite-copyright.html <del>*/ <add>// The following tests are copied from WPT. Modifications to them should be <add>// upstreamed first. <add>// Refs: https://github.com/w3c/web-platform-tests/blob/8791bed/url/url-constructor.html <add>// License: http://www.w3.org/Consortium/Legal/2008/04-testsuite-copyright.html <add> <ide> /* eslint-disable */ <ide> function runURLConstructorTests() { <ide> // var setup = async_test("Loading data…") <ide><path>test/parallel/test-whatwg-url-origin.js <ide> const request = { <ide> ) <ide> }; <ide> <del>/* The following tests are copied from WPT. Modifications to them should be <del> upstreamed first. 
Refs: <del> https://github.com/w3c/web-platform-tests/blob/8791bed/url/url-origin.html <del> License: http://www.w3.org/Consortium/Legal/2008/04-testsuite-copyright.html <del>*/ <add>// The following tests are copied from WPT. Modifications to them should be <add>// upstreamed first. <add>// Refs: https://github.com/w3c/web-platform-tests/blob/8791bed/url/url-origin.html <add>// License: http://www.w3.org/Consortium/Legal/2008/04-testsuite-copyright.html <add> <ide> /* eslint-disable */ <ide> function runURLOriginTests() { <ide> // var setup = async_test("Loading data…") <ide><path>test/parallel/test-whatwg-url-setters.js <ide> const request = { <ide> )) <ide> }; <ide> <del>/* The following tests are copied from WPT. Modifications to them should be <del> upstreamed first. Refs: <del> https://github.com/w3c/web-platform-tests/blob/8791bed/url/url-setters.html <del> License: http://www.w3.org/Consortium/Legal/2008/04-testsuite-copyright.html <del>*/ <add>// The following tests are copied from WPT. Modifications to them should be <add>// upstreamed first. <add>// Refs: https://github.com/w3c/web-platform-tests/blob/8791bed/url/url-setters.html <add>// License: http://www.w3.org/Consortium/Legal/2008/04-testsuite-copyright.html <add> <ide> /* eslint-disable */ <ide> function startURLSettersTests() { <ide> // var setup = async_test("Loading data…") <ide><path>test/parallel/test-whatwg-url-toascii.js <ide> const request = { <ide> ) <ide> }; <ide> <del>/* The following tests are copied from WPT. Modifications to them should be <del> upstreamed first. Refs: <del> https://github.com/w3c/web-platform-tests/blob/4839a0a804/url/toascii.window.js <del> License: http://www.w3.org/Consortium/Legal/2008/04-testsuite-copyright.html <del>*/ <add>// The following tests are copied from WPT. Modifications to them should be <add>// upstreamed first. <add>// Refs: https://github.com/w3c/web-platform-tests/blob/4839a0a804/url/toascii.window.js <add>// License: http://www.w3.org/Consortium/Legal/2008/04-testsuite-copyright.html <add> <ide> /* eslint-disable */ <ide> // async_test(t => { <ide> // const request = new XMLHttpRequest() <ide><path>test/parallel/test-worker-debug.js <ide> class WorkerSession extends EventEmitter { <ide> } <ide> <ide> async function testBasicWorkerDebug(session, post) { <del> /* <del> 1. Do 'enable' with waitForDebuggerOnStart = true <del> 2. Run worker. It should break on start. <del> 3. Enable Runtime (to get console message) and Debugger. Resume. <del> 4. Breaks on the 'debugger' statement. Resume. <del> 5. Console message received, worker runs to a completion. <del> 6. contextCreated/contextDestroyed had been properly dispatched <del> */ <add> // 1. Do 'enable' with waitForDebuggerOnStart = true <add> // 2. Run worker. It should break on start. <add> // 3. Enable Runtime (to get console message) and Debugger. Resume. <add> // 4. Breaks on the 'debugger' statement. Resume. <add> // 5. Console message received, worker runs to a completion. <add> // 6. contextCreated/contextDestroyed had been properly dispatched <ide> console.log('Test basic debug scenario'); <ide> await post('NodeWorker.enable', { waitForDebuggerOnStart: true }); <ide> const attached = waitForWorkerAttach(session); <ide><path>test/sequential/test-child-process-exit.js <ide> assert.ok(!child.stderr); <ide> <ide> console.error('gen=%d, pid=%d', gen, process.pid); <ide> <del>/* <del>var timer = setTimeout(function() { <del> throw new Error('timeout! 
gen='+gen); <del>}, 1000); <del>*/ <del> <ide> child.on('exit', function(code) { <ide> console.error('exit %d from gen %d', code, gen + 1); <ide> }); <ide><path>test/sequential/test-inspector-port-cluster.js <ide> const childProcess = require('child_process'); <ide> <ide> let offset = 0; <ide> <del>/* <del> * This test suite checks that inspector port in cluster is incremented <del> * for different execArgv combinations <del> */ <add>// This test suite checks that inspector port in cluster is incremented <add>// for different execArgv combinations <ide> <ide> function testRunnerMain() { <ide> let defaultPortCase = spawnMaster({ <ide><path>test/sequential/test-pipe.js <ide> let gotThanks = false; <ide> let tcpLengthSeen = 0; <ide> <ide> <del>/* <del> * 5MB of random buffer. <del> */ <add>// 5MB of random buffer. <ide> const buffer = Buffer.allocUnsafe(bufferSize); <ide> for (let i = 0; i < buffer.length; i++) { <ide> buffer[i] = parseInt(Math.random() * 10000) % 256;
58
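The block comments rewritten above describe tests for unref'd timers, i.e. timers that do not keep the Node.js event loop alive on their own. A minimal sketch of that behaviour using only the public `timeout.unref()` API (the tests themselves exercise the private `timers._unrefActive` and `timers.unenroll` helpers, which are not shown here):

```js
'use strict';

// An unref'd timer will not, by itself, keep the process running.
const unrefd = setTimeout(() => {
  console.log('unref timer fired (something else kept the loop alive)');
}, 50);
unrefd.unref();

// A normal (ref'd) timer keeps the event loop alive long enough for the
// unref'd timer above to expire before the process exits.
setTimeout(() => {
  console.log('ref timer fired; the process may exit now');
}, 100);
```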
Python
Python
use single backticks when link needed
42fcdd0cc381a96d8096af7acda007317d08851f
<ide><path>numpy/core/numeric.py <ide> def allclose(a, b, rtol=1.e-5, atol=1.e-8, equal_nan=False): <ide> The comparison of `a` and `b` uses standard broadcasting, which <ide> means that `a` and `b` need not have the same shape in order for <ide> ``allclose(a, b)`` to evaluate to True. The same is true for <del> ``equal`` but not ``array_equal``. <add> `equal` but not `array_equal`. <ide> <ide> Examples <ide> --------
1
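The `allclose` docstring touched above points out that `allclose` and `equal` broadcast their arguments while `array_equal` does not. A small sketch of that difference (the array values are chosen arbitrarily):

```python
import numpy as np

a = np.array([[1.0, 2.0], [1.0, 2.0]])
b = np.array([1.0, 2.0])            # different shape, but broadcastable against a

print(np.allclose(a, b))            # True  -- b is broadcast across the rows of a
print(np.equal(a, b).all())         # True  -- element-wise equal broadcasts the same way
print(np.array_equal(a, b))         # False -- array_equal requires identical shapes
```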
Java
Java
provide common cache config
3c28301ded03b7163c8450cb3cc4787cb79a8595
<ide><path>spring-context/src/main/java/org/springframework/cache/annotation/AnnotationCacheOperationSource.java <ide> /* <del> * Copyright 2002-2012 the original author or authors. <add> * Copyright 2002-2014 the original author or authors. <ide> * <ide> * Licensed under the Apache License, Version 2.0 (the "License"); <ide> * you may not use this file except in compliance with the License. <ide> package org.springframework.cache.annotation; <ide> <ide> import java.io.Serializable; <del>import java.lang.reflect.AnnotatedElement; <ide> import java.lang.reflect.Method; <ide> import java.util.ArrayList; <ide> import java.util.Collection; <ide> * <ide> * @author Costin Leau <ide> * @author Juergen Hoeller <add> * @author Stephane Nicoll <ide> * @since 3.1 <ide> */ <ide> @SuppressWarnings("serial") <ide> public AnnotationCacheOperationSource(Set<CacheAnnotationParser> annotationParse <ide> <ide> <ide> @Override <del> protected Collection<CacheOperation> findCacheOperations(Class<?> clazz) { <del> return determineCacheOperations(clazz); <add> protected Collection<CacheOperation> findCacheOperations(final Class<?> clazz) { <add> return determineCacheOperations(new CacheOperationProvider() { <add> @Override <add> public Collection<CacheOperation> getCacheOperations(CacheAnnotationParser parser) { <add> return parser.parseCacheAnnotations(clazz); <add> } <add> }); <add> <ide> } <ide> <ide> @Override <del> protected Collection<CacheOperation> findCacheOperations(Method method) { <del> return determineCacheOperations(method); <add> protected Collection<CacheOperation> findCacheOperations(final Method method) { <add> return determineCacheOperations(new CacheOperationProvider() { <add> @Override <add> public Collection<CacheOperation> getCacheOperations(CacheAnnotationParser parser) { <add> return parser.parseCacheAnnotations(method); <add> } <add> }); <ide> } <ide> <ide> /** <del> * Determine the cache operation(s) for the given method or class. <add> * Determine the cache operation(s) for the given {@link CacheOperationProvider}. <ide> * <p>This implementation delegates to configured <ide> * {@link CacheAnnotationParser}s for parsing known annotations into <ide> * Spring's metadata attribute class. <ide> * <p>Can be overridden to support custom annotations that carry <ide> * caching metadata. <del> * @param ae the annotated method or class <add> * @param provider the cache operation provider to use <ide> * @return the configured caching operations, or {@code null} if none found <ide> */ <del> protected Collection<CacheOperation> determineCacheOperations(AnnotatedElement ae) { <add> protected Collection<CacheOperation> determineCacheOperations(CacheOperationProvider provider) { <ide> Collection<CacheOperation> ops = null; <ide> for (CacheAnnotationParser annotationParser : this.annotationParsers) { <del> Collection<CacheOperation> annOps = annotationParser.parseCacheAnnotations(ae); <add> Collection<CacheOperation> annOps = provider.getCacheOperations(annotationParser); <ide> if (annOps != null) { <ide> if (ops == null) { <ide> ops = new ArrayList<CacheOperation>(); <ide> public int hashCode() { <ide> return this.annotationParsers.hashCode(); <ide> } <ide> <add> /** <add> * Callback interface providing {@link CacheOperation} instance(s) based on <add> * a given {@link CacheAnnotationParser}. <add> */ <add> protected interface CacheOperationProvider { <add> <add> /** <add> * Returns the {@link CacheOperation} instance(s) provided by the specified parser. 
<add> * <add> * @param parser the parser to use <add> * @return the cache operations or {@code null} if none is found <add> */ <add> Collection<CacheOperation> getCacheOperations(CacheAnnotationParser parser); <add> } <add> <ide> } <ide><path>spring-context/src/main/java/org/springframework/cache/annotation/CacheAnnotationParser.java <ide> /* <del> * Copyright 2002-2012 the original author or authors. <add> * Copyright 2002-2014 the original author or authors. <ide> * <ide> * Licensed under the Apache License, Version 2.0 (the "License"); <ide> * you may not use this file except in compliance with the License. <ide> <ide> package org.springframework.cache.annotation; <ide> <del>import java.lang.reflect.AnnotatedElement; <add>import java.lang.reflect.Method; <ide> import java.util.Collection; <ide> <ide> import org.springframework.cache.interceptor.CacheOperation; <ide> * {@link Cacheable}, {@link CachePut} or {@link CacheEvict}. <ide> * <ide> * @author Costin Leau <add> * @author Stephane Nicoll <ide> * @since 3.1 <ide> */ <ide> public interface CacheAnnotationParser { <ide> <ide> /** <del> * Parses the cache definition for the given method or class, <add> * Parses the cache definition for the given class, <ide> * based on a known annotation type. <ide> * <p>This essentially parses a known cache annotation into Spring's <del> * metadata attribute class. Returns {@code null} if the method/class <add> * metadata attribute class. Returns {@code null} if the class <ide> * is not cacheable. <del> * @param ae the annotated method or class <add> * @param type the annotated class <ide> * @return CacheOperation the configured caching operation, <ide> * or {@code null} if none was found <del> * @see AnnotationCacheOperationSource#determineCacheOperations(AnnotatedElement) <add> * @see AnnotationCacheOperationSource#findCacheOperations(Class) <ide> */ <del> Collection<CacheOperation> parseCacheAnnotations(AnnotatedElement ae); <add> Collection<CacheOperation> parseCacheAnnotations(Class<?> type); <add> <add> /** <add> * Parses the cache definition for the given method, <add> * based on a known annotation type. <add> * <p>This essentially parses a known cache annotation into Spring's <add> * metadata attribute class. Returns {@code null} if the method <add> * is not cacheable. <add> * @param method the annotated method <add> * @return CacheOperation the configured caching operation, <add> * or {@code null} if none was found <add> * @see AnnotationCacheOperationSource#findCacheOperations(Method) <add> */ <add> Collection<CacheOperation> parseCacheAnnotations(Method method); <ide> } <ide><path>spring-context/src/main/java/org/springframework/cache/annotation/CacheConfig.java <add>/* <add> * Copyright 2002-2014 the original author or authors. <add> * <add> * Licensed under the Apache License, Version 2.0 (the "License"); <add> * you may not use this file except in compliance with the License. <add> * You may obtain a copy of the License at <add> * <add> * http://www.apache.org/licenses/LICENSE-2.0 <add> * <add> * Unless required by applicable law or agreed to in writing, software <add> * distributed under the License is distributed on an "AS IS" BASIS, <add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <add> * See the License for the specific language governing permissions and <add> * limitations under the License. 
<add> */ <add> <add>package org.springframework.cache.annotation; <add> <add>import java.lang.annotation.Documented; <add>import java.lang.annotation.ElementType; <add>import java.lang.annotation.Retention; <add>import java.lang.annotation.RetentionPolicy; <add>import java.lang.annotation.Target; <add> <add>/** <add> * Provide a way to share common cache-related settings at class-level. <add> * <p>When this annotation is present on a given class, it provides a set <add> * of default settings for any cache operation defined on that class. <add> * <add> * @author Stephane Nicoll <add> * @since 4.1 <add> */ <add>@Target({ElementType.TYPE}) <add>@Retention(RetentionPolicy.RUNTIME) <add>@Documented <add>public @interface CacheConfig { <add> <add> /** <add> * Name of the default caches to consider for a caching operation defined in the class. <add> * <p>If none is set at the operation level, these ones are used instead of the default. <add> * <p>May be used to determine the target cache (or caches), matching the <add> * qualifier value (or the bean name(s)) of (a) specific bean definition. <add> */ <add> String[] cacheNames() default {}; <add> <add> /** <add> * The bean name of the default {@link org.springframework.cache.interceptor.KeyGenerator} to <add> * use for the class. <add> * <p>If none is set at the operation level, this one is used instead of the default. <add> * <p>The key generator is mutually exclusive with the use of a custom key. When such key is <add> * defined for the operation, the value of this key generator is ignored. <add> */ <add> String keyGenerator() default ""; <add> <add> /** <add> * The bean name of the custom {@link org.springframework.cache.CacheManager} to use. <add> * <p>If none is set at the operation level, this one is used instead of the default. <add> */ <add> String cacheManager() default ""; <add>} <add> <add> <ide><path>spring-context/src/main/java/org/springframework/cache/annotation/CacheEvict.java <ide> * @author Costin Leau <ide> * @author Stephane Nicoll <ide> * @since 3.1 <add> * @see CacheConfig <ide> */ <ide> @Target({ElementType.METHOD, ElementType.TYPE}) <ide> @Retention(RetentionPolicy.RUNTIME) <ide> * <p>May be used to determine the target cache (or caches), matching the qualifier <ide> * value (or the bean name(s)) of (a) specific bean definition. <ide> */ <del> String[] value(); <add> String[] value() default {}; <ide> <ide> /** <ide> * Spring Expression Language (SpEL) attribute for computing the key dynamically. <ide><path>spring-context/src/main/java/org/springframework/cache/annotation/CachePut.java <ide> * @author Phillip Webb <ide> * @author Stephane Nicoll <ide> * @since 3.1 <add> * @see CacheConfig <ide> */ <ide> @Target({ ElementType.METHOD, ElementType.TYPE }) <ide> @Retention(RetentionPolicy.RUNTIME) <ide> * <p>May be used to determine the target cache (or caches), matching the <ide> * qualifier value (or the bean name(s)) of (a) specific bean definition. <ide> */ <del> String[] value(); <add> String[] value() default {}; <ide> <ide> /** <ide> * Spring Expression Language (SpEL) attribute for computing the key dynamically. 
<ide><path>spring-context/src/main/java/org/springframework/cache/annotation/Cacheable.java <ide> * @author Phillip Webb <ide> * @author Stephane Nicoll <ide> * @since 3.1 <add> * @see CacheConfig <ide> */ <ide> @Target({ElementType.METHOD, ElementType.TYPE}) <ide> @Retention(RetentionPolicy.RUNTIME) <ide> * <p>May be used to determine the target cache (or caches), matching the <ide> * qualifier value (or the bean name(s)) of (a) specific bean definition. <ide> */ <del> String[] value(); <add> String[] value() default {}; <ide> <ide> /** <ide> * Spring Expression Language (SpEL) attribute for computing the key dynamically. <ide><path>spring-context/src/main/java/org/springframework/cache/annotation/SpringCacheAnnotationParser.java <ide> import java.io.Serializable; <ide> import java.lang.annotation.Annotation; <ide> import java.lang.reflect.AnnotatedElement; <add>import java.lang.reflect.Method; <ide> import java.util.ArrayList; <ide> import java.util.Collection; <ide> <ide> import org.springframework.cache.interceptor.CacheEvictOperation; <ide> import org.springframework.cache.interceptor.CacheOperation; <ide> import org.springframework.cache.interceptor.CachePutOperation; <ide> import org.springframework.cache.interceptor.CacheableOperation; <add>import org.springframework.core.annotation.AnnotationUtils; <ide> import org.springframework.util.ObjectUtils; <ide> import org.springframework.util.StringUtils; <ide> <ide> public class SpringCacheAnnotationParser implements CacheAnnotationParser, Serializable { <ide> <ide> @Override <del> public Collection<CacheOperation> parseCacheAnnotations(AnnotatedElement ae) { <add> public Collection<CacheOperation> parseCacheAnnotations(Class<?> type) { <add> DefaultCacheConfig defaultConfig = getDefaultCacheConfig(type); <add> return parseCacheAnnotations(defaultConfig, type); <add> } <add> <add> @Override <add> public Collection<CacheOperation> parseCacheAnnotations(Method method) { <add> DefaultCacheConfig defaultConfig = getDefaultCacheConfig(method.getDeclaringClass()); <add> return parseCacheAnnotations(defaultConfig, method); <add> } <add> <add> protected Collection<CacheOperation> parseCacheAnnotations(DefaultCacheConfig cachingConfig, <add> AnnotatedElement ae) { <ide> Collection<CacheOperation> ops = null; <ide> <ide> Collection<Cacheable> cacheables = getAnnotations(ae, Cacheable.class); <ide> if (cacheables != null) { <ide> ops = lazyInit(ops); <ide> for (Cacheable cacheable : cacheables) { <del> ops.add(parseCacheableAnnotation(ae, cacheable)); <add> ops.add(parseCacheableAnnotation(ae, cachingConfig, cacheable)); <ide> } <ide> } <ide> Collection<CacheEvict> evicts = getAnnotations(ae, CacheEvict.class); <ide> if (evicts != null) { <ide> ops = lazyInit(ops); <ide> for (CacheEvict e : evicts) { <del> ops.add(parseEvictAnnotation(ae, e)); <add> ops.add(parseEvictAnnotation(ae, cachingConfig, e)); <ide> } <ide> } <ide> Collection<CachePut> updates = getAnnotations(ae, CachePut.class); <ide> if (updates != null) { <ide> ops = lazyInit(ops); <ide> for (CachePut p : updates) { <del> ops.add(parseUpdateAnnotation(ae, p)); <add> ops.add(parseUpdateAnnotation(ae, cachingConfig, p)); <ide> } <ide> } <ide> Collection<Caching> caching = getAnnotations(ae, Caching.class); <ide> if (caching != null) { <ide> ops = lazyInit(ops); <ide> for (Caching c : caching) { <del> ops.addAll(parseCachingAnnotation(ae, c)); <add> ops.addAll(parseCachingAnnotation(ae, cachingConfig, c)); <ide> } <ide> } <ide> return ops; <ide> private <T extends Annotation> 
Collection<CacheOperation> lazyInit(Collection<Ca <ide> return (ops != null ? ops : new ArrayList<CacheOperation>(1)); <ide> } <ide> <del> CacheableOperation parseCacheableAnnotation(AnnotatedElement ae, Cacheable caching) { <add> CacheableOperation parseCacheableAnnotation(AnnotatedElement ae, <add> DefaultCacheConfig defaultConfig, Cacheable caching) { <ide> CacheableOperation cuo = new CacheableOperation(); <ide> cuo.setCacheNames(caching.value()); <ide> cuo.setCondition(caching.condition()); <ide> CacheableOperation parseCacheableAnnotation(AnnotatedElement ae, Cacheable cachi <ide> cuo.setCacheManager(caching.cacheManager()); <ide> cuo.setName(ae.toString()); <ide> <del> checkKeySourceConsistency(ae, caching.key(), caching.keyGenerator()); <add> defaultConfig.applyDefault(cuo); <add> <add> validateCacheOperation(ae, cuo); <ide> return cuo; <ide> } <ide> <del> CacheEvictOperation parseEvictAnnotation(AnnotatedElement ae, CacheEvict caching) { <add> CacheEvictOperation parseEvictAnnotation(AnnotatedElement ae, <add> DefaultCacheConfig defaultConfig, CacheEvict caching) { <ide> CacheEvictOperation ceo = new CacheEvictOperation(); <ide> ceo.setCacheNames(caching.value()); <ide> ceo.setCondition(caching.condition()); <ide> CacheEvictOperation parseEvictAnnotation(AnnotatedElement ae, CacheEvict caching <ide> ceo.setBeforeInvocation(caching.beforeInvocation()); <ide> ceo.setName(ae.toString()); <ide> <del> checkKeySourceConsistency(ae, caching.key(), caching.keyGenerator()); <add> defaultConfig.applyDefault(ceo); <add> <add> validateCacheOperation(ae, ceo); <ide> return ceo; <ide> } <ide> <del> CacheOperation parseUpdateAnnotation(AnnotatedElement ae, CachePut caching) { <add> CacheOperation parseUpdateAnnotation(AnnotatedElement ae, <add> DefaultCacheConfig defaultConfig, CachePut caching) { <ide> CachePutOperation cuo = new CachePutOperation(); <ide> cuo.setCacheNames(caching.value()); <ide> cuo.setCondition(caching.condition()); <ide> CacheOperation parseUpdateAnnotation(AnnotatedElement ae, CachePut caching) { <ide> cuo.setCacheManager(caching.cacheManager()); <ide> cuo.setName(ae.toString()); <ide> <del> checkKeySourceConsistency(ae, caching.key(), caching.keyGenerator()); <add> defaultConfig.applyDefault(cuo); <add> <add> validateCacheOperation(ae, cuo); <ide> return cuo; <ide> } <ide> <del> Collection<CacheOperation> parseCachingAnnotation(AnnotatedElement ae, Caching caching) { <add> Collection<CacheOperation> parseCachingAnnotation(AnnotatedElement ae, <add> DefaultCacheConfig defaultConfig, Caching caching) { <ide> Collection<CacheOperation> ops = null; <ide> <ide> Cacheable[] cacheables = caching.cacheable(); <ide> if (!ObjectUtils.isEmpty(cacheables)) { <ide> ops = lazyInit(ops); <ide> for (Cacheable cacheable : cacheables) { <del> ops.add(parseCacheableAnnotation(ae, cacheable)); <add> ops.add(parseCacheableAnnotation(ae, defaultConfig, cacheable)); <ide> } <ide> } <ide> CacheEvict[] evicts = caching.evict(); <ide> if (!ObjectUtils.isEmpty(evicts)) { <ide> ops = lazyInit(ops); <ide> for (CacheEvict evict : evicts) { <del> ops.add(parseEvictAnnotation(ae, evict)); <add> ops.add(parseEvictAnnotation(ae, defaultConfig, evict)); <ide> } <ide> } <ide> CachePut[] updates = caching.put(); <ide> if (!ObjectUtils.isEmpty(updates)) { <ide> ops = lazyInit(ops); <ide> for (CachePut update : updates) { <del> ops.add(parseUpdateAnnotation(ae, update)); <add> ops.add(parseUpdateAnnotation(ae, defaultConfig, update)); <ide> } <ide> } <ide> <ide> return ops; <ide> } <ide> <add> /** <add> * 
Provides the {@link DefaultCacheConfig} instance for the specified {@link Class}. <add> * <add> * @param target the class-level to handle <add> * @return the default config (never {@code null}) <add> */ <add> DefaultCacheConfig getDefaultCacheConfig(Class<?> target) { <add> final CacheConfig annotation = AnnotationUtils.getAnnotation(target, CacheConfig.class); <add> if (annotation != null) { <add> return new DefaultCacheConfig(annotation.cacheManager(), <add> annotation.keyGenerator(), annotation.cacheNames()); <add> } <add> return new DefaultCacheConfig(); <add> } <add> <ide> private <T extends Annotation> Collection<T> getAnnotations(AnnotatedElement ae, Class<T> annotationType) { <ide> Collection<T> anns = new ArrayList<T>(2); <ide> <ide> private <T extends Annotation> Collection<T> getAnnotations(AnnotatedElement ae, <ide> return (anns.isEmpty() ? null : anns); <ide> } <ide> <del> private void checkKeySourceConsistency(AnnotatedElement ae, String key, String keyGenerator) { <del> if (StringUtils.hasText(key) && StringUtils.hasText(keyGenerator)) { <add> /** <add> * Validates the specified {@link CacheOperation}. <add> * <p>Throws an {@link IllegalStateException} if the state of the operation is <add> * invalid. As there might be multiple sources for default values, this ensure <add> * that the operation is in a proper state before being returned. <add> * <add> * @param ae the annotated element of the cache operation <add> * @param operation the {@link CacheOperation} to validate <add> */ <add> private void validateCacheOperation(AnnotatedElement ae, CacheOperation operation) { <add> if (StringUtils.hasText(operation.getKey()) && StringUtils.hasText(operation.getKeyGenerator())) { <ide> throw new IllegalStateException("Invalid cache annotation configuration on '" <ide> + ae.toString() + "'. Both 'key' and 'keyGenerator' attributes have been set. " + <ide> "These attributes are mutually exclusive: either set the SpEL expression used to" + <ide> "compute the key at runtime or set the name of the KeyGenerator bean to use."); <ide> } <add> if (operation.getCacheNames().isEmpty()) { <add> throw new IllegalStateException("No cache names could be detected on '" <add> + ae.toString()+ "'. Make sure to set the value parameter on the annotation or" + <add> "declare a @CacheConfig at the class-level with the default cache name(s) to use."); <add> } <ide> } <ide> <ide> @Override <ide> public int hashCode() { <ide> return SpringCacheAnnotationParser.class.hashCode(); <ide> } <ide> <add> /** <add> * Provides default settings for a given set of cache operations. <add> */ <add> static class DefaultCacheConfig { <add> private final String cacheManager; <add> private final String keyGenerator; <add> private final String[] cacheNames; <add> <add> private DefaultCacheConfig(String cacheManager, String keyGenerator, String[] cacheNames) { <add> this.cacheManager = cacheManager; <add> this.keyGenerator = keyGenerator; <add> this.cacheNames = cacheNames; <add> } <add> <add> public DefaultCacheConfig() { <add> this(null, null, null); <add> } <add> <add> /** <add> * Apply the defaults to the specified {@link CacheOperation}. 
<add> * <add> * @param operation the operation to update <add> */ <add> public void applyDefault(CacheOperation operation) { <add> if (!StringUtils.hasText(operation.getCacheManager()) && StringUtils.hasText(cacheManager)) { <add> operation.setCacheManager(cacheManager); <add> } <add> if (!StringUtils.hasText(operation.getKey()) && !StringUtils.hasText(operation.getKeyGenerator()) <add> && StringUtils.hasText(keyGenerator)) { <add> operation.setKeyGenerator(keyGenerator); <add> } <add> if (operation.getCacheNames().isEmpty() && cacheNames != null) { <add> operation.setCacheNames(cacheNames); <add> } <add> } <add> } <add> <ide> } <ide><path>spring-context/src/main/java/org/springframework/cache/interceptor/CacheOperation.java <ide> public void setCacheName(String cacheName) { <ide> } <ide> <ide> public void setCacheNames(String[] cacheNames) { <del> Assert.notEmpty(cacheNames); <ide> this.cacheNames = new LinkedHashSet<String>(cacheNames.length); <del> for (String string : cacheNames) { <del> this.cacheNames.add(string); <add> for (String cacheName : cacheNames) { <add> Assert.hasText(cacheName, "Cache name must be set if specified."); <add> this.cacheNames.add(cacheName); <ide> } <ide> } <ide> <ide><path>spring-context/src/test/java/org/springframework/cache/annotation/AnnotationCacheOperationSourceTests.java <ide> import java.util.Collection; <ide> import java.util.Iterator; <ide> <add>import org.junit.Rule; <ide> import org.junit.Test; <add>import org.junit.rules.ExpectedException; <ide> import org.springframework.cache.interceptor.CacheEvictOperation; <ide> import org.springframework.cache.interceptor.CacheOperation; <ide> import org.springframework.cache.interceptor.CacheableOperation; <ide> */ <ide> public class AnnotationCacheOperationSourceTests { <ide> <add> @Rule <add> public final ExpectedException thrown = ExpectedException.none(); <add> <ide> private AnnotationCacheOperationSource source = new AnnotationCacheOperationSource(); <ide> <del> private Collection<CacheOperation> getOps(String name) { <del> Method method = ReflectionUtils.findMethod(AnnotatedClass.class, name); <del> return source.getCacheOperations(method, AnnotatedClass.class); <add> private Collection<CacheOperation> getOps(Class<?> target, String name, <add> int expectedNumberOfOperations) { <add> Collection<CacheOperation> result = getOps(target, name); <add> assertEquals("Wrong number of operation(s) for '"+name+"'", <add> expectedNumberOfOperations, result.size()); <add> return result; <add> } <add> <add> private Collection<CacheOperation> getOps(Class<?> target, String name) { <add> Method method = ReflectionUtils.findMethod(target, name); <add> return source.getCacheOperations(method, target); <ide> } <ide> <ide> @Test <ide> public void testSingularAnnotation() throws Exception { <del> Collection<CacheOperation> ops = getOps("singular"); <del> assertEquals(1, ops.size()); <add> Collection<CacheOperation> ops = getOps(AnnotatedClass.class, "singular", 1); <ide> assertTrue(ops.iterator().next() instanceof CacheableOperation); <ide> } <ide> <ide> @Test <ide> public void testMultipleAnnotation() throws Exception { <del> Collection<CacheOperation> ops = getOps("multiple"); <del> assertEquals(2, ops.size()); <add> Collection<CacheOperation> ops = getOps(AnnotatedClass.class, "multiple", 2); <ide> Iterator<CacheOperation> it = ops.iterator(); <ide> assertTrue(it.next() instanceof CacheableOperation); <ide> assertTrue(it.next() instanceof CacheEvictOperation); <ide> } <ide> <ide> @Test <ide> public void testCaching() 
throws Exception { <del> Collection<CacheOperation> ops = getOps("caching"); <del> assertEquals(2, ops.size()); <add> Collection<CacheOperation> ops = getOps(AnnotatedClass.class, "caching", 2); <ide> Iterator<CacheOperation> it = ops.iterator(); <ide> assertTrue(it.next() instanceof CacheableOperation); <ide> assertTrue(it.next() instanceof CacheEvictOperation); <ide> } <ide> <ide> @Test <ide> public void testSingularStereotype() throws Exception { <del> Collection<CacheOperation> ops = getOps("singleStereotype"); <del> assertEquals(1, ops.size()); <add> Collection<CacheOperation> ops = getOps(AnnotatedClass.class, "singleStereotype", 1); <ide> assertTrue(ops.iterator().next() instanceof CacheEvictOperation); <ide> } <ide> <ide> @Test <ide> public void testMultipleStereotypes() throws Exception { <del> Collection<CacheOperation> ops = getOps("multipleStereotype"); <del> assertEquals(3, ops.size()); <add> Collection<CacheOperation> ops = getOps(AnnotatedClass.class, "multipleStereotype", 3); <ide> Iterator<CacheOperation> it = ops.iterator(); <ide> assertTrue(it.next() instanceof CacheableOperation); <ide> CacheOperation next = it.next(); <ide> public void testMultipleStereotypes() throws Exception { <ide> <ide> @Test <ide> public void testCustomKeyGenerator() { <del> Collection<CacheOperation> ops = getOps("customKeyGenerator"); <del> assertEquals(1, ops.size()); <add> Collection<CacheOperation> ops = getOps(AnnotatedClass.class, "customKeyGenerator", 1); <ide> CacheOperation cacheOperation = ops.iterator().next(); <ide> assertEquals("Custom key generator not set", "custom", cacheOperation.getKeyGenerator()); <ide> } <ide> <ide> @Test <ide> public void testCustomKeyGeneratorInherited() { <del> Collection<CacheOperation> ops = getOps("customKeyGeneratorInherited"); <del> assertEquals(1, ops.size()); <add> Collection<CacheOperation> ops = getOps(AnnotatedClass.class, "customKeyGeneratorInherited", 1); <ide> CacheOperation cacheOperation = ops.iterator().next(); <ide> assertEquals("Custom key generator not set", "custom", cacheOperation.getKeyGenerator()); <ide> } <ide> <ide> @Test <ide> public void testKeyAndKeyGeneratorCannotBeSetTogether() { <ide> try { <del> getOps("invalidKeyAndKeyGeneratorSet"); <add> getOps(AnnotatedClass.class, "invalidKeyAndKeyGeneratorSet"); <ide> fail("Should have failed to parse @Cacheable annotation"); <ide> } catch (IllegalStateException e) { <ide> // expected <ide> public void testKeyAndKeyGeneratorCannotBeSetTogether() { <ide> <ide> @Test <ide> public void testCustomCacheManager() { <del> Collection<CacheOperation> ops = getOps("customCacheManager"); <del> assertEquals(1, ops.size()); <add> Collection<CacheOperation> ops = getOps(AnnotatedClass.class, "customCacheManager", 1); <ide> CacheOperation cacheOperation = ops.iterator().next(); <ide> assertEquals("Custom cache manager not set", "custom", cacheOperation.getCacheManager()); <ide> } <ide> <ide> @Test <ide> public void testCustomCacheManagerInherited() { <del> Collection<CacheOperation> ops = getOps("customCacheManagerInherited"); <del> assertEquals(1, ops.size()); <add> Collection<CacheOperation> ops = getOps(AnnotatedClass.class, "customCacheManagerInherited", 1); <ide> CacheOperation cacheOperation = ops.iterator().next(); <ide> assertEquals("Custom cache manager not set", "custom", cacheOperation.getCacheManager()); <ide> } <ide> <add> @Test <add> public void fullClassLevelWithCustomKeyManager() { <add> Collection<CacheOperation> ops = getOps(AnnotatedClassWithFullDefault.class, 
"methodLevelKeyGenerator", 1); <add> CacheOperation cacheOperation = ops.iterator().next(); <add> assertSharedConfig(cacheOperation, "classCacheManager", "custom", "classCacheName"); <add> } <add> <add> @Test <add> public void fullClassLevelWithCustomCacheManager() { <add> Collection<CacheOperation> ops = getOps(AnnotatedClassWithFullDefault.class, "methodLevelCacheManager", 1); <add> CacheOperation cacheOperation = ops.iterator().next(); <add> assertSharedConfig(cacheOperation, "custom", "classKeyGenerator", "classCacheName"); <add> } <add> <add> @Test <add> public void fullClassLevelWithCustomCacheName() { <add> Collection<CacheOperation> ops = getOps(AnnotatedClassWithFullDefault.class, "methodLevelCacheName", 1); <add> CacheOperation cacheOperation = ops.iterator().next(); <add> assertSharedConfig(cacheOperation, "classCacheManager", "classKeyGenerator", "custom"); <add> } <add> <add> @Test <add> public void validateAtLeastOneCacheNameMustBeSet() { <add> thrown.expect(IllegalStateException.class); <add> getOps(AnnotatedClass.class, "noCacheNameSpecified"); <add> } <add> <add> @Test <add> public void customClassLevelWithCustomCacheName() { <add> Collection<CacheOperation> ops = getOps(AnnotatedClassWithCustomDefault.class, "methodLevelCacheName", 1); <add> CacheOperation cacheOperation = ops.iterator().next(); <add> assertSharedConfig(cacheOperation, "classCacheManager", "classKeyGenerator", "custom"); <add> } <add> <add> @Test <add> public void severalCacheConfigUseClosest() { <add> Collection<CacheOperation> ops = getOps(MultipleCacheConfig.class, "multipleCacheConfig"); <add> CacheOperation cacheOperation = ops.iterator().next(); <add> assertSharedConfig(cacheOperation, "", "", "myCache"); <add> } <add> <add> private void assertSharedConfig(CacheOperation actual, String cacheManager, <add> String keyGenerator, String... cacheNames) { <add> assertEquals("Wrong cache manager", cacheManager, actual.getCacheManager()); <add> assertEquals("Wrong key manager", keyGenerator, actual.getKeyGenerator()); <add> for (String cacheName : cacheNames) { <add> assertTrue("Cache '"+cacheName+"' not found (got "+actual.getCacheNames(), <add> actual.getCacheNames().contains(cacheName)); <add> } <add> assertEquals("Wrong number of cache name(s)", cacheNames.length, actual.getCacheNames().size()); <add> } <add> <ide> private static class AnnotatedClass { <ide> @Cacheable("test") <ide> public void singular() { <ide> public void invalidKeyAndKeyGeneratorSet() { <ide> @CacheableFooCustomCacheManager <ide> public void customCacheManagerInherited() { <ide> } <add> <add> @Cacheable // cache name can be inherited from CacheConfig. 
There's none here <add> public void noCacheNameSpecified() { <add> } <add> } <add> <add> @CacheConfig(cacheNames = "classCacheName", <add> cacheManager = "classCacheManager", keyGenerator = "classKeyGenerator") <add> private static class AnnotatedClassWithFullDefault { <add> <add> @Cacheable(keyGenerator = "custom") <add> public void methodLevelKeyGenerator() { <add> } <add> <add> @Cacheable(cacheManager = "custom") <add> public void methodLevelCacheManager() { <add> } <add> <add> @Cacheable("custom") <add> public void methodLevelCacheName() { <add> } <add> } <add> <add> @CacheConfigFoo <add> private static class AnnotatedClassWithCustomDefault { <add> <add> @Cacheable("custom") <add> public void methodLevelCacheName() { <add> } <add> } <add> <add> @CacheConfigFoo <add> @CacheConfig(cacheNames = "myCache") // multiple sources <add> private static class MultipleCacheConfig { <add> <add> @Cacheable <add> public void multipleCacheConfig() { <add> } <ide> } <ide> <ide> @Retention(RetentionPolicy.RUNTIME) <ide> public void customCacheManagerInherited() { <ide> @CacheEvict(value = "bar") <ide> public @interface EvictBar { <ide> } <add> <add> @Retention(RetentionPolicy.RUNTIME) <add> @Target(ElementType.TYPE) <add> @CacheConfig(cacheManager = "classCacheManager", keyGenerator = "classKeyGenerator") <add> public @interface CacheConfigFoo { <add> } <ide> } <ide>\ No newline at end of file
9
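The new `@CacheConfig` annotation added in this patch supplies class-level defaults (cache names, key generator, cache manager) that individual `@Cacheable`, `@CachePut` and `@CacheEvict` operations fall back to when they do not set those attributes themselves. A minimal usage sketch; the service class, cache name and key generator bean name below are invented for illustration:

```java
import org.springframework.cache.annotation.CacheConfig;
import org.springframework.cache.annotation.CacheEvict;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.stereotype.Service;

// Every caching operation in this class defaults to the "books" cache and
// the "isbnKeyGenerator" bean unless it overrides them explicitly.
@Service
@CacheConfig(cacheNames = "books", keyGenerator = "isbnKeyGenerator")
public class BookService {

    @Cacheable                       // no cache name needed: "books" comes from @CacheConfig
    public String findTitleByIsbn(String isbn) {
        return expensiveLookup(isbn);
    }

    @CacheEvict(allEntries = true)   // also targets the "books" cache
    public void refresh() {
    }

    private String expensiveLookup(String isbn) {
        return "Title for " + isbn;
    }
}
```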
PHP
PHP
add coding standards ignore comments to fileengine
0891073d191f179a024c6c1f372082d489c96938
<ide><path>lib/Cake/Cache/Engine/FileEngine.php <ide> public function delete($key) { <ide> } <ide> $path = $this->_File->getRealPath(); <ide> $this->_File = null; <add> <add> //@codingStandardsIgnoreStart <ide> return @unlink($path); <add> //@codingStandardsIgnoreEnd <ide> } <ide> <ide> /** <ide> protected function _clearDirectory($path, $now, $threshold) { <ide> } <ide> } <ide> if ($file->isFile()) { <del> $_path = $file->getRealPath(); <add> $filePath = $file->getRealPath(); <ide> $file = null; <del> @unlink($_path); <add> <add> //@codingStandardsIgnoreStart <add> @unlink($filePath); <add> //@codingStandardsIgnoreEnd <ide> } <ide> } <ide> } <ide> public function clearGroup($group) { <ide> $containsGroup = strpos($object->getPathName(), DS . $group . DS) !== false; <ide> $hasPrefix = strpos($object->getBaseName(), $this->settings['prefix']) === 0; <ide> if ($object->isFile() && $containsGroup && $hasPrefix) { <del> @unlink($object->getPathName()); <add> $path = $object->getPathName(); <add> $object = null; <add> //@codingStandardsIgnoreStart <add> @unlink($path); <add> //@codingStandardsIgnoreEnd <ide> } <ide> } <ide> return true;
1
Text
Text
change line 21 '燒毀vs燒毀圖表' to '燃尽vs燃尽图表'
6e9eccc792a3e7720cee214685ec8ece48e97d00
<ide><path>guide/chinese/agile/burndown-charts-and-burnup-charts/index.md <ide> Burnup图表非常相似,但它们显示已完成的工作与总工作量和 <ide> <ide> #### 更多信息: <ide> <del>[Burndown图表 - 维基百科](https://en.wikipedia.org/wiki/Burn_down_chart) [烧毁vs烧毁图表 - LinkedIn](https://www.linkedin.com/pulse/burn-up-vs-down-chart-alaa-el-beheri-cisa-rmp-pmp-bcp-itil/) <ide>\ No newline at end of file <add>[Burndown图表 - 维基百科](https://en.wikipedia.org/wiki/Burn_down_chart) [燃尽vs燃尽图表 - LinkedIn](https://www.linkedin.com/pulse/burn-up-vs-down-chart-alaa-el-beheri-cisa-rmp-pmp-bcp-itil/)
1
Javascript
Javascript
handle line width of zero in svg
ddabeb06452494b60fc366016bbdaa3df6eb655c
<ide><path>src/display/svg.js <ide> SVGGraphics = (function SVGGraphicsClosure() { <ide> <ide> // Path properties <ide> setLineWidth: function SVGGraphics_setLineWidth(width) { <del> this.current.lineWidth = width; <add> if (width > 0) { <add> this.current.lineWidth = width; <add> } <ide> }, <ide> setLineCap: function SVGGraphics_setLineCap(style) { <ide> this.current.lineCap = LINE_CAP_STYLES[style];
1
Mixed
Ruby
invalidate transaction as early as possible
bf24af73ff146e884339a215ebe30d3fc5a88091
<ide><path>activerecord/CHANGELOG.md <add>* Invalidate transaction as early as possible <add> <add> After rescuing a `TransactionRollbackError` exception Rails invalidates transactions earlier in the flow <add> allowing the framework to skip issuing the `ROLLBACK` statement in more cases. <add> Only affects adapters that have `savepoint_errors_invalidate_transactions?` configured as `true`, <add> which at this point is only applicable to the `mysql2` adapter. <add> <add> *Nikita Vasilevsky* <add> <ide> * Allow configuring columns list to be used in SQL queries issued by an `ActiveRecord::Base` object <ide> <ide> It is now possible to configure columns list that will be used to build an SQL query clauses when <ide><path>activerecord/lib/active_record/connection_adapters/abstract/transaction.rb <ide> def within_new_transaction(isolation: nil, joinable: true) <ide> ret <ide> rescue Exception => error <ide> if transaction <del> if error.is_a?(ActiveRecord::TransactionRollbackError) && <del> @connection.savepoint_errors_invalidate_transactions? <del> transaction.state.invalidate! <del> end <ide> rollback_transaction <ide> after_failure_actions(transaction, error) <ide> end <ide><path>activerecord/lib/active_record/connection_adapters/abstract_adapter.rb <ide> def with_raw_connection(allow_retry: false, uses_transaction: true) <ide> result <ide> rescue => original_exception <ide> translated_exception = translate_exception_class(original_exception, nil, nil) <add> invalidate_transaction(translated_exception) <ide> retry_deadline_exceeded = deadline && deadline < Process.clock_gettime(Process::CLOCK_MONOTONIC) <ide> <ide> if !retry_deadline_exceeded && retries_available > 0 <ide> def retryable_connection_error?(exception) <ide> exception.is_a?(ConnectionNotEstablished) || exception.is_a?(ConnectionFailed) <ide> end <ide> <add> def invalidate_transaction(exception) <add> return unless exception.is_a?(TransactionRollbackError) <add> return unless savepoint_errors_invalidate_transactions? <add> <add> current_transaction.state.invalidate! if current_transaction <add> end <add> <ide> def retryable_query_error?(exception) <ide> # We definitely can't retry if we were inside a transaction that was instantly <ide> # rolled back by this error <ide><path>activerecord/test/cases/adapter_test.rb <ide> def test_advisory_locks_enabled? <ide> end <ide> end <ide> end <add> <add>if ActiveRecord::Base.connection.savepoint_errors_invalidate_transactions? <add> class InvalidateTransactionTest < ActiveRecord::TestCase <add> def test_invalidates_transaction_on_rollback_error <add> @invalidated = false <add> connection = ActiveRecord::Base.connection <add> <add> connection.transaction do <add> connection.send(:with_raw_connection) do <add> raise ActiveRecord::Deadlocked, "made-up deadlock" <add> end <add> <add> rescue ActiveRecord::Deadlocked => error <add> flunk("Rescuing wrong error") unless error.message == "made-up deadlock" <add> <add> @invalidated = connection.current_transaction.state.invalidated? <add> end <add> <add> # asserting outside of the transaction to make sure we actually reach the end of the test <add> # and perform the assertion <add> assert @invalidated <add> end <add> end <add>end <ide><path>activerecord/test/cases/adapters/mysql2/nested_deadlock_test.rb <ide> class Sample < ActiveRecord::Base <ide> assert_predicate connection, :active? 
<ide> end <ide> <add> test "rollback exception is swallowed after a rollback" do <add> barrier = Concurrent::CyclicBarrier.new(2) <add> deadlocks = 0 <add> <add> s1 = Sample.create value: 1 <add> s2 = Sample.create value: 2 <add> <add> thread = Thread.new do <add> Sample.transaction(requires_new: false) do <add> make_parent_transaction_dirty <add> Sample.transaction(requires_new: true) do <add> assert_current_transaction_is_savepoint_transaction <add> s1.lock! <add> barrier.wait <add> s2.update value: 4 <add> <add> rescue ActiveRecord::Deadlocked <add> deadlocks += 1 <add> <add> # This rollback is actually wrong as mysql automatically rollbacks the transaction <add> # which means we have nothing to rollback on the db side <add> # but we expect the framework to handle our mistake gracefully <add> raise ActiveRecord::Rollback <add> end <add> <add> s2.update value: 10 <add> end <add> end <add> <add> begin <add> Sample.transaction(requires_new: false) do <add> make_parent_transaction_dirty <add> Sample.transaction(requires_new: true) do <add> assert_current_transaction_is_savepoint_transaction <add> s2.lock! <add> barrier.wait <add> s1.update value: 3 <add> rescue ActiveRecord::Deadlocked <add> deadlocks += 1 <add> raise ActiveRecord::Rollback <add> end <add> s1.update value: 10 <add> end <add> ensure <add> thread.join <add> end <add> <add> assert_equal 1, deadlocks, "deadlock is required for the test setup" <add> assert_equal [10, 10], Sample.pluck(:value) <add> end <add> <ide> test "deadlock inside nested SavepointTransaction is recoverable" do <ide> barrier = Concurrent::CyclicBarrier.new(2) <ide> deadlocks = 0
5
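The change recorded above makes Active Record mark a transaction as invalidated as soon as a `TransactionRollbackError` (for example `ActiveRecord::Deadlocked`) is rescued, because MySQL has already rolled the transaction back on the server side. A rough sketch of the application-level pattern this supports, with the models, columns and retry policy invented for illustration:

```ruby
# Retries a unit of work that MySQL may abort as a deadlock victim.
# By the time ActiveRecord::Deadlocked reaches Ruby the server has already
# rolled the transaction back, so the framework can skip a redundant ROLLBACK.
def transfer_with_retry(from_account, to_account, amount, attempts: 3)
  attempts.times do
    return ActiveRecord::Base.transaction do
      from_account.lock!
      to_account.lock!
      from_account.update!(balance: from_account.balance - amount)
      to_account.update!(balance: to_account.balance + amount)
    end
  rescue ActiveRecord::Deadlocked
    sleep(rand * 0.1) # small backoff before retrying
  end
  raise ActiveRecord::Deadlocked, "gave up after #{attempts} deadlocks"
end
```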
Python
Python
move setuptools import to the top. thanks iksaif
c79e95d789658c68ed7b4a2f55000b28af8b7669
<ide><path>setup.py <ide> #!/usr/bin/env python <ide> # -*- coding: utf-8 -*- <add> <add>try: <add> from setuptools import setup, find_packages <add> from setuptools.command.test import test <add>except ImportError: <add> raise <add> from ez_setup import use_setuptools <add> use_setuptools() <add> from setuptools import setup, find_packages # noqa <add> from setuptools.command.test import test # noqa <add> <ide> import os <ide> import sys <ide> import codecs <ide> pass <ide> <ide> <del>try: <del> from setuptools import setup, find_packages <del> from setuptools.command.test import test <del>except ImportError: <del> raise <del> from ez_setup import use_setuptools <del> use_setuptools() <del> from setuptools import setup, find_packages # noqa <del> from setuptools.command.test import test # noqa <del> <ide> NAME = 'celery' <ide> entrypoints = {} <ide> extra = {}
1
Ruby
Ruby
use file/line from call to helper_method
4ab00cfac0c5bda7b66c5f80c63a8e858d4eac02
<ide><path>actionpack/lib/abstract_controller/helpers.rb <ide> def helper_method(*meths) <ide> meths.flatten! <ide> self._helper_methods += meths <ide> <add> location = caller_locations(1, 1).first <add> file, line = location.path, location.lineno <add> <ide> meths.each do |meth| <del> _helpers.class_eval <<-ruby_eval, __FILE__, __LINE__ + 1 <del> def #{meth}(*args, &blk) # def current_user(*args, &blk) <del> controller.send(%(#{meth}), *args, &blk) # controller.send(:current_user, *args, &blk) <del> end # end <del> ruby_eval <add> method_def = [ <add> "def #{meth}(*args, &blk)", <add> " controller.send(%(#{meth}), *args, &blk)", <add> "end" <add> ].join(";") <add> <add> _helpers.class_eval method_def, file, line <ide> end <ide> end <ide>
1
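The patch above passes the file and line of the `helper_method` call into `class_eval`, so the generated proxy methods are attributed to the caller rather than to `abstract_controller/helpers.rb`. A standalone sketch of the same technique outside Rails (the module, class and method names are made up):

```ruby
module Proxying
  # Defines a proxy method whose reported source location is the line where
  # def_proxy was called, not this file.
  def def_proxy(name)
    location = caller_locations(1, 1).first
    file, line = location.path, location.lineno

    method_def = [
      "def #{name}(*args, &blk)",
      "  target.send(:#{name}, *args, &blk)",
      "end"
    ].join(";")

    class_eval(method_def, file, line)
  end
end

class Wrapper
  extend Proxying
  attr_reader :target

  def initialize(target)
    @target = target
  end

  def_proxy :upcase   # backtraces and source_location point at this line
end

p Wrapper.new("hello").upcase                        # => "HELLO"
p Wrapper.instance_method(:upcase).source_location   # => [this file, line of the def_proxy call]
```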
Javascript
Javascript
remove the deprecated transform proptypes
731d4a061085558a302c3e76304140973e7b57b4
<ide><path>website/server/extractDocs.js <ide> function renderComponent(filepath) { <ide> docgenHelpers.findExportedOrFirst, <ide> docgen.defaultHandlers.concat(docgenHelpers.stylePropTypeHandler) <ide> ); <add> <ide> return componentsToMarkdown('component', json, filepath, n++, styleDocs); <ide> } <ide> <ide> function renderStyle(filepath) { <ide> docgenHelpers.findExportedObject, <ide> [docgen.handlers.propTypeHandler] <ide> ); <add> <add> // Remove deprecated style props <add> if (filepath === "../Libraries/StyleSheet/TransformPropTypes.js") { <add> ['rotation', 'scaleX', 'scaleY', 'translateX', 'translateY'].forEach(function(key) { <add> delete json['props'][key]; <add> }); <add> } <add> <add> <add> // console.log(json); <add> <ide> return componentsToMarkdown('style', json, filepath, n++); <ide> } <ide> <ide> var styleDocs = styles.slice(2).reduce(function(docs, filepath) { <ide> [docgen.handlers.propTypeHandler] <ide> ); <ide> <del> // Remove deprecated style props <del> if (docs['TransformPropTypes']) { <del> ['rotation', 'scaleX', 'scaleY', 'translateX', 'translateY'].forEach(function(key) { <del> delete docs['TransformPropTypes']['props'][key]; <del> }); <del> } <del> <ide> return docs; <ide> }, {}); <ide>
1
Ruby
Ruby
add links to changelog and blog
9fd2319afa9ad3c40b365bff7e5668f640aed82b
<ide><path>Library/Homebrew/cmd/update-report.rb <ide> def update_report <ide> install_core_tap_if_necessary <ide> <ide> updated = false <add> new_repository_version = nil <ide> <ide> initial_revision = ENV["HOMEBREW_UPDATE_BEFORE"].to_s <ide> current_revision = ENV["HOMEBREW_UPDATE_AFTER"].to_s <ide> def update_report <ide> update_preinstall_header args: args <ide> puts "Updated Homebrew from #{shorten_revision(initial_revision)} to #{shorten_revision(current_revision)}." <ide> updated = true <add> <add> tag = Utils.safe_popen_read("git", "tag", "--points-at", "HEAD") <add> new_repository_version = tag.chomp if tag.present? <ide> end <ide> <ide> Homebrew.failed = true if ENV["HOMEBREW_UPDATE_FAILED"] <ide> def update_report <ide> Commands.rebuild_commands_completion_list <ide> link_completions_manpages_and_docs <ide> Tap.each(&:link_completions_and_manpages) <add> <add> return if new_repository_version.blank? <add> <add> ohai "Homebrew was updated to version #{new_repository_version}" <add> puts <<~EOS <add> The changelog can be found at: <add> #{Formatter.url("https://github.com/Homebrew/brew/releases/tag/#{new_repository_version}")} <add> EOS <add> <add> return unless new_repository_version.split(".").last == "0" <add> <add> puts <<~EOS <add> More detailed release notes are available on the Homebrew Blog: <add> #{Formatter.url("https://brew.sh/blog/")} <add> EOS <ide> end <ide> <ide> def shorten_revision(revision)
1
Text
Text
add withfiletypes option to fspromises.readdir
de37ba34c5d6f3a2d18daf482f850be8fddb4621
<ide><path>doc/api/fs.md <ide> a colon, Node.js will open a file system stream, as described by <ide> ### fsPromises.readdir(path[, options]) <ide> <!-- YAML <ide> added: v10.0.0 <add>changes: <add> - version: REPLACEME <add> pr-url: https://github.com/nodejs/node/pull/22020 <add> description: New option `withFileTypes` was added. <ide> --> <ide> <ide> * `path` {string|Buffer|URL} <ide> * `options` {string|Object} <ide> * `encoding` {string} **Default:** `'utf8'` <add> * `withFileTypes` {boolean} **Default:** `false` <ide> * Returns: {Promise} <ide> <ide> Reads the contents of a directory then resolves the `Promise` with an array <ide> object with an `encoding` property specifying the character encoding to use for <ide> the filenames. If the `encoding` is set to `'buffer'`, the filenames returned <ide> will be passed as `Buffer` objects. <ide> <add>If `options.withFileTypes` is set to `true`, the resolved array will contain <add>[`fs.Dirent`][] objects. <add> <ide> ### fsPromises.readFile(path[, options]) <ide> <!-- YAML <ide> added: v10.0.0
1
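The documentation change above introduces the `withFileTypes` option for `fsPromises.readdir`. A short sketch of how a caller might use it to pick out subdirectories without extra `fs.stat()` calls (assumes a Node.js build that includes this option; the path is arbitrary):

```js
'use strict';
const fsPromises = require('fs').promises;

async function listSubdirectories(dir) {
  // With `withFileTypes: true` each entry is an fs.Dirent instead of a
  // filename string, so its type can be checked directly.
  const entries = await fsPromises.readdir(dir, { withFileTypes: true });
  return entries
    .filter((entry) => entry.isDirectory())
    .map((entry) => entry.name);
}

listSubdirectories('.').then(console.log).catch(console.error);
```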
PHP
PHP
convert routesshell into commands
fc51fa63be586ed742aa9daba0ed80e9c9e7e27a
<ide><path>src/Command/RoutesCheckCommand.php <add><?php <add>declare(strict_types=1); <add> <add>/** <add> * CakePHP(tm) : Rapid Development Framework (https://cakephp.org) <add> * Copyright (c) Cake Software Foundation, Inc. (https://cakefoundation.org) <add> * <add> * Licensed under The MIT License <add> * For full copyright and license information, please see the LICENSE.txt <add> * Redistributions of files must retain the above copyright notice. <add> * <add> * @copyright Copyright (c) Cake Software Foundation, Inc. (https://cakefoundation.org) <add> * @link https://cakephp.org CakePHP(tm) Project <add> * @since 3.1.0 <add> * @license https://opensource.org/licenses/mit-license.php MIT License <add> */ <add>namespace Cake\Command; <add> <add>use Cake\Console\Arguments; <add>use Cake\Console\Command; <add>use Cake\Console\ConsoleIo; <add>use Cake\Console\ConsoleOptionParser; <add>use Cake\Http\ServerRequest; <add>use Cake\Routing\Exception\MissingRouteException; <add>use Cake\Routing\Router; <add> <add>/** <add> * Provides interactive CLI tool for testing routes. <add> */ <add>class RoutesCheckCommand extends Command <add>{ <add> /** <add> * Display all routes in an application <add> * <add> * @param \Cake\Console\Arguments $args The command arguments. <add> * @param \Cake\Console\ConsoleIo $io The console io <add> * @return null|int The exit code or null for success <add> */ <add> public function execute(Arguments $args, ConsoleIo $io): ?int <add> { <add> $url = $args->getArgument('url'); <add> try { <add> $request = new ServerRequest(['url' => $url]); <add> $route = Router::parseRequest($request); <add> $name = null; <add> foreach (Router::routes() as $r) { <add> if ($r->match($route)) { <add> $name = $r->options['_name'] ?? $r->getName(); <add> break; <add> } <add> } <add> <add> unset($route['_matchedRoute']); <add> ksort($route); <add> <add> $output = [ <add> ['Route name', 'URI template', 'Defaults'], <add> [$name, $url, json_encode($route)], <add> ]; <add> $io->helper('table')->output($output); <add> $io->out(); <add> } catch (MissingRouteException $e) { <add> $io->warning("'$url' did not match any routes."); <add> $io->out(); <add> <add> return static::CODE_ERROR; <add> } <add> <add> return static::CODE_SUCCESS; <add> } <add> <add> /** <add> * Get the option parser. <add> * <add> * @param \Cake\Console\ConsoleOptionParser $parser The option parser to update <add> * @return \Cake\Console\ConsoleOptionParser <add> */ <add> public function buildOptionParser(ConsoleOptionParser $parser): ConsoleOptionParser <add> { <add> $parser->setDescription( <add> 'Check a URL string against the routes. ' . <add> 'Will output the routing parameters the route resolves to.' <add> ) <add> ->addArgument('url', [ <add> 'help' => 'The URL to check.', <add> ]); <add> <add> return $parser; <add> } <add>} <ide><path>src/Command/RoutesCommand.php <add><?php <add>declare(strict_types=1); <add> <add>/** <add> * CakePHP(tm) : Rapid Development Framework (https://cakephp.org) <add> * Copyright (c) Cake Software Foundation, Inc. (https://cakefoundation.org) <add> * <add> * Licensed under The MIT License <add> * For full copyright and license information, please see the LICENSE.txt <add> * Redistributions of files must retain the above copyright notice. <add> * <add> * @copyright Copyright (c) Cake Software Foundation, Inc. 
(https://cakefoundation.org) <add> * @link https://cakephp.org CakePHP(tm) Project <add> * @since 3.1.0 <add> * @license https://opensource.org/licenses/mit-license.php MIT License <add> */ <add>namespace Cake\Command; <add> <add>use Cake\Console\Arguments; <add>use Cake\Console\Command; <add>use Cake\Console\ConsoleIo; <add>use Cake\Console\ConsoleOptionParser; <add>use Cake\Routing\Router; <add> <add>/** <add> * Provides interactive CLI tools for routing. <add> */ <add>class RoutesCommand extends Command <add>{ <add> /** <add> * Display all routes in an application <add> * <add> * @param \Cake\Console\Arguments $args The command arguments. <add> * @param \Cake\Console\ConsoleIo $io The console io <add> * @return null|int The exit code or null for success <add> */ <add> public function execute(Arguments $args, ConsoleIo $io): ?int <add> { <add> $output = [ <add> ['Route name', 'URI template', 'Defaults'], <add> ]; <add> foreach (Router::routes() as $route) { <add> $name = $route->options['_name'] ?? $route->getName(); <add> ksort($route->defaults); <add> $output[] = [$name, $route->template, json_encode($route->defaults)]; <add> } <add> $io->helper('table')->output($output); <add> $io->out(); <add> <add> return static::CODE_SUCCESS; <add> } <add> <add> /** <add> * Get the option parser. <add> * <add> * @param \Cake\Console\ConsoleOptionParser $parser The option parser to update <add> * @return \Cake\Console\ConsoleOptionParser <add> */ <add> public function buildOptionParser(ConsoleOptionParser $parser): ConsoleOptionParser <add> { <add> $parser->setDescription('Get the list of routes connected in this application.'); <add> <add> return $parser; <add> } <add>} <ide><path>src/Command/RoutesGenerateCommand.php <add><?php <add>declare(strict_types=1); <add> <add>/** <add> * CakePHP(tm) : Rapid Development Framework (https://cakephp.org) <add> * Copyright (c) Cake Software Foundation, Inc. (https://cakefoundation.org) <add> * <add> * Licensed under The MIT License <add> * For full copyright and license information, please see the LICENSE.txt <add> * Redistributions of files must retain the above copyright notice. <add> * <add> * @copyright Copyright (c) Cake Software Foundation, Inc. (https://cakefoundation.org) <add> * @link https://cakephp.org CakePHP(tm) Project <add> * @since 3.1.0 <add> * @license https://opensource.org/licenses/mit-license.php MIT License <add> */ <add>namespace Cake\Command; <add> <add>use Cake\Console\Arguments; <add>use Cake\Console\Command; <add>use Cake\Console\ConsoleIo; <add>use Cake\Console\ConsoleOptionParser; <add>use Cake\Routing\Exception\MissingRouteException; <add>use Cake\Routing\Router; <add> <add>/** <add> * Provides interactive CLI tools for URL generation <add> */ <add>class RoutesGenerateCommand extends Command <add>{ <add> /** <add> * Display all routes in an application <add> * <add> * @param \Cake\Console\Arguments $args The command arguments. 
<add> * @param \Cake\Console\ConsoleIo $io The console io <add> * @return null|int The exit code or null for success <add> */ <add> public function execute(Arguments $args, ConsoleIo $io): ?int <add> { <add> try { <add> $args = $this->_splitArgs($args->getArguments()); <add> $url = Router::url($args); <add> $io->out("> $url"); <add> $io->out(); <add> } catch (MissingRouteException $e) { <add> $io->err('<warning>The provided parameters do not match any routes.</warning>'); <add> $io->out(); <add> <add> return static::CODE_ERROR; <add> } <add> <add> return static::CODE_SUCCESS; <add> } <add> <add> /** <add> * Split the CLI arguments into a hash. <add> * <add> * @param array $args The arguments to split. <add> * @return array <add> */ <add> protected function _splitArgs(array $args): array <add> { <add> $out = []; <add> foreach ($args as $arg) { <add> if (strpos($arg, ':') !== false) { <add> [$key, $value] = explode(':', $arg); <add> if (in_array($value, ['true', 'false'], true)) { <add> $value = $value === 'true'; <add> } <add> $out[$key] = $value; <add> } else { <add> $out[] = $arg; <add> } <add> } <add> <add> return $out; <add> } <add> <add> /** <add> * Get the option parser. <add> * <add> * @param \Cake\Console\ConsoleOptionParser $parser The option parser to update <add> * @return \Cake\Console\ConsoleOptionParser <add> */ <add> public function buildOptionParser(ConsoleOptionParser $parser): ConsoleOptionParser <add> { <add> $parser->setDescription( <add> 'Check a routing array against the routes. ' . <add> 'Will output the URL if there is a match.' . <add> "\n\n" . <add> 'Routing parameters should be supplied in a key:value format. ' . <add> 'For example `controller:Articles action:view 2`' <add> ); <add> <add> return $parser; <add> } <add>} <ide><path>src/Shell/RoutesShell.php <del><?php <del>declare(strict_types=1); <del> <del>/** <del> * CakePHP(tm) : Rapid Development Framework (https://cakephp.org) <del> * Copyright (c) Cake Software Foundation, Inc. (https://cakefoundation.org) <del> * <del> * Licensed under The MIT License <del> * For full copyright and license information, please see the LICENSE.txt <del> * Redistributions of files must retain the above copyright notice. <del> * <del> * @copyright Copyright (c) Cake Software Foundation, Inc. (https://cakefoundation.org) <del> * @link https://cakephp.org CakePHP(tm) Project <del> * @since 3.1.0 <del> * @license https://opensource.org/licenses/mit-license.php MIT License <del> */ <del>namespace Cake\Shell; <del> <del>use Cake\Console\ConsoleOptionParser; <del>use Cake\Console\Shell; <del>use Cake\Http\ServerRequest; <del>use Cake\Routing\Exception\MissingRouteException; <del>use Cake\Routing\Router; <del> <del>/** <del> * Provides interactive CLI tools for routing. <del> */ <del>class RoutesShell extends Shell <del>{ <del> /** <del> * Override main() to handle action <del> * Displays all routes in an application. <del> * <del> * @return void <del> */ <del> public function main(): void <del> { <del> $output = [ <del> ['Route name', 'URI template', 'Defaults'], <del> ]; <del> foreach (Router::routes() as $route) { <del> $name = $route->options['_name'] ?? $route->getName(); <del> ksort($route->defaults); <del> $output[] = [$name, $route->template, json_encode($route->defaults)]; <del> } <del> $this->helper('table')->output($output); <del> $this->out(); <del> } <del> <del> /** <del> * Checks a url for the route that will be applied. 
<del> * <del> * @param string $url The URL to parse <del> * @return bool Success <del> */ <del> public function check(string $url): bool <del> { <del> try { <del> $request = new ServerRequest(['url' => $url]); <del> $route = Router::parseRequest($request); <del> $name = null; <del> foreach (Router::routes() as $r) { <del> if ($r->match($route)) { <del> $name = $r->options['_name'] ?? $r->getName(); <del> break; <del> } <del> } <del> <del> unset($route['_matchedRoute']); <del> ksort($route); <del> <del> $output = [ <del> ['Route name', 'URI template', 'Defaults'], <del> [$name, $url, json_encode($route)], <del> ]; <del> $this->helper('table')->output($output); <del> $this->out(); <del> } catch (MissingRouteException $e) { <del> $this->warn("'$url' did not match any routes."); <del> $this->out(); <del> <del> return false; <del> } <del> <del> return true; <del> } <del> <del> /** <del> * Generate a URL based on a set of parameters <del> * <del> * Takes variadic arguments of key/value pairs. <del> * @return bool Success <del> */ <del> public function generate(): bool <del> { <del> try { <del> $args = $this->_splitArgs($this->args); <del> $url = Router::url($args); <del> $this->out("> $url"); <del> $this->out(); <del> } catch (MissingRouteException $e) { <del> $this->err('<warning>The provided parameters do not match any routes.</warning>'); <del> $this->out(); <del> <del> return false; <del> } <del> <del> return true; <del> } <del> <del> /** <del> * Get the option parser. <del> * <del> * @return \Cake\Console\ConsoleOptionParser <del> */ <del> public function getOptionParser(): ConsoleOptionParser <del> { <del> $parser = parent::getOptionParser(); <del> $parser->setDescription( <del> 'Get the list of routes connected in this application. ' . <del> 'This tool also lets you test URL generation and URL parsing.' <del> )->addSubcommand('check', [ <del> 'help' => 'Check a URL string against the routes. ' . <del> 'Will output the routing parameters the route resolves to.', <del> ])->addSubcommand('generate', [ <del> 'help' => 'Check a routing array against the routes. ' . <del> "Will output the URL if there is a match.\n\n" . <del> 'Routing parameters should be supplied in a key:value format. ' . <del> 'For example `controller:Articles action:view 2`', <del> ]); <del> <del> return $parser; <del> } <del> <del> /** <del> * Split the CLI arguments into a hash. <del> * <del> * @param array $args The arguments to split. 
<del> * @return array <del> */ <del> protected function _splitArgs(array $args): array <del> { <del> $out = []; <del> foreach ($args as $arg) { <del> if (strpos($arg, ':') !== false) { <del> [$key, $value] = explode(':', $arg); <del> if (in_array($value, ['true', 'false'], true)) { <del> $value = $value === 'true'; <del> } <del> $out[$key] = $value; <del> } else { <del> $out[] = $arg; <del> } <del> } <del> <del> return $out; <del> } <del>} <add><path>tests/TestCase/Command/RoutesCommandTest.php <del><path>tests/TestCase/Shell/RoutesShellTest.php <ide> * @since 3.1.0 <ide> * @license https://opensource.org/licenses/mit-license.php MIT License <ide> */ <del>namespace Cake\Test\TestCase\Shell; <add>namespace Cake\Test\TestCase\Command; <ide> <del>use Cake\Console\Shell; <add>use Cake\Console\Command; <ide> use Cake\Routing\Router; <ide> use Cake\TestSuite\ConsoleIntegrationTestCase; <ide> <ide> /** <del> * RoutesShellTest <add> * RoutesCommandTest <ide> */ <del>class RoutesShellTest extends ConsoleIntegrationTestCase <add>class RoutesCommandTest extends ConsoleIntegrationTestCase <ide> { <ide> /** <ide> * setUp method <ide> public function tearDown(): void <ide> Router::reload(); <ide> } <ide> <add> /** <add> * Ensure help for `routes` works <add> * <add> * @return void <add> */ <add> public function testRouteListHelp() <add> { <add> $this->exec('routes -h'); <add> $this->assertExitCode(Command::CODE_SUCCESS); <add> $this->assertOutputContains('list of routes'); <add> $this->assertErrorEmpty(); <add> } <add> <ide> /** <ide> * Test checking an non-existing route. <ide> * <ide> * @return void <ide> */ <del> public function testMain() <add> public function testRouteList() <ide> { <ide> $this->exec('routes'); <del> $this->assertExitCode(Shell::CODE_SUCCESS); <add> $this->assertExitCode(Command::CODE_SUCCESS); <ide> $this->assertOutputContainsRow([ <ide> '<info>Route name</info>', <ide> '<info>URI template</info>', <ide> public function testMain() <ide> ]); <ide> } <ide> <add> /** <add> * Ensure help for `routes` works <add> * <add> * @return void <add> */ <add> public function testCheckHelp() <add> { <add> $this->exec('routes check -h'); <add> $this->assertExitCode(Command::CODE_SUCCESS); <add> $this->assertOutputContains('Check a URL'); <add> $this->assertErrorEmpty(); <add> } <add> <ide> /** <ide> * Test checking an existing route. 
<ide> * <ide> public function testMain() <ide> public function testCheck() <ide> { <ide> $this->exec('routes check /app/articles/check'); <del> $this->assertExitCode(Shell::CODE_SUCCESS); <add> $this->assertExitCode(Command::CODE_SUCCESS); <ide> $this->assertOutputContainsRow([ <ide> '<info>Route name</info>', <ide> '<info>URI template</info>', <ide> public function testCheck() <ide> public function testCheckWithNamedRoute() <ide> { <ide> $this->exec('routes check /app/tests/index'); <del> $this->assertExitCode(Shell::CODE_SUCCESS); <add> $this->assertExitCode(Command::CODE_SUCCESS); <ide> $this->assertOutputContainsRow([ <ide> '<info>Route name</info>', <ide> '<info>URI template</info>', <ide> public function testCheckWithNamedRoute() <ide> public function testCheckNotFound() <ide> { <ide> $this->exec('routes check /nope'); <del> $this->assertExitCode(Shell::CODE_ERROR); <add> $this->assertExitCode(Command::CODE_ERROR); <ide> $this->assertErrorContains('did not match'); <ide> } <ide> <add> /** <add> * Ensure help for `routes` works <add> * <add> * @return void <add> */ <add> public function testGenerareHelp() <add> { <add> $this->exec('routes generate -h'); <add> $this->assertExitCode(Command::CODE_SUCCESS); <add> $this->assertOutputContains('Check a routing array'); <add> $this->assertErrorEmpty(); <add> } <add> <ide> /** <ide> * Test generating URLs <ide> * <ide> public function testCheckNotFound() <ide> public function testGenerateNoPassArgs() <ide> { <ide> $this->exec('routes generate controller:Articles action:index'); <del> $this->assertExitCode(Shell::CODE_SUCCESS); <add> $this->assertExitCode(Command::CODE_SUCCESS); <ide> $this->assertOutputContains('> /app/articles'); <ide> $this->assertErrorEmpty(); <ide> } <ide> public function testGenerateNoPassArgs() <ide> public function testGeneratePassedArguments() <ide> { <ide> $this->exec('routes generate controller:Articles action:view 2 3'); <del> $this->assertExitCode(Shell::CODE_SUCCESS); <add> $this->assertExitCode(Command::CODE_SUCCESS); <ide> $this->assertOutputContains('> /app/articles/view/2/3'); <ide> $this->assertErrorEmpty(); <ide> } <ide> public function testGeneratePassedArguments() <ide> public function testGenerateBoolParams() <ide> { <ide> $this->exec('routes generate controller:Articles action:index _ssl:true _host:example.com'); <del> $this->assertExitCode(Shell::CODE_SUCCESS); <add> $this->assertExitCode(Command::CODE_SUCCESS); <ide> $this->assertOutputContains('> https://example.com/app/articles'); <ide> } <ide> <ide> public function testGenerateBoolParams() <ide> public function testGenerateMissing() <ide> { <ide> $this->exec('routes generate plugin:Derp controller:Derp'); <del> $this->assertExitCode(Shell::CODE_ERROR); <add> $this->assertExitCode(Command::CODE_ERROR); <ide> $this->assertErrorContains('do not match'); <ide> } <ide> }
5
Ruby
Ruby
fix failing tests
1b8a7b8293e0ff944eb9a5ed84a17b51a87be696
<ide><path>activerecord/lib/active_record/attribute.rb <ide> def with_cast_value(value) <ide> end <ide> <ide> def with_type(type) <del> self.class.new(name, value_before_type_cast, type, original_attribute) <add> if changed_in_place? <add> with_value_from_user(value).with_type(type) <add> else <add> self.class.new(name, value_before_type_cast, type, original_attribute) <add> end <ide> end <ide> <ide> def type_cast(*) <ide><path>activerecord/lib/active_record/connection_adapters/abstract/schema_definitions.rb <ide> def custom_primary_key? <ide> options[:primary_key] != default_primary_key <ide> end <ide> <del> def defined_for?(options_or_to_table = {}) <del> if options_or_to_table.is_a?(Hash) <del> options_or_to_table.all? {|key, value| options[key].to_s == value.to_s } <add> def defined_for?(to_table_ord = nil, to_table: nil, **options) <add> if to_table_ord <add> self.to_table == to_table_ord.to_s <ide> else <del> to_table == options_or_to_table.to_s <add> (to_table.nil? || to_table.to_s == self.to_table) && <add> options.all? { |k, v| self.options[k].to_s == v.to_s } <ide> end <ide> end <ide> <ide><path>activerecord/test/cases/attribute_test.rb <ide> def assert_valid_value(*) <ide> attribute.with_value_from_user(1) <ide> end <ide> end <add> <add> test "with_type preserves mutations" do <add> attribute = Attribute.from_database(:foo, "", Type::Value.new) <add> attribute.value << "1" <add> <add> assert_equal 1, attribute.with_type(Type::Integer.new).value <add> end <ide> end <ide> end
3
Javascript
Javascript
throw an error, not string
bbb2dccd1b650db656918f12311f04efa2495ee6
<ide><path>lib/_tls_wrap.js <ide> Server.prototype.setOptions = function(options) { <ide> // SNI Contexts High-Level API <ide> Server.prototype.addContext = function(servername, context) { <ide> if (!servername) { <del> throw 'Servername is required parameter for Server.addContext'; <add> throw new Error('Servername is required parameter for Server.addContext'); <ide> } <ide> <ide> var re = new RegExp('^' +
1
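The patch above swaps a thrown string for an `Error` object. As a small illustrative sketch (not part of the recorded commit; function names here are invented), the difference matters because only `Error` instances carry a stack trace and satisfy `instanceof Error` checks in calling code:

```js
'use strict';

function addContextString(servername) {
  if (!servername) {
    // Throwing a bare string: no stack trace, not an instanceof Error.
    throw 'Servername is required';
  }
}

function addContextError(servername) {
  if (!servername) {
    // Throwing an Error: carries a stack and works with instanceof checks.
    throw new Error('Servername is required');
  }
}

try {
  addContextString();
} catch (err) {
  console.log(err instanceof Error); // false
  console.log(err.stack);            // undefined
}

try {
  addContextError();
} catch (err) {
  console.log(err instanceof Error); // true
  console.log(typeof err.stack);     // 'string'
}
```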
Text
Text
fix typos and clarify explanations
704f7fa4c7670d5959dc9b11899adff6a20f790f
<ide><path>guide/english/css/css3-media-queries/index.md <ide> title: CSS3 Media Queries <ide> --- <ide> ## CSS3 Media Queries <ide> <del>Media Queries allow you to have different styles for different devices/screen sizes. Their introduction in CSS3 has greatly eased the building <del>of responsive webpages. <add>Media queries allow you to design a website differently for different devices/screen sizes. Their introduction in CSS3 has greatly eased the building of responsive webpages. <ide> <del>The best approach when designing a responsive website is to think mobile first; meaning that you create your page starting with the design and content <del>of the mobile version. You may think that with some scalable sizes ( %, vw or vh ), your page will adapt perfectly to any device. But it will not. Maybe <del>for some very basic design, but certainly not for more common or complex pages! <add>The best approach when designing a responsive website is to think <em>mobile first</em> - meaning that you design the mobile version of your site first and scale up for larger devices. Using relative units of size (like %, vw or vh) will not guarantee your page adapts perfectly to any device, especially for complex layouts! Media queries let you specify <em>breakpoints</em> where different styles will be applied. <ide> <del>When designing your page for smaller devices, you will focus on the main content. On a bigger screen, you will have to adapt some font-sizes, margins, <del>paddings and so on in order to keep your site comfortable and readable. You will also likely want to add more content and fill in the space created by the screen size. <add>When designing your page for smaller devices, you should focus on the main content. On a bigger screen, you will want to adjust font sizes, margins, padding, etc. to keep your site readable, but you may also want to add secondary content to fill in the space created by the screen size. <ide> <del>The thought process should be: <add>The thought process for responsive design should be: <ide> 1. Which content to show? <del>2. How to layout the page? <add>2. How to create the layout of the page? <ide> 3. Which size to use? <ide> <ide> ### The basic syntax <ide> The thought process should be: <ide> } <ide> ``` <ide> <del>The `p` tag will have a padding of 30px as soon as the screen reaches min 768px width.</p> <add>`p` tags will have 30px of padding when the screen width is at least 768px. <ide> <ide> ### The AND syntax <ide> <ide> The `p` tag will have a padding of 30px as soon as the screen reaches min 768px <ide> } <ide> ``` <ide> <del>The `p` tag will have a padding of 30px as soon as the screen reaches min 768px height and its orientation is landscape. <add>`p` tags will have 30px of padding when the screen height is at least 768px AND its orientation is landscape. <ide> <ide> ### The OR syntax <ide> <ide> The `p` tag will have a padding of 30px as soon as the screen reaches min 768px <ide> } <ide> ``` <ide> <del>The `p` tag will have a padding of 30px as soon as the screen reaches min 768px width or its resolution reaches min 150dpi. <add>`p` tags will have 30px of padding when the screen width is at least 768px OR its resolution reaches 150dpi. <ide> <ide> ### And beyond! <ide> <del>There are more operators beyond the main two, a full list with examples can be found [https://css-tricks.com/logic-in-media-queries/](in this article from CSS Tricks.) 
<add>Additional tips for using media queries, such as the `not` operator and examples of greater specificity, can be found [https://css-tricks.com/logic-in-media-queries/](in this article from CSS Tricks.) <ide> <del>Beyond the core uses of media queries for mobile-first web design shown above, media queries can do a lot, especially for web accessibility. Here are just a few examples: <add>Beyond their core use for mobile-first web design, media queries can also greatly improve web accessibility. Here are a few examples: <ide> <del>1. Adjusting for screen readers that convert website text to speech for the visually impaired (for example, ignoring non-essential text). <add>1. Adjusting for screen readers that convert website text to speech for people with visual impairments (for example, ignoring non-essential text). <ide> ```css <ide> @media speech { <ide> /* ... */ <ide> } <ide> ``` <del>2. Allowing for more graceful zooming in for those with minor visual impairments, such as many elderly people. <add> <add>2. Allowing for more graceful zooming in for people with visual impairments. <add> <ide> 3. Allowing smoother experiences for those who prefer or need less animation to read a page. <ide> ```css <ide> @media (prefers-reduced-motion: reduce) { <ide> Beyond the core uses of media queries for mobile-first web design shown above, m <ide> } <ide> } <ide> ``` <del>4. Restyling a page for when it's printed as opposed to read on a screen. <add>4. Restyling a page for printing as opposed to reading on a screen. <ide> ```css <ide> @media print { <ide> /* ... */ <ide> Beyond the core uses of media queries for mobile-first web design shown above, m <ide> ### More Information <ide> * [MDN - media queries](https://developer.mozilla.org/en-US/docs/Web/CSS/Media_Queries/Using_media_queries) <ide> * [W3 Schools - @media rule](https://www.w3schools.com/cssref/css3_pr_mediaquery.asp) <del>* [CSS Tricks Standard Device Widths Article](https://css-tricks.com/snippets/css/media-queries-for-standard-devices/) <del>* [Ethan Marcotte A List Apart Atricle on Responsive Web Design](https://alistapart.com/article/responsive-web-design) <del>* [Brad Frost 7 habits of highly effective media queries](http://bradfrost.com/blog/post/7-habits-of-highly-effective-media-queries/) <add>* [CSS Tricks - Standard Device Widths Article](https://css-tricks.com/snippets/css/media-queries-for-standard-devices/) <add>* [Ethan Marcotte - A List Apart Article on Responsive Web Design](https://alistapart.com/article/responsive-web-design) <add>* [Brad Frost - 7 habits of highly effective media queries](http://bradfrost.com/blog/post/7-habits-of-highly-effective-media-queries/) <ide> * [How to make media queries work on older browsers](https://www.templatemonster.com/blog/css-media-queries-for-all-devices-and-browsers-including-ie7-and-ie8/)
1
Text
Text
clarify child_process promise rejections
43506f1013c5568243dc800c42536d5aa72a3589
<ide><path>doc/api/child_process.md <ide> the existing process and uses a shell to execute the command. <ide> <ide> If this method is invoked as its [`util.promisify()`][]ed version, it returns <ide> a Promise for an object with `stdout` and `stderr` properties. In case of an <del>error, a rejected promise is returned, with the same `error` object given in the <del>callback, but with an additional two properties `stdout` and `stderr`. <add>error (including any error resulting in an exit code other than 0), a rejected <add>promise is returned, with the same `error` object given in the callback, but <add>with an additional two properties `stdout` and `stderr`. <ide> <ide> ```js <ide> const util = require('util'); <ide> encoding, `Buffer` objects will be passed to the callback instead. <ide> <ide> If this method is invoked as its [`util.promisify()`][]ed version, it returns <ide> a Promise for an object with `stdout` and `stderr` properties. In case of an <del>error, a rejected promise is returned, with the same `error` object given in the <add>error (including any error resulting in an exit code other than 0), a rejected <add>promise is returned, with the same `error` object given in the <ide> callback, but with an additional two properties `stdout` and `stderr`. <ide> <ide> ```js
1
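The documentation change above clarifies that a non-zero exit code also rejects the promisified call, with `stdout` and `stderr` attached to the rejection. A minimal sketch of what consuming code typically looks like (the command string here is illustrative only):

```js
const util = require('util');
const exec = util.promisify(require('child_process').exec);

async function run() {
  try {
    const { stdout, stderr } = await exec('echo ok');
    console.log('stdout:', stdout.trim());
    console.log('stderr:', stderr.trim());
  } catch (error) {
    // Reached for spawn failures *and* for any non-zero exit code.
    // The rejection value is the same error object the callback API
    // would receive, with stdout and stderr added as extra properties.
    console.error('exit code:', error.code);
    console.error('captured stdout:', error.stdout);
    console.error('captured stderr:', error.stderr);
  }
}

run();
```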
Javascript
Javascript
unify legacy class binding into ast preprocessor
47ddf42b2c10324d9e8d35397db8195c9108a465
<ide><path>packages/ember-htmlbars/lib/helpers/-concat.js <add>/** @private <add> This private helper is used by the legacy class bindings AST transformer <add> to concatenate class names together. <add>*/ <add>export default function concat(params, hash) { <add> return params.join(hash.separator); <add>} <ide><path>packages/ember-htmlbars/lib/helpers/-normalize-class.js <ide> import { dasherize } from "ember-runtime/system/string"; <add>import { isPath } from "ember-metal/path_cache"; <ide> <ide> /** @private <ide> This private helper is used by ComponentNode to convert the classNameBindings <ide> export default function normalizeClass(params, hash) { <ide> // If value is a Boolean and true, return the dasherized property <ide> // name. <ide> } else if (value === true) { <add> // Only apply to last segment in the path <add> if (propName && isPath(propName)) { <add> var segments = propName.split('.'); <add> propName = segments[segments.length - 1]; <add> } <add> <ide> return dasherize(propName); <ide> <ide> // If the value is not false, undefined, or null, return the current <ide><path>packages/ember-htmlbars/lib/main.js <ide> import logHelper from "ember-htmlbars/helpers/log"; <ide> import eachHelper from "ember-htmlbars/helpers/each"; <ide> import bindAttrClassHelper from "ember-htmlbars/helpers/bind-attr-class"; <ide> import normalizeClassHelper from "ember-htmlbars/helpers/-normalize-class"; <add>import concatHelper from "ember-htmlbars/helpers/-concat"; <ide> import DOMHelper from "ember-htmlbars/system/dom-helper"; <ide> <ide> // importing adds template bootstrapping <ide> registerHelper('log', logHelper); <ide> registerHelper('each', eachHelper); <ide> registerHelper('bind-attr-class', bindAttrClassHelper); <ide> registerHelper('-normalize-class', normalizeClassHelper); <add>registerHelper('-concat', concatHelper); <ide> <ide> Ember.HTMLBars = { <ide> _registerHelper: registerHelper, <ide><path>packages/ember-htmlbars/tests/helpers/view_test.js <ide> QUnit.test("allows you to pass attributes that will be assigned to the class ins <ide> equal(jQuery('#bar').text(), 'Bar'); <ide> }); <ide> <del>QUnit.skip("Should apply class without condition always", function() { <del> view = EmberView.create({ <del> controller: Ember.Object.create(), <del> template: compile('{{#view id="foo" classBinding=":foo"}} Foo{{/view}}') <del> }); <add>QUnit.test("Should apply class without condition always", function() { <add> expectDeprecation(function() { <add> view = EmberView.create({ <add> controller: Ember.Object.create(), <add> template: compile('{{#view id="foo" classBinding=":foo"}} Foo{{/view}}') <add> }); <add> }, /legacy class binding syntax/); <ide> <ide> runAppend(view); <ide> <ide> QUnit.test("Should not apply classes when bound property specified is false", fu <ide> ok(!jQuery('#foo').hasClass('some-prop'), "does not add class when value is falsey"); <ide> }); <ide> <del>QUnit.skip("Should apply classes of the dasherized property name when bound property specified is true", function() { <add>QUnit.test("Should apply classes of the dasherized property name when bound property specified is true", function() { <ide> view = EmberView.create({ <ide> controller: { <ide> someProp: true <ide> QUnit.skip("Should apply classes of the dasherized property name when bound prop <ide> ok(jQuery('#foo').hasClass('some-prop'), "adds dasherized class when value is true"); <ide> }); <ide> <del>QUnit.skip("Should update classes from a bound property", function() { <add>QUnit.test("Should update classes from a 
bound property", function() { <ide> var controller = { <ide> someProp: true <ide> }; <ide> QUnit.test('{{view}} should be able to point to a local view', function() { <ide> equal(view.$().text(), 'common', 'tries to look up view name locally'); <ide> }); <ide> <del>QUnit.skip('{{view}} should evaluate class bindings set to global paths DEPRECATED', function() { <add>QUnit.test('{{view}} should evaluate class bindings set to global paths DEPRECATED', function() { <ide> var App; <ide> <ide> run(function() { <ide> QUnit.skip('{{view}} should evaluate class bindings set to global paths DEPRECAT <ide> }); <ide> }); <ide> <del> view = EmberView.create({ <del> textField: TextField, <del> template: compile('{{view view.textField class="unbound" classBinding="App.isGreat:great App.directClass App.isApp App.isEnabled:enabled:disabled"}}') <del> }); <add> expectDeprecation(function() { <add> view = EmberView.create({ <add> textField: TextField, <add> template: compile('{{view view.textField class="unbound" classBinding="App.isGreat:great App.directClass App.isApp App.isEnabled:enabled:disabled"}}') <add> }); <add> }, /legacy class binding/); <ide> <ide> expectDeprecation(function() { <ide> runAppend(view); <ide> QUnit.skip('{{view}} should evaluate class bindings set to global paths DEPRECAT <ide> runDestroy(lookup.App); <ide> }); <ide> <del>QUnit.skip('{{view}} should evaluate class bindings set in the current context', function() { <del> view = EmberView.create({ <del> isView: true, <del> isEditable: true, <del> directClass: 'view-direct', <del> isEnabled: true, <del> textField: TextField, <del> template: compile('{{view view.textField class="unbound" classBinding="view.isEditable:editable view.directClass view.isView view.isEnabled:enabled:disabled"}}') <del> }); <add>QUnit.test('{{view}} should evaluate class bindings set in the current context', function() { <add> expectDeprecation(function() { <add> view = EmberView.create({ <add> isView: true, <add> isEditable: true, <add> directClass: 'view-direct', <add> isEnabled: true, <add> textField: TextField, <add> template: compile('{{view view.textField class="unbound" classBinding="view.isEditable:editable view.directClass view.isView view.isEnabled:enabled:disabled"}}') <add> }); <add> }, /legacy class binding syntax/); <ide> <ide> runAppend(view); <ide> <ide> QUnit.skip('{{view}} should evaluate class bindings set in the current context', <ide> ok(view.$('input').hasClass('disabled'), 'evaluates ternary operator in classBindings'); <ide> }); <ide> <del>QUnit.skip('{{view}} should evaluate class bindings set with either classBinding or classNameBindings from globals DEPRECATED', function() { <add>QUnit.test('{{view}} should evaluate class bindings set with either classBinding or classNameBindings from globals DEPRECATED', function() { <ide> var App; <ide> <ide> run(function() { <ide> QUnit.skip('{{view}} should evaluate class bindings set with either classBinding <ide> }); <ide> }); <ide> <del> view = EmberView.create({ <del> textField: TextField, <del> template: compile('{{view view.textField class="unbound" classBinding="App.isGreat:great App.isEnabled:enabled:disabled" classNameBindings="App.isGreat:really-great App.isEnabled:really-enabled:really-disabled"}}') <del> }); <add> expectDeprecation(function() { <add> view = EmberView.create({ <add> textField: TextField, <add> template: compile('{{view view.textField class="unbound" classBinding="App.isGreat:great App.isEnabled:enabled:disabled" classNameBindings="App.isGreat:really-great 
App.isEnabled:really-enabled:really-disabled"}}') <add> }); <add> }, /legacy class binding/); <ide> <ide> expectDeprecation(function() { <ide> runAppend(view); <ide> QUnit.test('{{view}} should evaluate other attributes bindings set in the curren <ide> equal(view.$('input').val(), 'myView', 'evaluates attributes bound in the current context'); <ide> }); <ide> <del>QUnit.skip('{{view}} should be able to bind class names to truthy properties', function() { <del> registry.register('template:template', compile('{{#view view.classBindingView classBinding="view.number:is-truthy"}}foo{{/view}}')); <add>QUnit.test('{{view}} should be able to bind class names to truthy properties', function() { <add> expectDeprecation(function() { <add> registry.register('template:template', compile('{{#view view.classBindingView classBinding="view.number:is-truthy"}}foo{{/view}}')); <add> }, /legacy class binding syntax/); <ide> <ide> var ClassBindingView = EmberView.extend(); <ide> <ide> QUnit.skip('{{view}} should be able to bind class names to truthy properties', f <ide> equal(view.$('.is-truthy').length, 0, 'removes class name if bound property is set to falsey'); <ide> }); <ide> <del>QUnit.skip('{{view}} should be able to bind class names to truthy or falsy properties', function() { <del> registry.register('template:template', compile('{{#view view.classBindingView classBinding="view.number:is-truthy:is-falsy"}}foo{{/view}}')); <add>QUnit.test('{{view}} should be able to bind class names to truthy or falsy properties', function() { <add> expectDeprecation(function() { <add> registry.register('template:template', compile('{{#view view.classBindingView classBinding="view.number:is-truthy:is-falsy"}}foo{{/view}}')); <add> }, /legacy class binding syntax/); <ide> <ide> var ClassBindingView = EmberView.extend(); <ide> <ide> QUnit.skip('{{view}} should be able to bind class names to truthy or falsy prope <ide> equal(view.$('.is-falsy').length, 1, "sets class name to falsy value"); <ide> }); <ide> <del>QUnit.skip('a view helper\'s bindings are to the parent context', function() { <add>QUnit.test('a view helper\'s bindings are to the parent context', function() { <ide> var Subview = EmberView.extend({ <del> classNameBindings: ['color'], <add> classNameBindings: ['attrs.color'], <ide> controller: EmberObject.create({ <ide> color: 'green', <ide> name: 'bar' <ide> }), <del> template: compile('{{view.someController.name}} {{name}}') <add> template: compile('{{attrs.someController.name}} {{name}}') <ide> }); <ide> <ide> var View = EmberView.extend({ <ide> QUnit.test('should expose a controller keyword that can be used in conditionals' <ide> equal(view.$().text(), '', 'updates the DOM when the controller is changed'); <ide> }); <ide> <del>QUnit.skip('should expose a controller keyword that persists through Ember.ContainerView', function() { <add>QUnit.test('should expose a controller keyword that persists through Ember.ContainerView', function() { <ide> var templateString = '{{view view.containerView}}'; <ide> view = EmberView.create({ <ide> containerView: ContainerView, <ide><path>packages/ember-template-compiler/lib/main.js <ide> import TransformBindAttrToAttributes from "ember-template-compiler/plugins/trans <ide> import TransformEachIntoCollection from "ember-template-compiler/plugins/transform-each-into-collection"; <ide> import TransformSingleArgEach from "ember-template-compiler/plugins/transform-single-arg-each"; <ide> import TransformOldBindingSyntax from 
"ember-template-compiler/plugins/transform-old-binding-syntax"; <add>import TransformOldClassBindingSyntax from "ember-template-compiler/plugins/transform-old-class-binding-syntax"; <ide> <ide> // used for adding Ember.Handlebars.compile for backwards compat <ide> import "ember-template-compiler/compat"; <ide> registerPlugin('ast', TransformBindAttrToAttributes); <ide> registerPlugin('ast', TransformSingleArgEach); <ide> registerPlugin('ast', TransformEachIntoCollection); <ide> registerPlugin('ast', TransformOldBindingSyntax); <add>registerPlugin('ast', TransformOldClassBindingSyntax); <ide> <ide> export { <ide> _Ember, <ide><path>packages/ember-template-compiler/lib/plugins/transform-old-class-binding-syntax.js <add>import Ember from 'ember-metal/core'; <add> <add>export default function TransformOldClassBindingSyntax() { <add> this.syntax = null; <add>} <add> <add>TransformOldClassBindingSyntax.prototype.transform = function TransformOldClassBindingSyntax_transform(ast) { <add> var b = this.syntax.builders; <add> var walker = new this.syntax.Walker(); <add> <add> walker.visit(ast, function(node) { <add> if (!validate(node)) { return; } <add> <add> let allOfTheMicrosyntaxes = []; <add> let allOfTheMicrosyntaxIndexes = []; <add> let classPair; <add> <add> each(node.hash.pairs, (pair, index) => { <add> let { key } = pair; <add> <add> if (key === 'classBinding' || key === 'classNameBindings') { <add> allOfTheMicrosyntaxIndexes.push(index); <add> allOfTheMicrosyntaxes.push(pair); <add> } else if (key === 'class') { <add> classPair = pair; <add> } <add> }); <add> <add> if (allOfTheMicrosyntaxes.length === 0) { return; } <add> <add> let classValue = []; <add> <add> if (classPair) { <add> classValue.push(classPair.value); <add> } else { <add> classPair = b.pair('class', null); <add> node.hash.pairs.push(classPair); <add> } <add> <add> each(allOfTheMicrosyntaxIndexes, index => { <add> node.hash.pairs.splice(index, 1); <add> }); <add> <add> each(allOfTheMicrosyntaxes, ({ value, loc }) => { <add> let sexprs = []; <add> <add> let sourceInformation = ""; <add> if (loc) { <add> let { start, source } = loc; <add> <add> sourceInformation = `@ ${start.line}:${start.column} in ${source || '(inline)'}`; <add> } <add> <add> // TODO: Parse the microsyntax and offer the correct information <add> Ember.deprecate(`You're using legacy class binding syntax: classBinding=${exprToString(value)} ${sourceInformation}. 
Please replace with class=""`); <add> <add> if (value.type === 'StringLiteral') { <add> let microsyntax = parseMicrosyntax(value.original); <add> <add> buildSexprs(microsyntax, sexprs, b); <add> <add> classValue.push.apply(classValue, sexprs); <add> } <add> }); <add> <add> let hash = b.hash([b.pair('separator', b.string(' '))]); <add> classPair.value = b.sexpr(b.string('-concat'), classValue, hash); <add> }); <add> <add> return ast; <add>}; <add> <add>function buildSexprs(microsyntax, sexprs, b) { <add> for (var i=0, l=microsyntax.length; i<l; i++) { <add> let [propName, activeClass, inactiveClass] = microsyntax[i]; <add> let sexpr; <add> <add> // :my-class-name microsyntax for static values <add> if (propName === '') { <add> sexpr = b.string(activeClass); <add> } else { <add> let params = [b.path(propName)]; <add> <add> if (activeClass) { <add> params.push(b.string(activeClass)); <add> } else { <add> let sexprParams = [b.string(propName), b.path(propName)]; <add> <add> let hash = b.hash(); <add> if (activeClass !== undefined) { <add> hash.pairs.push(b.pair('activeClass', b.string(activeClass))); <add> } <add> <add> if (inactiveClass !== undefined) { <add> hash.pairs.push(b.pair('inactiveClass', b.string(inactiveClass))); <add> } <add> <add> params.push(b.sexpr(b.string('-normalize-class'), sexprParams, hash)); <add> } <add> <add> if (inactiveClass) { <add> params.push(b.string(inactiveClass)); <add> } <add> <add> sexpr = b.sexpr(b.string('if'), params); <add> } <add> <add> sexprs.push(sexpr); <add> } <add>} <add> <add>function validate(node) { <add> return (node.type === 'BlockStatement' || node.type === 'MustacheStatement'); <add>} <add> <add>function each(list, callback) { <add> for (var i=0, l=list.length; i<l; i++) { <add> callback(list[i], i); <add> } <add>} <add> <add>function parseMicrosyntax(string) { <add> var segments = string.split(' '); <add> <add> for (var i=0, l=segments.length; i<l; i++) { <add> segments[i] = segments[i].split(':'); <add> } <add> <add> return segments; <add>} <add> <add>function exprToString(expr) { <add> switch (expr.type) { <add> case 'StringLiteral': return `"${expr.original}"`; <add> case 'PathExpression': return expr.original; <add> } <add>} <add> <ide><path>packages/ember-views/lib/system/build-component-template.js <ide> import { internal, render } from "htmlbars-runtime"; <ide> import { read } from "ember-metal/streams/utils"; <ide> import { get } from "ember-metal/property_get"; <add>import { isGlobal } from "ember-metal/path_cache"; <ide> <ide> export default function buildComponentTemplate(componentInfo, attrs, content) { <ide> var component, layoutTemplate, blockToRender; <ide> function normalizeClass(component, attrs) { <ide> var classNameBindings = get(component, 'classNameBindings'); <ide> <ide> if (attrs.class) { <del> normalizedClass.push(['value', attrs.class]); <add> if (typeof attrs.class === 'string') { <add> normalizedClass.push(attrs.class); <add> } else { <add> normalizedClass.push(['subexpr', '-normalize-class', [['value', attrs.class.path], ['value', attrs.class]], []]); <add> } <add> } <add> <add> if (attrs.classBinding) { <add> normalizeClasses(attrs.classBinding.split(' '), normalizedClass); <ide> } <ide> <ide> if (attrs.classNames) { <ide> function normalizeClass(component, attrs) { <ide> } <ide> <ide> if (classNameBindings) { <del> for (i=0, l=classNameBindings.length; i<l; i++) { <del> var className = classNameBindings[i]; <del> var [propName, activeClass, inactiveClass] = className.split(':'); <del> var prop = 'view.' 
+ propName; <del> <del> normalizedClass.push(['subexpr', '-normalize-class', [ <del> // params <del> ['value', propName], <del> ['get', prop] <del> ], [ <del> // hash <del> 'activeClass', activeClass, <del> 'inactiveClass', inactiveClass <del> ]]); <del> } <add> normalizeClasses(classNameBindings, normalizedClass); <ide> } <ide> <ide> var last = normalizedClass.length - 1; <ide> function normalizeClass(component, attrs) { <ide> return ['concat', output]; <ide> } <ide> } <add> <add>function normalizeClasses(classes, output) { <add> var i, l; <add> <add> for (i=0, l=classes.length; i<l; i++) { <add> var className = classes[i]; <add> var [propName, activeClass, inactiveClass] = className.split(':'); <add> <add> // Legacy :class microsyntax for static class names <add> if (propName === '') { <add> output.push(activeClass); <add> return; <add> } <add> <add> // 2.0TODO: Remove deprecated global path <add> var prop = isGlobal(propName) ? propName : 'view.' + propName; <add> <add> output.push(['subexpr', '-normalize-class', [ <add> // params <add> ['value', propName], <add> ['get', prop] <add> ], [ <add> // hash <add> 'activeClass', activeClass, <add> 'inactiveClass', inactiveClass <add> ]]); <add> } <add>}
7
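The transform in the record above parses the legacy `classBinding` microsyntax (`prop:activeClass:inactiveClass`, with a leading `:` marking a static class name) before rewriting it into subexpressions. A standalone sketch of just that parsing step, mirroring the `parseMicrosyntax` helper in the patch; the sample input is invented for illustration:

```js
// Split space-separated "propName:activeClass:inactiveClass" segments.
function parseMicrosyntax(string) {
  return string.split(' ').map(function(segment) {
    return segment.split(':');
  });
}

var parsed = parseMicrosyntax('isEnabled:enabled:disabled :static-name isUrgent');
console.log(parsed);
// [ [ 'isEnabled', 'enabled', 'disabled' ],
//   [ '', 'static-name' ],
//   [ 'isUrgent' ] ]
```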
PHP
PHP
fix indentation and double slashes
e48a2d723ba6b00131b2fbb406b3698663b97013
<ide><path>application/config/application.php <ide> 'Blade' => 'Laravel\\Blade', <ide> 'Bundle' => 'Laravel\\Bundle', <ide> 'Cache' => 'Laravel\\Cache', <del> 'Command' => 'Laravel\CLI\Command', <add> 'Command' => 'Laravel\\CLI\\Command', <ide> 'Config' => 'Laravel\\Config', <ide> 'Controller' => 'Laravel\\Routing\\Controller', <ide> 'Cookie' => 'Laravel\\Cookie',
1
PHP
PHP
clarify error message
c4b80d2f7d4f47e68baafaa2510d6c85ee19f609
<ide><path>src/ORM/Table.php <ide> public function save(EntityInterface $entity, $options = []) { <ide> * @param \Cake\Datasource\EntityInterface $entity the entity to be saved <ide> * @param array $options the options to use for the save operation <ide> * @return \Cake\Datasource\EntityInterface|bool <add> * @throws \RuntimeException When an entity is missing some of the primary keys. <ide> */ <ide> protected function _processSave($entity, $options) { <del> $primary = $entity->extract((array)$this->primaryKey()); <add> $primaryColumns = (array)$this->primaryKey(); <add> $primary = $entity->extract($primaryColumns); <ide> <ide> if ($primary && $entity->isNew()) { <ide> $alias = $this->alias(); <ide> protected function _insert($entity, $data) { <ide> $primary = (array)$this->primaryKey(); <ide> if (empty($primary)) { <ide> $msg = sprintf( <del> 'Cannot insert row in "%s", it has no primary key.', <add> 'Cannot insert row in "%s" table, it has no primary key.', <ide> $this->table() <ide> ); <ide> throw new \RuntimeException($msg); <ide><path>tests/TestCase/ORM/TableTest.php <ide> public function testAfterSaveNotCalled() { <ide> * <ide> * @group save <ide> * @expectedException \RuntimeException <del> * @expectedExceptionMessage Cannot insert row in "users", it has no primary key <add> * @expectedExceptionMessage Cannot insert row in "users" table, it has no primary key <ide> * @return void <ide> */ <ide> public function testSaveNewErrorOnNoPrimaryKey() {
2
Java
Java
improve performance of stringutils#trimwhitespace
6545cab42c238170f770b1e574088238574275e9
<ide><path>spring-core/src/main/java/org/springframework/util/StringUtils.java <ide> public static String trimWhitespace(String str) { <ide> return str; <ide> } <ide> <del> StringBuilder sb = new StringBuilder(str); <del> while (sb.length() > 0 && Character.isWhitespace(sb.charAt(0))) { <del> sb.deleteCharAt(0); <add> int beginIndex = 0; <add> int endIndex = str.length() - 1; <add> <add> while (beginIndex <= endIndex && Character.isWhitespace(str.charAt(beginIndex))) { <add> beginIndex++; <ide> } <del> while (sb.length() > 0 && Character.isWhitespace(sb.charAt(sb.length() - 1))) { <del> sb.deleteCharAt(sb.length() - 1); <add> <add> while (endIndex > beginIndex && Character.isWhitespace(str.charAt(endIndex))) { <add> endIndex--; <ide> } <del> return sb.toString(); <add> <add> return str.substring(beginIndex, endIndex + 1); <ide> } <ide> <ide> /**
1
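The Spring change above replaces repeated single-character deletions on a `StringBuilder` with two index scans and a single `substring` call. The same idea, sketched here in JavaScript purely for illustration rather than as the library's code:

```js
function trimWhitespace(str) {
  if (!str) {
    return str;
  }

  let begin = 0;
  let end = str.length - 1;

  // Advance past leading whitespace.
  while (begin <= end && /\s/.test(str[begin])) {
    begin++;
  }

  // Back up past trailing whitespace.
  while (end > begin && /\s/.test(str[end])) {
    end--;
  }

  // One substring call instead of repeated character deletions.
  return str.substring(begin, end + 1);
}

console.log(JSON.stringify(trimWhitespace('  hello world  '))); // "hello world"
console.log(JSON.stringify(trimWhitespace('   ')));             // ""
```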
Text
Text
add v3.6.0-beta.3 to changelog
0f55cd9d21a6de8202e3d761b00f0d9e95d5501e
<ide><path>CHANGELOG.md <ide> # Ember Changelog <ide> <add>### v3.6.0-beta.3 (November 5, 2018) <add> <add>- [#17169](https://github.com/emberjs/ember.js/pull/17169) [BUGFIX] Add default implementations of Component lifecycle hooks <add>- [#17165](https://github.com/emberjs/ember.js/pull/17165) [BUGFIX] Fix RouteInfo.find and transition.froms <add>- [#17180](https://github.com/emberjs/ember.js/pull/17180) [BUGFIX] Router Service State should be correct in events <add> <ide> ### v3.6.0-beta.2 (October 29, 2018) <ide> <ide> - [#17130](https://github.com/emberjs/ember.js/pull/17130) [BUGFIX] Ensure that timers scheduled after a system sleep are fired properly.
1
Text
Text
remove extraneous paragraph from assert doc
2f1f48ac2ad2b75169b909d112d760ce68d01abc
<ide><path>doc/api/assert.md <ide> The `assert` module provides a simple set of assertion tests that can be used to <ide> test invariants. <ide> <del>The API for the `assert` module is [Locked][]. This means that there will be no <del>additions or changes to any of the methods implemented and exposed by the <del>module. <del> <ide> ## assert(value[, message]) <ide> <!-- YAML <ide> added: v0.5.9 <ide> assert.throws(myFunction, 'missing foo', 'did not throw with expected message'); <ide> assert.throws(myFunction, /missing foo/, 'did not throw with expected message'); <ide> ``` <ide> <del>[Locked]: documentation.html#documentation_stability_index <ide> [`assert.deepEqual()`]: #assert_assert_deepequal_actual_expected_message <ide> [`assert.deepStrictEqual()`]: #assert_assert_deepstrictequal_actual_expected_message <ide> [`assert.ok()`]: #assert_assert_ok_value_message
1
Ruby
Ruby
add test for command help strings
2c8544832eb75d2ce7a76ac178d1018c166d653a
<ide><path>Library/Homebrew/cask/lib/hbc/cli/base.rb <ide> def self.cask_tokens_from(args) <ide> end <ide> <ide> def self.help <del> "No help available for the #{command_name} command" <add> nil <ide> end <ide> <ide> def self.needs_init? <ide><path>Library/Homebrew/cask/lib/hbc/cli/internal_audit_modified_casks.rb <ide> def self.posargs(args) <ide> args.reject { |a| a.empty? || a.chars.first == "-" } <ide> end <ide> <add> def self.help <add> "audit all modified Casks in a given commit range" <add> end <add> <ide> def self.usage <ide> <<-EOS.undent <ide> Usage: brew cask _audit_modified_casks [options...] <commit range> <ide><path>Library/Homebrew/test/cask/cli_spec.rb <ide> described_class.process("noop") <ide> end <ide> end <add> <add> it "provides a help message for all commands" do <add> described_class.command_classes.each do |command_class| <add> expect(command_class.help).to match(/\w+/), command_class.name <add> end <add> end <ide> end
3
Text
Text
apply sentence case to headers in pull-requests.md
7c7b98339cef7738232117d336569103c95ac78b
<ide><path>doc/guides/contributing/pull-requests.md <ide> * [Setting up your local environment](#setting-up-your-local-environment) <ide> * [Step 1: Fork](#step-1-fork) <ide> * [Step 2: Branch](#step-2-branch) <del>* [The Process of Making Changes](#the-process-of-making-changes) <add>* [The process of making changes](#the-process-of-making-changes) <ide> * [Step 3: Code](#step-3-code) <ide> * [Step 4: Commit](#step-4-commit) <ide> * [Commit message guidelines](#commit-message-guidelines) <ide> * [Step 5: Rebase](#step-5-rebase) <ide> * [Step 6: Test](#step-6-test) <ide> * [Step 7: Push](#step-7-push) <del> * [Step 8: Opening the Pull Request](#step-8-opening-the-pull-request) <del> * [Step 9: Discuss and Update](#step-9-discuss-and-update) <del> * [Approval and Request Changes Workflow](#approval-and-request-changes-workflow) <add> * [Step 8: Opening the pull request](#step-8-opening-the-pull-request) <add> * [Step 9: Discuss and update](#step-9-discuss-and-update) <add> * [Approval and request changes workflow](#approval-and-request-changes-workflow) <ide> * [Step 10: Landing](#step-10-landing) <del>* [Reviewing Pull Requests](#reviewing-pull-requests) <add>* [Reviewing pull requests](#reviewing-pull-requests) <ide> * [Review a bit at a time](#review-a-bit-at-a-time) <ide> * [Be aware of the person behind the code](#be-aware-of-the-person-behind-the-code) <ide> * [Respect the minimum wait time for comments](#respect-the-minimum-wait-time-for-comments) <del> * [Abandoned or Stalled Pull Requests](#abandoned-or-stalled-pull-requests) <add> * [Abandoned or stalled pull requests](#abandoned-or-stalled-pull-requests) <ide> * [Approving a change](#approving-a-change) <ide> * [Accept that there are different opinions about what belongs in Node.js](#accept-that-there-are-different-opinions-about-what-belongs-in-nodejs) <ide> * [Performance is not everything](#performance-is-not-everything) <del> * [Continuous Integration Testing](#continuous-integration-testing) <add> * [Continuous integration testing](#continuous-integration-testing) <ide> * [Notes](#notes) <del> * [Commit Squashing](#commit-squashing) <del> * [Getting Approvals for your Pull Request](#getting-approvals-for-your-pull-request) <del> * [CI Testing](#ci-testing) <del> * [Waiting Until the Pull Request Gets Landed](#waiting-until-the-pull-request-gets-landed) <del> * [Check Out the Collaborator Guide](#check-out-the-collaborator-guide) <del> * [Appendix: Subsystems](#appendix-subsystems) <add> * [Commit squashing](#commit-squashing) <add> * [Getting approvals for your pull request](#getting-approvals-for-your-pull-request) <add> * [CI testing](#ci-testing) <add> * [Waiting until the pull request gets landed](#waiting-until-the-pull-request-gets-landed) <add> * [Check out the collaborator guide](#check-out-the-collaborator-guide) <add> * [Appendix: subsystems](#appendix-subsystems) <ide> <ide> ## Dependencies <ide> <ide> directly off of the `master` branch. <ide> $ git checkout -b my-branch -t upstream/master <ide> ``` <ide> <del>## The Process of Making Changes <add>## The process of making changes <ide> <ide> ### Step 3: Code <ide> <ide> your fork on GitHub. <ide> $ git push origin my-branch <ide> ``` <ide> <del>### Step 8: Opening the Pull Request <add>### Step 8: Opening the pull request <ide> <ide> From within GitHub, opening a new Pull Request will present you with a <ide> [pull request template][]. Please try to do your best at filling out the <ide> awaiting an answer on something. 
If you encounter words or acronyms that <ide> seem unfamiliar, refer to this <ide> [glossary](https://sites.google.com/a/chromium.org/dev/glossary). <ide> <del>#### Approval and Request Changes Workflow <add>#### Approval and request changes workflow <ide> <ide> All Pull Requests require "sign off" in order to land. Whenever a contributor <ide> reviews a Pull Request they may find specific details that they would like to <ide> point, but don't worry. If you look at the branch you raised your <ide> Pull Request against (probably `master`), you should see a commit with <ide> your name on it. Congratulations and thanks for your contribution! <ide> <del>## Reviewing Pull Requests <add>## Reviewing pull requests <ide> <ide> All Node.js contributors who choose to review and provide feedback on Pull <ide> Requests have a responsibility to both the project and the individual making the <ide> matter experts. When in doubt, do not rush. <ide> Trivial changes, typically limited to small formatting changes or fixes to <ide> documentation, may be landed within the minimum 48 hour window. <ide> <del>### Abandoned or Stalled Pull Requests <add>### Abandoned or stalled pull requests <ide> <ide> If a Pull Request appears to be abandoned or stalled, it is polite to first <ide> check with the contributor to see if they intend to continue the work before <ide> advice on what would make the Pull Request acceptable, and do not assume that <ide> the contributor should already know how to do that. Be explicit in your <ide> feedback. <ide> <del>### Continuous Integration Testing <add>### Continuous integration testing <ide> <ide> All Pull Requests that contain changes to code must be run through <ide> continuous integration (CI) testing at [https://ci.nodejs.org/][]. <ide> whether the failure was caused by the changes in the Pull Request. <ide> <ide> ## Notes <ide> <del>### Commit Squashing <add>### Commit squashing <ide> <ide> In most cases, do not squash commits that you add to your Pull Request during <ide> the review process. When the commits in your Pull Request land, they may be <ide> can be a good example. It touches the implementation, the documentation, <ide> and the tests, but is still one logical change. All tests should always pass <ide> when each individual commit lands on the master branch. <ide> <del>### Getting Approvals for Your Pull Request <add>### Getting approvals for your pull request <ide> <ide> A Pull Request is approved either by saying LGTM, which stands for <ide> "Looks Good To Me", or by using GitHub's Approve button. <ide> After you push new changes to your branch, you need to get <ide> approval for these new changes again, even if GitHub shows "Approved" <ide> because the reviewers have hit the buttons before. <ide> <del>### CI Testing <add>### CI testing <ide> <ide> Every Pull Request needs to be tested <ide> to make sure that it works on the platforms that Node.js <ide> Only a Collaborator can start a CI run. Usually one of them will do it <ide> for you as approvals for the Pull Request come in. <ide> If not, you can ask a Collaborator to start a CI run. <ide> <del>### Waiting Until the Pull Request Gets Landed <add>### Waiting until the pull request gets landed <ide> <ide> A Pull Request needs to stay open for at least 48 hours from when it is <ide> submitted, even after it gets approved and passes the CI. This is to make sure <ide> collaborators may decide it doesn't need to wait. A Pull Request may well take <ide> longer to be merged in. 
All these precautions are important because Node.js is <ide> widely used, so don't be discouraged! <ide> <del>### Check Out the Collaborator Guide <add>### Check out the collaborator guide <ide> <ide> If you want to know more about the code review and the landing process, see the <ide> [Collaborator Guide][]. <ide> <del>### Appendix: Subsystems <add>### Appendix: subsystems <ide> <ide> * `lib/*.js` (`assert`, `buffer`, etc.) <ide> * `build`
1
Javascript
Javascript
add source location to debug logging info
2245206c4ce7b5c47ba67e677ca7e9127b8c2926
<ide><path>src/backend/renderer.js <ide> export function attach( <ide> owners, <ide> <ide> // Location of component in source coude. <del> source: _debugSource, <add> source: _debugSource || null, <ide> }; <ide> } <ide> <ide> export function attach( <ide> if (nativeNodes !== null) { <ide> console.log('Nodes:', nativeNodes); <ide> } <add> if (result.source !== null) { <add> console.log('Location:', result.source); <add> } <ide> if (window.chrome || /firefox/i.test(navigator.userAgent)) { <ide> console.log( <ide> 'Right-click any value to save it as a global variable for further inspection.' <ide><path>src/backend/types.js <ide> export type InspectedElement = {| <ide> owners: Array<Owner> | null, <ide> <ide> // Location of component in source coude. <del> source: Object | null, <add> source: Source | null, <ide> <ide> type: ElementType, <ide> |}; <ide><path>src/devtools/views/Components/types.js <ide> export type OwnersList = {| <ide> owners: Array<Owner> | null, <ide> |}; <ide> <add>export type Source = {| <add> fileName: string, <add> lineNumber: number, <add>|}; <add> <ide> export type InspectedElement = {| <ide> id: number, <ide> <ide> export type InspectedElement = {| <ide> owners: Array<Owner> | null, <ide> <ide> // Location of component in source coude. <del> source: Object | null, <add> source: Source | null, <ide> <ide> type: ElementType, <ide> |};
3
Ruby
Ruby
fix typo in connection base documentation
7ea5619e760edc0adc6eb0749113e129221f4a13
<ide><path>lib/action_cable/connection/base.rb <ide> def initialize(server, env) <ide> end <ide> <ide> # Called by the server when a new websocket connection is established. This configures the callbacks intended for overwriting by the user. <del> # This method should now be called directly. Rely on the #connect (and #disconnect) callback instead. <add> # This method should not be called directly. Rely on the #connect (and #disconnect) callback instead. <ide> def process <ide> logger.info started_request_message <ide>
1
Javascript
Javascript
check ecdsa psychic signature
8d0f49c09355647f711eb14ec3a22471dbd056b3
<ide><path>test/parallel/test-crypto-psychic-signatures.js <add>'use strict'; <add>const common = require('../common'); <add>if (!common.hasCrypto) <add> common.skip('missing crypto'); <add> <add>const assert = require('assert'); <add> <add>const crypto = require('crypto'); <add> <add>// Tests for CVE-2022-21449 <add>// https://neilmadden.blog/2022/04/19/psychic-signatures-in-java/ <add>// Dubbed "Psychic Signatures", these signatures bypassed the ECDSA signature <add>// verification implementation in Java in 15, 16, 17, and 18. OpenSSL is not <add>// (and was not) vulnerable so these are a precaution. <add> <add>const vectors = { <add> 'ieee-p1363': [ <add> Buffer.from('0000000000000000000000000000000000000000000000000000000000000000' + <add> '0000000000000000000000000000000000000000000000000000000000000000', 'hex'), <add> Buffer.from('ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632551' + <add> 'ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632551', 'hex'), <add> ], <add> 'der': [ <add> Buffer.from('3046022100' + <add> '0000000000000000000000000000000000000000000000000000000000000000' + <add> '022100' + <add> '0000000000000000000000000000000000000000000000000000000000000000', 'hex'), <add> Buffer.from('3046022100' + <add> 'ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632551' + <add> '022100' + <add> 'ffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632551', 'hex'), <add> ], <add>}; <add> <add>const keyPair = crypto.generateKeyPairSync('ec', { <add> namedCurve: 'P-256', <add> publicKeyEncoding: { <add> format: 'der', <add> type: 'spki' <add> }, <add>}); <add> <add>const data = Buffer.from('Hello!'); <add> <add>for (const [encoding, signatures] of Object.entries(vectors)) { <add> for (const signature of signatures) { <add> const key = { <add> key: keyPair.publicKey, <add> format: 'der', <add> type: 'spki', <add> dsaEncoding: encoding, <add> }; <add> <add> // one-shot sync <add> assert.strictEqual( <add> crypto.verify( <add> 'sha256', <add> data, <add> key, <add> signature, <add> ), <add> false, <add> ); <add> <add> // one-shot async <add> crypto.verify( <add> 'sha256', <add> data, <add> key, <add> signature, <add> common.mustSucceed((verified) => assert.strictEqual(verified, false)), <add> ); <add> <add> // stream <add> assert.strictEqual( <add> crypto.createVerify('sha256') <add> .update(data) <add> .verify(key, signature), <add> false, <add> ); <add> <add> // webcrypto <add> crypto.webcrypto.subtle.importKey( <add> 'spki', <add> keyPair.publicKey, <add> { name: 'ECDSA', namedCurve: 'P-256' }, <add> false, <add> ['verify'], <add> ).then((publicKey) => { <add> return crypto.webcrypto.subtle.verify( <add> { name: 'ECDSA', hash: 'SHA-256' }, <add> publicKey, <add> signature, <add> data, <add> ); <add> }).then(common.mustCall((verified) => { <add> assert.strictEqual(verified, false); <add> })); <add> } <add>}
1
PHP
PHP
move missingpluginexception under core/error
bdcf810b018d0c5cc2aa6c2d4ea7111db33d1a66
<add><path>src/Core/Error/MissingPluginException.php <del><path>src/Error/MissingPluginException.php <ide> * @since 3.0.0 <ide> * @license MIT License (http://www.opensource.org/licenses/mit-license.php) <ide> */ <del>namespace Cake\Error; <add>namespace Cake\Core\Error; <add> <add>use Cake\Error\Exception; <ide> <ide> /** <ide> * Exception raised when a plugin could not be found <ide><path>src/Core/Plugin.php <ide> namespace Cake\Core; <ide> <ide> use Cake\Core\ClassLoader; <del>use Cake\Error; <ide> use Cake\Utility\Inflector; <ide> <ide> /** <ide> class Plugin { <ide> * <ide> * @param string|array $plugin name of the plugin to be loaded in CamelCase format or array or plugins to load <ide> * @param array $config configuration options for the plugin <del> * @throws \Cake\Error\MissingPluginException if the folder for the plugin to be loaded is not found <add> * @throws \Cake\Core\Error\MissingPluginException if the folder for the plugin to be loaded is not found <ide> * @return void <ide> */ <ide> public static function load($plugin, array $config = []) { <ide> public static function loadAll(array $options = []) { <ide> * <ide> * @param string $plugin name of the plugin in CamelCase format <ide> * @return string path to the plugin folder <del> * @throws \Cake\Error\MissingPluginException if the folder for plugin was not found or plugin has not been loaded <add> * @throws \Cake\Core\Error\MissingPluginException if the folder for plugin was not found or plugin has not been loaded <ide> */ <ide> public static function path($plugin) { <ide> if (empty(static::$_plugins[$plugin])) { <ide><path>tests/TestCase/Core/PluginTest.php <ide> public function testIgnoreMissingFiles() { <ide> * Tests that Plugin::load() throws an exception on unknown plugin <ide> * <ide> * @return void <del> * @expectedException \Cake\Error\MissingPluginException <add> * @expectedException \Cake\Core\Error\MissingPluginException <ide> */ <ide> public function testLoadNotFound() { <ide> Plugin::load('MissingPlugin'); <ide> public function testPath() { <ide> * Tests that Plugin::path() throws an exception on unknown plugin <ide> * <ide> * @return void <del> * @expectedException \Cake\Error\MissingPluginException <add> * @expectedException \Cake\Core\Error\MissingPluginException <ide> */ <ide> public function testPathNotFound() { <ide> Plugin::path('TestPlugin');
3
Text
Text
add redux-history-transitions to ecosystem
34bcdd9fd01ad433daa51ddf919d320802a10990
<ide><path>docs/introduction/Ecosystem.md <ide> On this page we will only feature a few of them that the Redux maintainers have <ide> ## Store Enhancers <ide> <ide> * [redux-batched-subscribe](https://github.com/tappleby/redux-batched-subscribe) — Customize batching and debouncing calls to the store subscribers <add>* [redux-history-transitions](https://github.com/johanneslumpe/redux-history-transitions) — History transitions based on arbitrary actions <ide> <ide> ## Utilities <ide>
1
Python
Python
fix string interpolation in times
6025cdb992db9446d747ae0855a11e74a5e311c9
<ide><path>spacy/en/tokenizer_exceptions.py <ide> # Times <ide> <ide> for h in range(1, 12 + 1): <del> hour = str(h) <ide> for period in ["a.m.", "am"]: <del> _exc[hour+period] = [ <del> {ORTH: hour}, <add> _exc["%d%s" % (h, period)] = [ <add> {ORTH: "%d" % h}, <ide> {ORTH: period, LEMMA: "a.m."}] <ide> for period in ["p.m.", "pm"]: <del> _exc[hour+period] = [ <del> {ORTH: hour}, <add> _exc["%d%s" % (h, period)] = [ <add> {ORTH: "%d" % h}, <ide> {ORTH: period, LEMMA: "p.m."}] <ide> <ide> <ide><path>spacy/es/tokenizer_exceptions.py <ide> <ide> <ide> for h in range(1, 12 + 1): <del> hour = str(h) <ide> for period in ["a.m.", "am"]: <del> _exc[hour+period] = [ <del> {ORTH: hour}, <add> _exc["%d%s" % (h, period)] = [ <add> {ORTH: "%d" % h}, <ide> {ORTH: period, LEMMA: "a.m."}] <ide> for period in ["p.m.", "pm"]: <del> _exc[hour+period] = [ <del> {ORTH: hour}, <add> _exc["%d%s" % (h, period)] = [ <add> {ORTH: "%d" % h}, <ide> {ORTH: period, LEMMA: "p.m."}] <ide> <ide>
2
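For illustration only, a minimal standalone sketch of the corrected interpolation in the spaCy patch above; ORTH and LEMMA are plain string stand-ins here rather than spaCy's real symbol constants, an assumption made so the snippet runs on its own:

    ORTH, LEMMA = "orth", "lemma"  # stand-ins for spaCy's symbol constants
    _exc = {}
    for h in range(1, 12 + 1):
        for period in ["a.m.", "am"]:
            _exc["%d%s" % (h, period)] = [
                {ORTH: "%d" % h},
                {ORTH: period, LEMMA: "a.m."}]

    print(_exc["3am"])  # [{'orth': '3'}, {'orth': 'am', 'lemma': 'a.m.'}]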
Python
Python
add import fr
dbe47902bcfc61bbb9ca35c86baa81a648aeaeb2
<ide><path>spacy/__init__.py <ide> from . import zh <ide> from . import es <ide> from . import it <add>from . import fr <ide> from . import pt <ide> <ide>
1
Java
Java
improve support for content-disposition types
6e640c806f90b1d116f8613c5aa7ddb7c5af8f39
<ide><path>spring-web/src/main/java/org/springframework/http/ContentDisposition.java <ide> public final class ContentDisposition { <ide> private static final String INVALID_HEADER_FIELD_PARAMETER_FORMAT = <ide> "Invalid header field parameter format (as defined in RFC 5987)"; <ide> <add> /** <add> * The {@literal attachment} content-disposition type. <add> */ <add> private static final String ATTACHMENT = "attachment"; <add> /** <add> * The {@literal form-data} content-disposition type. <add> */ <add> private static final String FORM_DATA = "form-data"; <add> /** <add> * The {@literal inline} content-disposition type. <add> */ <add> private static final String INLINE = "inline"; <ide> <ide> @Nullable <ide> private final String type; <ide> public ZonedDateTime getReadDate() { <ide> return this.readDate; <ide> } <ide> <add> /** <add> * Return whether the {@link #getType() type} is {@literal attachment}. <add> * @since 5.3 <add> */ <add> public boolean isAttachment() { <add> return ATTACHMENT.equals(this.type); <add> } <add> <add> /** <add> * Return whether the {@link #getType() type} is {@literal form-data}. <add> * @since 5.3 <add> */ <add> public boolean isFormData() { <add> return FORM_DATA.equals(this.type); <add> } <add> <add> /** <add> * Return whether the {@link #getType() type} is {@literal inline}. <add> * @since 5.3 <add> */ <add> public boolean isInline() { <add> return INLINE.equals(this.type); <add> } <ide> <ide> @Override <ide> public boolean equals(@Nullable Object other) { <ide> public static Builder builder(String type) { <ide> return new BuilderImpl(type); <ide> } <ide> <add> /** <add> * Return a builder for a {@code ContentDisposition} with <add> * the {@link #ATTACHMENT attachment} type. <add> * @return the builder <add> * @since 5.3 <add> */ <add> public static Builder attachment() { <add> return builder(ATTACHMENT); <add> } <add> <add> /** <add> * Return a builder for a {@code ContentDisposition} with <add> * the {@link #FORM_DATA form-data} type. <add> * @return the builder <add> * @since 5.3 <add> */ <add> public static Builder formData() { <add> return builder(FORM_DATA); <add> } <add> <add> /** <add> * Return a builder for a {@code ContentDisposition} with <add> * the {@link #INLINE inline} type. <add> * @return the builder <add> * @since 5.3 <add> */ <add> public static Builder inline() { <add> return builder(INLINE); <add> } <add> <ide> /** <ide> * Return an empty content disposition. 
<ide> */ <ide><path>spring-web/src/main/java/org/springframework/http/HttpHeaders.java <ide> public List<String> getConnection() { <ide> */ <ide> public void setContentDispositionFormData(String name, @Nullable String filename) { <ide> Assert.notNull(name, "Name must not be null"); <del> ContentDisposition.Builder disposition = ContentDisposition.builder("form-data").name(name); <add> ContentDisposition.Builder disposition = ContentDisposition.formData().name(name); <ide> if (StringUtils.hasText(filename)) { <ide> disposition.filename(filename); <ide> } <ide><path>spring-web/src/test/java/org/springframework/http/ContentDispositionTests.java <ide> <ide> import static org.assertj.core.api.Assertions.assertThat; <ide> import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException; <del>import static org.springframework.http.ContentDisposition.builder; <add>import static org.springframework.http.ContentDisposition.attachment; <add>import static org.springframework.http.ContentDisposition.formData; <ide> <ide> /** <ide> * Unit tests for {@link ContentDisposition} <ide> class ContentDispositionTests { <ide> @SuppressWarnings("deprecation") <ide> void parse() { <ide> assertThat(parse("form-data; name=\"foo\"; filename=\"foo.txt\"; size=123")) <del> .isEqualTo(builder("form-data") <add> .isEqualTo(formData() <ide> .name("foo") <ide> .filename("foo.txt") <ide> .size(123L) <ide> void parse() { <ide> @Test <ide> void parseFilenameUnquoted() { <ide> assertThat(parse("form-data; filename=unquoted")) <del> .isEqualTo(builder("form-data") <add> .isEqualTo(formData() <ide> .filename("unquoted") <ide> .build()); <ide> } <ide> <ide> @Test // SPR-16091 <ide> void parseFilenameWithSemicolon() { <ide> assertThat(parse("attachment; filename=\"filename with ; semicolon.txt\"")) <del> .isEqualTo(builder("attachment") <add> .isEqualTo(attachment() <ide> .filename("filename with ; semicolon.txt") <ide> .build()); <ide> } <ide> <ide> @Test <ide> void parseEncodedFilename() { <ide> assertThat(parse("form-data; name=\"name\"; filename*=UTF-8''%E4%B8%AD%E6%96%87.txt")) <del> .isEqualTo(builder("form-data") <add> .isEqualTo(formData() <ide> .name("name") <ide> .filename("中文.txt", StandardCharsets.UTF_8) <ide> .build()); <ide> void parseEncodedFilename() { <ide> @Test // gh-24112 <ide> void parseEncodedFilenameWithPaddedCharset() { <ide> assertThat(parse("attachment; filename*= UTF-8''some-file.zip")) <del> .isEqualTo(builder("attachment") <add> .isEqualTo(attachment() <ide> .filename("some-file.zip", StandardCharsets.UTF_8) <ide> .build()); <ide> } <ide> <ide> @Test <ide> void parseEncodedFilenameWithoutCharset() { <ide> assertThat(parse("form-data; name=\"name\"; filename*=test.txt")) <del> .isEqualTo(builder("form-data") <add> .isEqualTo(formData() <ide> .name("name") <ide> .filename("test.txt") <ide> .build()); <ide> void parseWithEscapedQuote() { <ide> BiConsumer<String, String> tester = (description, filename) -> <ide> assertThat(parse("form-data; name=\"file\"; filename=\"" + filename + "\"; size=123")) <ide> .as(description) <del> .isEqualTo(builder("form-data").name("file").filename(filename).size(123L).build()); <add> .isEqualTo(formData().name("file").filename(filename).size(123L).build()); <ide> <ide> tester.accept("Escaped quotes should be ignored", <ide> "\\\"The Twilight Zone\\\".txt"); <ide> void parseWithEscapedQuote() { <ide> @SuppressWarnings("deprecation") <ide> void parseWithExtraSemicolons() { <ide> assertThat(parse("form-data; name=\"foo\";; ; filename=\"foo.txt\"; size=123")) 
<del> .isEqualTo(builder("form-data") <add> .isEqualTo(formData() <ide> .name("foo") <ide> .filename("foo.txt") <ide> .size(123L) <ide> void parseDates() { <ide> "creation-date=\"" + creationTime.format(formatter) + "\"; " + <ide> "modification-date=\"" + modificationTime.format(formatter) + "\"; " + <ide> "read-date=\"" + readTime.format(formatter) + "\"")).isEqualTo( <del> builder("attachment") <add> attachment() <ide> .creationDate(creationTime) <ide> .modificationDate(modificationTime) <ide> .readDate(readTime) <ide> void parseIgnoresInvalidDates() { <ide> "creation-date=\"-1\"; " + <ide> "modification-date=\"-1\"; " + <ide> "read-date=\"" + readTime.format(formatter) + "\"")).isEqualTo( <del> builder("attachment") <add> attachment() <ide> .readDate(readTime) <ide> .build()); <ide> } <ide> private static ContentDisposition parse(String input) { <ide> @SuppressWarnings("deprecation") <ide> void format() { <ide> assertThat( <del> builder("form-data") <add> formData() <ide> .name("foo") <ide> .filename("foo.txt") <ide> .size(123L) <ide> void format() { <ide> @Test <ide> void formatWithEncodedFilename() { <ide> assertThat( <del> builder("form-data") <add> formData() <ide> .name("name") <ide> .filename("中文.txt", StandardCharsets.UTF_8) <ide> .build().toString()) <ide> void formatWithEncodedFilename() { <ide> @Test <ide> void formatWithEncodedFilenameUsingUsAscii() { <ide> assertThat( <del> builder("form-data") <add> formData() <ide> .name("name") <ide> .filename("test.txt", StandardCharsets.US_ASCII) <ide> .build() <ide> void formatWithFilenameWithQuotes() { <ide> <ide> BiConsumer<String, String> tester = (input, output) -> { <ide> <del> assertThat(builder("form-data").filename(input).build().toString()) <add> assertThat(formData().filename(input).build().toString()) <ide> .isEqualTo("form-data; filename=\"" + output + "\""); <ide> <del> assertThat(builder("form-data").filename(input, StandardCharsets.US_ASCII).build().toString()) <add> assertThat(formData().filename(input, StandardCharsets.US_ASCII).build().toString()) <ide> .isEqualTo("form-data; filename=\"" + output + "\""); <ide> }; <ide> <ide> void formatWithFilenameWithQuotes() { <ide> @Test <ide> void formatWithEncodedFilenameUsingInvalidCharset() { <ide> assertThatIllegalArgumentException().isThrownBy(() -> <del> builder("form-data") <add> formData() <ide> .name("name") <ide> .filename("test.txt", StandardCharsets.UTF_16) <ide> .build() <ide><path>spring-web/src/test/java/org/springframework/http/HttpHeadersTests.java <ide> void contentDisposition() { <ide> assertThat(disposition).isNotNull(); <ide> assertThat(headers.getContentDisposition()).as("Invalid Content-Disposition header").isEqualTo(ContentDisposition.empty()); <ide> <del> disposition = ContentDisposition.builder("attachment").name("foo").filename("foo.txt").size(123L).build(); <add> disposition = ContentDisposition.attachment().name("foo").filename("foo.txt").size(123L).build(); <ide> headers.setContentDisposition(disposition); <ide> assertThat(headers.getContentDisposition()).as("Invalid Content-Disposition header").isEqualTo(disposition); <ide> } <ide><path>spring-web/src/test/java/org/springframework/http/codec/ResourceHttpMessageReaderTests.java <ide> void readResourceAsMono() throws IOException { <ide> String body = "Test resource content"; <ide> <ide> ContentDisposition contentDisposition = <del> ContentDisposition.builder("attachment").name("file").filename(filename).build(); <add> ContentDisposition.attachment().name("file").filename(filename).build(); <ide> 
<ide> MockClientHttpResponse response = new MockClientHttpResponse(HttpStatus.OK); <ide> response.getHeaders().setContentType(MediaType.TEXT_PLAIN); <ide><path>spring-web/src/test/java/org/springframework/http/converter/ResourceHttpMessageConverterTests.java <ide> public void shouldReadImageResource() throws IOException { <ide> MockHttpInputMessage inputMessage = new MockHttpInputMessage(body); <ide> inputMessage.getHeaders().setContentType(MediaType.IMAGE_JPEG); <ide> inputMessage.getHeaders().setContentDisposition( <del> ContentDisposition.builder("attachment").filename("yourlogo.jpg").build()); <add> ContentDisposition.attachment().filename("yourlogo.jpg").build()); <ide> Resource actualResource = converter.read(Resource.class, inputMessage); <ide> assertThat(FileCopyUtils.copyToByteArray(actualResource.getInputStream())).isEqualTo(body); <ide> assertThat(actualResource.getFilename()).isEqualTo("yourlogo.jpg"); <ide> public void shouldReadInputStreamResource() throws IOException { <ide> MockHttpInputMessage inputMessage = new MockHttpInputMessage(body); <ide> inputMessage.getHeaders().setContentType(MediaType.IMAGE_JPEG); <ide> inputMessage.getHeaders().setContentDisposition( <del> ContentDisposition.builder("attachment").filename("yourlogo.jpg").build()); <add> ContentDisposition.attachment().filename("yourlogo.jpg").build()); <ide> inputMessage.getHeaders().setContentLength(123); <ide> Resource actualResource = converter.read(InputStreamResource.class, inputMessage); <ide> assertThat(actualResource).isInstanceOf(InputStreamResource.class);
6
Ruby
Ruby
add on_macos/on_linux to softwarespec
16270ba185e889cfb92265301b34061a123692d4
<ide><path>Library/Homebrew/software_spec.rb <ide> require "compilers" <ide> require "global" <ide> require "os/mac/version" <add>require "extend/on_os" <ide> <ide> class SoftwareSpec <ide> extend T::Sig <ide> <ide> extend Forwardable <add> include OnOS <ide> <ide> PREDEFINED_OPTIONS = { <ide> universal: Option.new("universal", "Build a universal binary"),
1
Python
Python
use @admin decorator
4ac0fae75b3e9d2a4c011a87310c910762c84cb1
<ide><path>rest_framework/authtoken/admin.py <ide> from rest_framework.authtoken.models import Token <ide> <ide> <add>@admin.register(Token) <ide> class TokenAdmin(admin.ModelAdmin): <ide> list_display = ('key', 'user', 'created') <ide> fields = ('user',) <ide> ordering = ('-created',) <del> <del> <del>admin.site.register(Token, TokenAdmin)
1
Python
Python
add shape partial inference to theano reshape
00cbeecf6c345b5e4964c4d212ed9463126a0492
<ide><path>keras/backend/theano_backend.py <ide> def to_dense(tensor): <ide> return tensor <ide> <ide> <add>def is_explicit_shape(shape): <add> if hasattr(shape, '__iter__'): <add> for x in shape: <add> if x is not None: <add> if not isinstance(x, int): <add> return False <add> return True <add> return False <add> <add> <add> <ide> def variable(value, dtype=None, name=None): <ide> '''Instantiates a variable and returns it. <ide> <ide> def moving_average_update(variable, value, momentum): <ide> <ide> <ide> def dot(x, y): <add> # TODO: `keras_shape` inference. <ide> if is_sparse(x): <ide> return th_sparse_module.basic.structured_dot(x, y) <ide> else: <ide> def batch_dot(x, y, axes=None): <ide> <ide> output_shape = (100, 30) <ide> ''' <add> # TODO: `keras_shape` inference. <ide> if isinstance(axes, int): <ide> axes = (axes, axes) <ide> if axes is None: <ide> def batch_dot(x, y, axes=None): <ide> <ide> <ide> def transpose(x): <add> # TODO: `keras_shape` inference. <ide> return T.transpose(x) <ide> <ide> <ide> def gather(reference, indices): <ide> <ide> Return: a tensor of same type as reference. <ide> ''' <add> # TODO: `keras_shape` inference. <ide> return reference[indices] <ide> <ide> <ide> def concatenate(tensors, axis=-1): <ide> <ide> <ide> def reshape(x, shape): <del> return T.reshape(x, shape) <add> y = T.reshape(x, shape) <add> if is_explicit_shape(shape): <add> y._keras_shape = shape <add> return y <ide> <ide> <ide> def permute_dimensions(x, pattern): <ide> def permute_dimensions(x, pattern): <ide> pattern should be a tuple or list of <ide> dimension indices, e.g. [0, 2, 1]. <ide> ''' <add> # TODO: `keras_shape` inference. <ide> pattern = tuple(pattern) <ide> return x.dimshuffle(pattern) <ide> <ide> def repeat_elements(x, rep, axis): <ide> If x has shape (s1, s2, s3) and axis=1, the output <ide> will have shape (s1, s2 * rep, s3). <ide> ''' <add> # TODO: `keras_shape` inference. <ide> return T.repeat(x, rep, axis=axis) <ide> <ide> <ide> def resize_images(X, height_factor, width_factor, dim_ordering): <ide> by a factor of (height_factor, width_factor). Both factors should be <ide> positive integers. <ide> ''' <add> # TODO: `keras_shape` inference. <ide> if dim_ordering == 'th': <ide> output = repeat_elements(X, height_factor, axis=2) <ide> output = repeat_elements(output, width_factor, axis=3) <ide> def resize_volumes(X, depth_factor, height_factor, width_factor, dim_ordering): <ide> by a factor of (depth_factor, height_factor, width_factor). <ide> Both factors should be positive integers. <ide> ''' <add> # TODO: `keras_shape` inference. <ide> if dim_ordering == 'th': <ide> output = repeat_elements(X, depth_factor, axis=2) <ide> output = repeat_elements(output, height_factor, axis=3) <ide> def repeat(x, n): <ide> If x has shape (samples, dim) and n=2, <ide> the output will have shape (samples, 2, dim). <ide> ''' <add> # TODO: `keras_shape` inference. <ide> assert x.ndim == 2 <ide> x = x.dimshuffle((0, 'x', 1)) <ide> return T.extra_ops.repeat(x, n, axis=1) <ide> def arange(start, stop=None, step=1, dtype='int32'): <ide> <ide> <ide> def tile(x, n): <add> # TODO: `keras_shape` inference. <ide> return T.tile(x, n) <ide> <ide> <ide> def flatten(x): <add> # TODO: `keras_shape` inference. <ide> return T.flatten(x) <ide> <ide> <ide> def batch_flatten(x): <ide> '''Turn a n-D tensor into a 2D tensor where <ide> the first dimension is conserved. <ide> ''' <add> # TODO: `keras_shape` inference. 
<ide> x = T.reshape(x, (x.shape[0], T.prod(x.shape) // x.shape[0])) <ide> return x <ide> <ide> <ide> def expand_dims(x, dim=-1): <ide> '''Add a 1-sized dimension at index "dim". <ide> ''' <add> # TODO: `keras_shape` inference. <ide> pattern = [i for i in range(x.type.ndim)] <ide> if dim < 0: <ide> if x.type.ndim == 0: <ide> def expand_dims(x, dim=-1): <ide> def squeeze(x, axis): <ide> '''Remove a 1-dimension from the tensor at index "axis". <ide> ''' <add> # TODO: `keras_shape` inference. <ide> shape = list(x.shape) <ide> shape.pop(axis) <ide> return T.reshape(x, tuple(shape)) <ide> def temporal_padding(x, padding=1): <ide> Apologies for the inane API, but Theano makes this <ide> really hard. <ide> ''' <add> # TODO: `keras_shape` inference. <ide> input_shape = x.shape <ide> output_shape = (input_shape[0], <ide> input_shape[1] + 2 * padding, <ide> def asymmetric_temporal_padding(x, left_pad=1, right_pad=1): <ide> Apologies for the inane API, but Theano makes this <ide> really hard. <ide> ''' <add> # TODO: `keras_shape` inference. <ide> input_shape = x.shape <ide> output_shape = (input_shape[0], <ide> input_shape[1] + left_pad + right_pad, <ide> def spatial_2d_padding(x, padding=(1, 1), dim_ordering='default'): <ide> '''Pad the 2nd and 3rd dimensions of a 4D tensor <ide> with "padding[0]" and "padding[1]" (resp.) zeros left and right. <ide> ''' <add> # TODO: `keras_shape` inference. <ide> if dim_ordering == 'default': <ide> dim_ordering = image_dim_ordering() <ide> if dim_ordering not in {'th', 'tf'}:
1
Ruby
Ruby
add fast path for cvs dep
bb5e0812fb387f2b2a920c9a89e9750fe28943f8
<ide><path>Library/Homebrew/dependency_collector.rb <ide> def resource_dep(spec, tags) <ide> when strategy <= BazaarDownloadStrategy <ide> Dependency.new("bazaar", tags) <ide> when strategy <= CVSDownloadStrategy <del> Dependency.new("cvs", tags) unless MacOS::Xcode.provides_cvs? <add> Dependency.new("cvs", tags) if MacOS.version >= :mavericks || !MacOS::Xcode.provides_cvs? <ide> when strategy < AbstractDownloadStrategy <ide> # allow unknown strategies to pass through <ide> else
1
PHP
PHP
prevent command double calling
ac42a58f42dbbd25afa1eceb32db8c3334c04bfe
<ide><path>src/Illuminate/Foundation/Testing/PendingCommand.php <ide> class PendingCommand <ide> */ <ide> protected $expectedExitCode; <ide> <add> /** <add> * Determine if command was called. <add> * <add> * @var bool <add> */ <add> private $isCalled = false; <add> <ide> /** <ide> * Create a new pending console command run. <ide> * <ide> public function assertExitCode($exitCode) <ide> */ <ide> public function callNow() <ide> { <add> $this->isCalled = true; <add> <ide> return $this->app[Kernel::class]->call($this->command, $this->parameters); <ide> } <ide> <ide> private function createABufferedOutputMock() <ide> */ <ide> public function __destruct() <ide> { <add> if ($this->isCalled) { <add> return; <add> } <add> <ide> $this->mockConsoleOutput(); <ide> <ide> try {
1
Python
Python
allow the same config in the auto mapping
10c15d2d1e0eb1e0ec41de2e863c15a6bc7579ef
<ide><path>src/transformers/models/auto/configuration_auto.py <ide> def __getitem__(self, key): <ide> module_name = model_type_to_module_name(key) <ide> if module_name not in self._modules: <ide> self._modules[module_name] = importlib.import_module(f".{module_name}", "transformers.models") <del> return getattr(self._modules[module_name], value) <add> if hasattr(self._modules[module_name], value): <add> return getattr(self._modules[module_name], value) <add> <add> # Some of the mappings have entries model_type -> config of another model type. In that case we try to grab the <add> # object at the top level. <add> transformers_module = importlib.import_module("transformers") <add> return getattr(transformers_module, value) <ide> <ide> def keys(self): <ide> return list(self._mapping.keys()) + list(self._extra_content.keys())
1
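The transformers patch above lets the lazy config mapping fall back to the package top level when a model-type submodule does not define the requested attribute. A hedged, generic sketch of that lookup pattern follows; the helper name and default arguments are illustrative, not part of the transformers API, and running it as-is assumes the transformers package is installed:

    import importlib

    def lazy_lookup(module_name, attr,
                    package="transformers.models", top_level="transformers"):
        # Try the per-model submodule first.
        module = importlib.import_module("." + module_name, package)
        if hasattr(module, attr):
            return getattr(module, attr)
        # Fall back to the top-level package, which covers mappings whose
        # entry points at a config class defined under another model type.
        return getattr(importlib.import_module(top_level), attr)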
Python
Python
remove deprecation warnings
dff3c1f90af9053128e6e241e1b5f19d858670f4
<ide><path>tools/refguide_check.py <ide> <ide> if parse_version(sphinx.__version__) >= parse_version('1.5'): <ide> # Enable specific Sphinx directives <del> from sphinx.directives import SeeAlso, Only <add> from sphinx.directives.other import SeeAlso, Only <ide> directives.register_directive('seealso', SeeAlso) <ide> directives.register_directive('only', Only) <ide> else: <ide> def check_output(self, want, got, optionflags): <ide> # and then compare the tuples. <ide> try: <ide> num = len(a_want) <del> regex = ('[\w\d_]+\(' + <del> ', '.join(['[\w\d_]+=(.+)']*num) + <del> '\)') <add> regex = (r'[\w\d_]+\(' + <add> ', '.join([r'[\w\d_]+=(.+)']*num) + <add> r'\)') <ide> grp = re.findall(regex, got.replace('\n', ' ')) <ide> if len(grp) > 1: # no more than one for now <ide> return False
1
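In the refguide_check hunk above, the regex fragments gain an r prefix because escapes such as \w and \( inside an ordinary string literal have emitted DeprecationWarning since Python 3.6; raw strings keep the pattern text unchanged while silencing the warning. A small self-contained check of that same pattern (the sample string is made up for the demo):

    import re

    num = 2
    regex = (r'[\w\d_]+\(' +
             ', '.join([r'[\w\d_]+=(.+)'] * num) +
             r'\)')
    print(re.findall(regex, "poisson(lam=1.0, size=None)"))  # [('1.0', 'None')]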
Java
Java
fix notification task timeout crashing
3580de541db600d39d4182b59a536c69477e46d3
<ide><path>ReactAndroid/src/main/java/com/facebook/react/jstasks/HeadlessJsTaskContext.java <ide> public synchronized int startTask(final HeadlessJsTaskConfig taskConfig) { <ide> " while in foreground, but this is not allowed."); <ide> } <ide> final int taskId = mLastTaskId.incrementAndGet(); <add> mActiveTasks.add(taskId); <ide> reactContext.getJSModule(AppRegistry.class) <ide> .startHeadlessTask(taskId, taskConfig.getTaskKey(), taskConfig.getData()); <ide> if (taskConfig.getTimeout() > 0) { <ide> scheduleTaskTimeout(taskId, taskConfig.getTimeout()); <ide> } <del> mActiveTasks.add(taskId); <ide> for (HeadlessJsTaskEventListener listener : mHeadlessJsTaskEventListeners) { <ide> listener.onHeadlessJsTaskStart(taskId); <ide> } <ide> public synchronized int startTask(final HeadlessJsTaskConfig taskConfig) { <ide> <ide> /** <ide> * Finish a JS task. Doesn't actually stop the task on the JS side, only removes it from the list <del> * of active tasks and notifies listeners. <add> * of active tasks and notifies listeners. A task can only be finished once. <ide> * <ide> * @param taskId the unique id returned by {@link #startTask}. <ide> */ <ide> public void run() { <ide> }); <ide> } <ide> <add> /** <add> * Check if a given task is currently running. A task is stopped if either {@link #finishTask} is <add> * called or it times out. <add> */ <add> public synchronized boolean isTaskRunning(final int taskId) { <add> return mActiveTasks.contains(taskId); <add> } <add> <ide> private void scheduleTaskTimeout(final int taskId, long timeout) { <ide> Runnable runnable = new Runnable() { <ide> @Override <ide><path>ReactAndroid/src/main/java/com/facebook/react/modules/core/HeadlessJsTaskSupportModule.java <ide> <ide> package com.facebook.react.modules.core; <ide> <add>import com.facebook.common.logging.FLog; <ide> import com.facebook.react.bridge.ReactApplicationContext; <ide> import com.facebook.react.bridge.ReactContextBaseJavaModule; <ide> import com.facebook.react.bridge.ReactMethod; <ide> public String getName() { <ide> public void notifyTaskFinished(int taskId) { <ide> HeadlessJsTaskContext headlessJsTaskContext = <ide> HeadlessJsTaskContext.getInstance(getReactApplicationContext()); <del> headlessJsTaskContext.finishTask(taskId); <add> if (headlessJsTaskContext.isTaskRunning(taskId)) { <add> headlessJsTaskContext.finishTask(taskId); <add> } else { <add> FLog.w( <add> HeadlessJsTaskSupportModule.class, <add> "Tried to finish non-active task with id %d. Did it time out?", <add> taskId); <add> } <ide> } <ide> }
2
Python
Python
fix task adoption in ``kubernetesexecutor``
344e8296d7e8a9af61edd0aae4f7784ecf33e5a2
<ide><path>airflow/executors/kubernetes_executor.py <ide> def _change_state(self, key: TaskInstanceKey, state: Optional[str], pod_id: str, <ide> self.event_buffer[key] = state, None <ide> <ide> def try_adopt_task_instances(self, tis: List[TaskInstance]) -> List[TaskInstance]: <del> tis_to_flush = [ti for ti in tis if not ti.external_executor_id] <del> scheduler_job_ids = [ti.external_executor_id for ti in tis] <add> tis_to_flush = [ti for ti in tis if not ti.queued_by_job_id] <add> scheduler_job_ids = {ti.queued_by_job_id for ti in tis} <ide> pod_ids = { <ide> create_pod_id( <ide> dag_id=pod_generator.make_safe_label_value(ti.dag_id), <ide> task_id=pod_generator.make_safe_label_value(ti.task_id), <ide> ): ti <ide> for ti in tis <del> if ti.external_executor_id <add> if ti.queued_by_job_id <ide> } <ide> kube_client: client.CoreV1Api = self.kube_client <ide> for scheduler_job_id in scheduler_job_ids: <ide><path>tests/executors/test_kubernetes_executor.py <ide> def test_change_state_failed_pod_deletion( <ide> assert executor.event_buffer[key][0] == State.FAILED <ide> mock_delete_pod.assert_called_once_with('pod_id', 'test-namespace') <ide> <add> @mock.patch('airflow.executors.kubernetes_executor.KubernetesExecutor.adopt_launched_task') <add> @mock.patch('airflow.executors.kubernetes_executor.KubernetesExecutor._adopt_completed_pods') <add> def test_try_adopt_task_instances(self, mock_adopt_completed_pods, mock_adopt_launched_task): <add> executor = self.kubernetes_executor <add> executor.scheduler_job_id = "10" <add> mock_ti = mock.MagicMock(queued_by_job_id="1", external_executor_id="1", dag_id="dag", task_id="task") <add> pod = k8s.V1Pod(metadata=k8s.V1ObjectMeta(name="foo", labels={"dag_id": "dag", "task_id": "task"})) <add> pod_id = create_pod_id(dag_id="dag", task_id="task") <add> mock_kube_client = mock.MagicMock() <add> mock_kube_client.list_namespaced_pod.return_value.items = [pod] <add> executor.kube_client = mock_kube_client <add> <add> # First adoption <add> executor.try_adopt_task_instances([mock_ti]) <add> mock_kube_client.list_namespaced_pod.assert_called_once_with( <add> namespace='default', label_selector='airflow-worker=1' <add> ) <add> mock_adopt_launched_task.assert_called_once_with(mock_kube_client, pod, {pod_id: mock_ti}) <add> mock_adopt_completed_pods.assert_called_once() <add> # We aren't checking the return value of `try_adopt_task_instances` because it relies on <add> # `adopt_launched_task` mutating its arg. This should be refactored, but not right now. 
<add> <add> # Second adoption (queued_by_job_id and external_executor_id no longer match) <add> mock_kube_client.reset_mock() <add> mock_adopt_launched_task.reset_mock() <add> mock_adopt_completed_pods.reset_mock() <add> <add> mock_ti.queued_by_job_id = "10" # scheduler_job would have updated this after the first adoption <add> executor.scheduler_job_id = "20" <add> <add> executor.try_adopt_task_instances([mock_ti]) <add> mock_kube_client.list_namespaced_pod.assert_called_once_with( <add> namespace='default', label_selector='airflow-worker=10' <add> ) <add> mock_adopt_launched_task.assert_called_once_with(mock_kube_client, pod, {pod_id: mock_ti}) <add> mock_adopt_completed_pods.assert_called_once() <add> <add> @mock.patch('airflow.executors.kubernetes_executor.KubernetesExecutor._adopt_completed_pods') <add> def test_try_adopt_task_instances_multiple_scheduler_ids(self, mock_adopt_completed_pods): <add> """We try to find pods only once per scheduler id""" <add> executor = self.kubernetes_executor <add> mock_kube_client = mock.MagicMock() <add> executor.kube_client = mock_kube_client <add> <add> mock_tis = [ <add> mock.MagicMock(queued_by_job_id="10", external_executor_id="1", dag_id="dag", task_id="task"), <add> mock.MagicMock(queued_by_job_id="40", external_executor_id="1", dag_id="dag", task_id="task2"), <add> mock.MagicMock(queued_by_job_id="40", external_executor_id="1", dag_id="dag", task_id="task3"), <add> ] <add> <add> executor.try_adopt_task_instances(mock_tis) <add> assert mock_kube_client.list_namespaced_pod.call_count == 2 <add> mock_kube_client.list_namespaced_pod.assert_has_calls( <add> [ <add> mock.call(namespace='default', label_selector='airflow-worker=10'), <add> mock.call(namespace='default', label_selector='airflow-worker=40'), <add> ], <add> any_order=True, <add> ) <add> <add> @mock.patch('airflow.executors.kubernetes_executor.KubernetesExecutor.adopt_launched_task') <add> @mock.patch('airflow.executors.kubernetes_executor.KubernetesExecutor._adopt_completed_pods') <add> def test_try_adopt_task_instances_no_matching_pods( <add> self, mock_adopt_completed_pods, mock_adopt_launched_task <add> ): <add> executor = self.kubernetes_executor <add> mock_ti = mock.MagicMock(queued_by_job_id="1", external_executor_id="1", dag_id="dag", task_id="task") <add> mock_kube_client = mock.MagicMock() <add> mock_kube_client.list_namespaced_pod.return_value.items = [] <add> executor.kube_client = mock_kube_client <add> <add> tis_to_flush = executor.try_adopt_task_instances([mock_ti]) <add> assert tis_to_flush == [mock_ti] <add> mock_adopt_launched_task.assert_not_called() <add> mock_adopt_completed_pods.assert_called_once() <add> <ide> @mock.patch('airflow.executors.kubernetes_executor.get_kube_client') <ide> def test_adopt_launched_task(self, mock_kube_client): <ide> executor = self.kubernetes_executor
2
Javascript
Javascript
import only jquery instead of ember in ember-views
72b16349ed083fd5e40735823dbf7e5d95007166
<ide><path>packages/ember-views/lib/views/states/in_buffer.js <ide> import _default from "ember-views/views/states/default"; <ide> import EmberError from "ember-metal/error"; <ide> <del>import Ember from "ember-metal/core"; // Ember.assert <add>import jQuery from "ember-views/system/jquery"; <ide> import { create } from "ember-metal/platform"; <ide> import merge from "ember-metal/merge"; <ide> <ide> merge(inBuffer, { <ide> // rerender the view to allow the render method to reflect the <ide> // changes. <ide> view.rerender(); <del> return Ember.$(); <add> return jQuery(); <ide> }, <ide> <ide> // when a view is rendered in a buffer, rerendering it simply
1
Ruby
Ruby
improve empty installation detection
6826cd68da17560e8440b880c05741a5820358a6
<ide><path>Library/Homebrew/formula_installer.rb <ide> require "cleaner" <ide> require "formula_cellar_checks" <ide> require "install_renamed" <add>require "cmd/audit" <ide> require "cmd/postinstall" <ide> require "hooks/bottles" <ide> require "debrew" <ide> def build <ide> end <ide> end <ide> <del> raise "Empty installation" if Dir["#{formula.prefix}/*"].empty? <add> auditor = FormulaAuditor.new(formula) <add> auditor.audit_prefix_has_contents <add> unless formula.prefix.exist? && auditor.problems.empty? <add> raise "Empty installation" <add> end <ide> <ide> rescue Exception <ide> ignore_interrupts do
1
PHP
PHP
fix other test checking error message
c7042b1fb6bc803d1b8f2ad6e0e56bd554fd8f80
<ide><path>tests/TestCase/Controller/ComponentTest.php <ide> public function testMultipleComponentInitialize() <ide> public function testDuplicateComponentInitialize() <ide> { <ide> $this->expectException(\RuntimeException::class); <del> $this->expectExceptionMessage('The "Banana" alias has already been loaded with the following config:'); <add> $this->expectExceptionMessage('The "Banana" alias has already been loaded. The `property` key'); <ide> $Collection = new ComponentRegistry(); <ide> $Collection->load('Banana', ['property' => ['closure' => function () { <ide> }]]);
1
Javascript
Javascript
improve error handling and reporting
7343b2a89cacce56d36292f109d8cb65df6a2570
<ide><path>lib/container/ContainerEntryModule.js <ide> class ContainerEntryModule extends Module { <ide> "? moduleMap[module]()", <ide> `: Promise.resolve().then(${runtimeTemplate.basicFunction( <ide> "", <del> 'throw new Error("Module " + module + " does not exist in container.");' <add> "throw new Error('Module \"' + module + '\" does not exist in container.');" <ide> )})` <ide> ]) <ide> ]), <ide><path>lib/container/RemoteRuntimeModule.js <ide> class RemoteRuntimeModule extends RuntimeModule { <ide> } <ide> } <ide> return Template.asString([ <add> "var installedModules = {};", <ide> `var chunkMapping = ${JSON.stringify( <ide> chunkToRemotesMapping, <ide> null, <ide> class RemoteRuntimeModule extends RuntimeModule { <ide> `if(${RuntimeGlobals.hasOwnProperty}(chunkMapping, chunkId)) {`, <ide> Template.indent([ <ide> `chunkMapping[chunkId].forEach(${runtimeTemplate.basicFunction("id", [ <del> "if(__webpack_modules__[id]) return;", <add> `if(${RuntimeGlobals.hasOwnProperty}(installedModules, id)) return installedModules[id] && promises.push(installedModules[id]);`, <ide> "var data = idToExternalAndNameMapping[id];", <del> `promises.push(Promise.resolve(__webpack_require__(data[0])(__webpack_require__(data[1])).get(data[2])).then(${runtimeTemplate.basicFunction( <del> "factory", <del> [ <del> `__webpack_modules__[id] = ${runtimeTemplate.basicFunction( <del> "module", <del> ["module.exports = factory();"] <del> )}` <del> ] <del> )}))` <add> `var onError = ${runtimeTemplate.basicFunction("error", [ <add> "if(error && typeof error.message === \"string\") error.message += '\\nwhile loading \"' + data[2] + '\" from ' + data[1];", <add> `__webpack_modules__[id] = ${runtimeTemplate.basicFunction("", [ <add> "throw error;" <add> ])}`, <add> "delete installedModules[id];" <add> ])};`, <add> `var onFactory = ${runtimeTemplate.basicFunction("factory", [ <add> `__webpack_modules__[id] = ${runtimeTemplate.basicFunction( <add> "module", <add> ["module.exports = factory();"] <add> )}` <add> ])};`, <add> "try {", <add> Template.indent([ <add> "var promise = __webpack_require__(data[0])(__webpack_require__(data[1])).get(data[2]);", <add> "if(promise && promise.then) {", <add> Template.indent([ <add> `promises.push(installedModules[id] = promise.then(onFactory, onError));` <add> ]), <add> "} else {", <add> Template.indent([`onFactory(promise);`]), <add> "}" <add> ]), <add> "} catch(error) {", <add> Template.indent(["onError(error);"]), <add> "}" <ide> ])});` <ide> ]), <ide> "}" <ide><path>test/configCases/container/error-handling/evaluation-error-cjs.js <add>export let error; <add>try { <add> require("remote/module"); <add>} catch (err) { <add> error = err; <add>} <ide><path>test/configCases/container/error-handling/evaluation-error-tl-await.js <add>export let error; <add>try { <add> await import("remote/module"); <add>} catch (err) { <add> error = err; <add>} <ide><path>test/configCases/container/error-handling/evaluation-error.js <add>import "remote/module"; <ide><path>test/configCases/container/error-handling/index.js <add>"use strict"; <add> <add>it("should allow to handle remote loading error with import()", async () => { <add> await expect(import("./loading-error")).rejects.toEqual( <add> expect.objectContaining({ <add> code: "ENOENT" <add> }) <add> ); <add>}); <add> <add>it("should allow to handle remote loading error with require", async () => { <add> const { error } = await import("./loading-error-cjs"); <add> expect(error).toEqual( <add> expect.objectContaining({ <add> code: "ENOENT" <add> }) <add> 
); <add>}); <add> <add>it("should allow to handle remote loading error with top-level-await import()", async () => { <add> const { error } = await import("./loading-error-tl-await"); <add> expect(error).toEqual( <add> expect.objectContaining({ <add> code: "ENOENT" <add> }) <add> ); <add>}); <add> <add>it("should allow to handle invalid remote module error with import()", async () => { <add> await expect(import("./invalid-module")).rejects.toEqual( <add> expect.objectContaining({ <add> message: <add> 'Module "invalid" does not exist in container.\nwhile loading "invalid" from webpack/container/reference/remote' <add> }) <add> ); <add>}); <add> <add>it("should allow to handle invalid remote module error with require", async () => { <add> const { error } = await import("./invalid-module-cjs"); <add> expect(error).toEqual( <add> expect.objectContaining({ <add> message: <add> 'Module "invalid" does not exist in container.\nwhile loading "invalid" from webpack/container/reference/remote' <add> }) <add> ); <add>}); <add> <add>it("should allow to handle invalid remote module error with top-level-await import()", async () => { <add> const { error } = await import("./invalid-module-tl-await"); <add> expect(error).toEqual( <add> expect.objectContaining({ <add> message: <add> 'Module "invalid" does not exist in container.\nwhile loading "invalid" from webpack/container/reference/remote' <add> }) <add> ); <add>}); <add> <add>it("should allow to handle remote module evaluation error with import()", async () => { <add> await expect(import("./evaluation-error")).rejects.toEqual( <add> expect.objectContaining({ <add> message: "evaluation error" <add> }) <add> ); <add>}); <add> <add>it("should allow to handle remote module evaluation error with require", async () => { <add> const { error } = await import("./evaluation-error-cjs"); <add> expect(error).toEqual( <add> expect.objectContaining({ <add> message: "evaluation error" <add> }) <add> ); <add>}); <add> <add>it("should allow to handle remote module evaluation error with top-level-await import()", async () => { <add> const { error } = await import("./evaluation-error-tl-await"); <add> expect(error).toEqual( <add> expect.objectContaining({ <add> message: "evaluation error" <add> }) <add> ); <add>}); <ide><path>test/configCases/container/error-handling/invalid-module-cjs.js <add>export let error; <add>try { <add> require("remote/invalid"); <add>} catch (err) { <add> error = err; <add>} <ide><path>test/configCases/container/error-handling/invalid-module-tl-await.js <add>export let error; <add>try { <add> await import("remote/invalid"); <add>} catch (err) { <add> error = err; <add>} <ide><path>test/configCases/container/error-handling/invalid-module.js <add>import "remote/invalid"; <ide><path>test/configCases/container/error-handling/loading-error-cjs.js <add>export let error; <add>try { <add> require("invalid/module"); <add>} catch (err) { <add> error = err; <add>} <ide><path>test/configCases/container/error-handling/loading-error-tl-await.js <add>export let error; <add>try { <add> await import("invalid/module"); <add>} catch (err) { <add> error = err; <add>} <ide><path>test/configCases/container/error-handling/loading-error.js <add>import "invalid/module"; <ide><path>test/configCases/container/error-handling/module.js <add>throw new Error("evaluation error"); <ide><path>test/configCases/container/error-handling/webpack.config.js <add>const { ModuleFederationPlugin } = require("../../../../").container; <add> <add>/** @type 
{import("../../../../").Configuration} */ <add>module.exports = { <add> optimization: { <add> chunkIds: "named", <add> moduleIds: "named" <add> }, <add> output: { <add> strictModuleExceptionHandling: true <add> }, <add> plugins: [ <add> new ModuleFederationPlugin({ <add> name: "container", <add> library: { type: "commonjs-module" }, <add> filename: "container.js", <add> exposes: ["./module"], <add> remotes: { <add> remote: "./container.js", <add> invalid: "./invalid.js" <add> } <add> }) <add> ], <add> experiments: { <add> topLevelAwait: true <add> } <add>};
14
Javascript
Javascript
add stream map tests
ce41395f89414dfd459084ea61a7eeac1f67713a
<ide><path>test/parallel/test-stream-map.js <ide> const { <ide> Readable, <ide> } = require('stream'); <ide> const assert = require('assert'); <add>const { once } = require('events'); <ide> const { setTimeout } = require('timers/promises'); <ide> <ide> { <ide> // Map works on synchronous streams with a synchronous mapper <ide> const stream = Readable.from([1, 2, 3, 4, 5]).map((x) => x + x); <del> const result = [2, 4, 6, 8, 10]; <ide> (async () => { <del> for await (const item of stream) { <del> assert.strictEqual(item, result.shift()); <del> } <add> assert.deepStrictEqual(await stream.toArray(), [2, 4, 6, 8, 10]); <ide> })().then(common.mustCall()); <ide> } <ide> <ide> const { setTimeout } = require('timers/promises'); <ide> await Promise.resolve(); <ide> return x + x; <ide> }); <del> const result = [2, 4, 6, 8, 10]; <add> (async () => { <add> assert.deepStrictEqual(await stream.toArray(), [2, 4, 6, 8, 10]); <add> })().then(common.mustCall()); <add>} <add> <add>{ <add> // Map works on asynchronous streams with a asynchronous mapper <add> const stream = Readable.from([1, 2, 3, 4, 5]).map(async (x) => { <add> return x + x; <add> }).map((x) => x + x); <add> (async () => { <add> assert.deepStrictEqual(await stream.toArray(), [4, 8, 12, 16, 20]); <add> })().then(common.mustCall()); <add>} <add> <add>{ <add> // Map works on an infinite stream <add> const stream = Readable.from(async function* () { <add> while (true) yield 1; <add> }()).map(common.mustCall(async (x) => { <add> return x + x; <add> }, 5)); <add> (async () => { <add> let i = 1; <add> for await (const item of stream) { <add> assert.strictEqual(item, 2); <add> if (++i === 5) break; <add> } <add> })().then(common.mustCall()); <add>} <add> <add>{ <add> // Map works on non-objectMode streams <add> const stream = new Readable({ <add> read() { <add> this.push(Uint8Array.from([1])); <add> this.push(Uint8Array.from([2])); <add> this.push(null); <add> } <add> }).map(async ([x]) => { <add> return x + x; <add> }).map((x) => x + x); <add> const result = [4, 8]; <ide> (async () => { <ide> for await (const item of stream) { <ide> assert.strictEqual(item, result.shift()); <ide> const { setTimeout } = require('timers/promises'); <ide> } <ide> <ide> { <del> // Map works on asynchronous streams with a asynchronous mapper <del> const stream = Readable.from([1, 2, 3, 4, 5]).map(async (x) => { <add> // Does not care about data events <add> const source = new Readable({ <add> read() { <add> this.push(Uint8Array.from([1])); <add> this.push(Uint8Array.from([2])); <add> this.push(null); <add> } <add> }); <add> setImmediate(() => stream.emit('data', Uint8Array.from([1]))); <add> const stream = source.map(async ([x]) => { <ide> return x + x; <ide> }).map((x) => x + x); <del> const result = [4, 8, 12, 16, 20]; <add> const result = [4, 8]; <ide> (async () => { <ide> for await (const item of stream) { <ide> assert.strictEqual(item, result.shift()); <ide> } <ide> })().then(common.mustCall()); <ide> } <ide> <add>{ <add> // Emitting an error during `map` <add> const stream = Readable.from([1, 2, 3, 4, 5]).map(async (x) => { <add> if (x === 3) { <add> stream.emit('error', new Error('boom')); <add> } <add> return x + x; <add> }); <add> assert.rejects( <add> stream.map((x) => x + x).toArray(), <add> /boom/, <add> ).then(common.mustCall()); <add>} <add> <add>{ <add> // Throwing an error during `map` (sync) <add> const stream = Readable.from([1, 2, 3, 4, 5]).map((x) => { <add> if (x === 3) { <add> throw new Error('boom'); <add> } <add> return x + x; <add> }); <add> 
assert.rejects( <add> stream.map((x) => x + x).toArray(), <add> /boom/, <add> ).then(common.mustCall()); <add>} <add> <add> <add>{ <add> // Throwing an error during `map` (async) <add> const stream = Readable.from([1, 2, 3, 4, 5]).map(async (x) => { <add> if (x === 3) { <add> throw new Error('boom'); <add> } <add> return x + x; <add> }); <add> assert.rejects( <add> stream.map((x) => x + x).toArray(), <add> /boom/, <add> ).then(common.mustCall()); <add>} <add> <ide> { <ide> // Concurrency + AbortSignal <ide> const ac = new AbortController(); <del> let calls = 0; <del> const stream = Readable.from([1, 2, 3, 4, 5]).map(async (_, { signal }) => { <del> calls++; <del> await setTimeout(100, { signal }); <del> }, { signal: ac.signal, concurrency: 2 }); <add> const range = Readable.from([1, 2, 3, 4, 5]); <add> const stream = range.map(common.mustCall(async (_, { signal }) => { <add> await once(signal, 'abort'); <add> throw signal.reason; <add> }, 2), { signal: ac.signal, concurrency: 2 }); <ide> // pump <ide> assert.rejects(async () => { <ide> for await (const item of stream) { <del> // nope <del> console.log(item); <add> assert.fail('should not reach here, got ' + item); <ide> } <ide> }, { <ide> name: 'AbortError', <ide> }).then(common.mustCall()); <ide> <ide> setImmediate(() => { <ide> ac.abort(); <del> assert.strictEqual(calls, 2); <ide> }); <ide> } <ide>
1
Ruby
Ruby
restrict frameworks to load in engine test
6adc3da727a8b658784fad9d92927f8a53236793
<ide><path>railties/test/railties/engine_test.rb <ide> def up <ide> end <ide> RUBY <ide> <add> restrict_frameworks <ide> boot_rails <ide> <ide> Dir.chdir(app_path) do <del> # Install Active Storage, Action Mailbox, and Action Text migration files first so as not to affect test. <del> `bundle exec rake active_storage:install action_mailbox:install action_text:install` <ide> output = `bundle exec rake bukkits:install:migrations` <ide> <ide> ["CreateUsers", "AddLastNameToUsers", "CreateSessions"].each do |migration_name| <ide> class CreateUsers < ActiveRecord::Migration::Current; end <ide> class CreateKeys < ActiveRecord::Migration::Current; end <ide> RUBY <ide> <add> restrict_frameworks <ide> boot_rails <ide> <ide> Dir.chdir(app_path) do <del> # Install Active Storage, Action Mailbox, and Action Text migrations first so as not to affect test. <del> `bundle exec rake active_storage:install action_mailbox:install action_text:install` <ide> output = `bundle exec rake railties:install:migrations`.split("\n") <ide> <ide> assert_match(/Copied migration \d+_create_users\.core_engine\.rb from core_engine/, output.first) <ide> def index <ide> def app <ide> Rails.application <ide> end <add> <add> # Restrict frameworks to load in order to avoid engine frameworks affect tests. <add> def restrict_frameworks <add> remove_from_config("require 'rails/all'") <add> remove_from_config("require_relative 'boot'") <add> remove_from_env_config("development", "config.active_storage.*") <add> frameworks = <<~RUBY <add> require "rails" <add> require "active_model/railtie" <add> require "active_job/railtie" <add> require "active_record/railtie" <add> require "action_controller/railtie" <add> require "action_mailer/railtie" <add> require "action_view/railtie" <add> require "sprockets/railtie" <add> require "rails/test_unit/railtie" <add> RUBY <add> environment = File.read("#{app_path}/config/application.rb") <add> File.open("#{app_path}/config/application.rb", "w") { |f| f.puts frameworks + "\n" + environment } <add> end <ide> end <ide> end
1
Ruby
Ruby
remove our use of #outside_transaction?
61951427903dbc0d92f6106ec5874025e2185056
<ide><path>activerecord/lib/active_record/connection_adapters/abstract/database_statements.rb <ide> def delete(arel, name = nil, binds = []) <ide> exec_delete(to_sql(arel, binds), name, binds) <ide> end <ide> <del> # Checks whether there is currently no transaction active. This is done <del> # by querying the database driver, and does not use the transaction <del> # house-keeping information recorded by #increment_open_transactions and <del> # friends. <del> # <del> # Returns true if there is no transaction active, false if there is a <del> # transaction active, and nil if this information is unknown. <del> # <del> # Not all adapters supports transaction state introspection. Currently, <del> # only the PostgreSQL adapter supports this. <del> def outside_transaction? <del> nil <del> end <del> <ide> # Returns +true+ when the connection adapter supports prepared statement <ide> # caching, otherwise returns +false+ <ide> def supports_statement_cache? <ide> def transaction(options = {}) <ide> options.assert_valid_keys :requires_new, :joinable <ide> <ide> if !options[:requires_new] && current_transaction.joinable? <del> within_existing_transaction { yield } <add> yield <ide> else <ide> within_new_transaction(options) { yield } <ide> end <ide> def within_new_transaction(options = {}) #:nodoc: <ide> begin_transaction(options) <ide> yield <ide> rescue Exception => error <del> rollback_transaction unless outside_transaction? <add> rollback_transaction <ide> raise <ide> ensure <del> if outside_transaction? <del> reset_transaction <del> else <del> begin <del> commit_transaction unless error <del> rescue Exception => e <del> rollback_transaction <del> raise <del> end <add> begin <add> commit_transaction unless error <add> rescue Exception => e <add> rollback_transaction <add> raise <ide> end <ide> end <ide> <del> def within_existing_transaction #:nodoc: <del> yield <del> ensure <del> reset_transaction if outside_transaction? <del> end <del> <ide> def current_transaction #:nodoc: <ide> @transaction <ide> end <ide> def rollback_transaction #:nodoc: <ide> @transaction = @transaction.rollback <ide> end <ide> <del> def reset_transaction <add> def reset_transaction #:nodoc: <ide> @transaction = ClosedTransaction.new(self) <ide> end <ide> <ide><path>activerecord/lib/active_record/connection_adapters/postgresql/database_statements.rb <add>require 'active_support/deprecation' <add> <ide> module ActiveRecord <ide> module ConnectionAdapters <ide> class PostgreSQLAdapter < AbstractAdapter <ide> def rollback_db_transaction <ide> end <ide> <ide> def outside_transaction? <add> ActiveSupport::Deprecation.warn( <add> "#outside_transaction? is deprecated. This method was only really used " \ <add> "internally, but you can use #transaction_open? instead." <add> ) <ide> @connection.transaction_status == PGconn::PQTRANS_IDLE <ide> end <ide> <ide><path>activerecord/test/cases/transactions_test.rb <ide> def test_many_savepoints <ide> def test_rollback_when_commit_raises <ide> Topic.connection.expects(:begin_db_transaction) <ide> Topic.connection.expects(:commit_db_transaction).raises('OH NOES') <del> Topic.connection.expects(:outside_transaction?).returns(false) <ide> Topic.connection.expects(:rollback_db_transaction) <ide> <ide> assert_raise RuntimeError do <ide> def test_restore_active_record_state_for_all_records_in_a_transaction <ide> <ide> if current_adapter?(:PostgreSQLAdapter) && defined?(PGconn::PQTRANS_IDLE) <ide> def test_outside_transaction_works <del> assert Topic.connection.outside_transaction? 
<add> assert assert_deprecated { Topic.connection.outside_transaction? } <ide> Topic.connection.begin_db_transaction <del> assert !Topic.connection.outside_transaction? <add> assert assert_deprecated { !Topic.connection.outside_transaction? } <ide> Topic.connection.rollback_db_transaction <del> assert Topic.connection.outside_transaction? <del> end <del> <del> def test_rollback_wont_be_executed_if_no_transaction_active <del> assert_raise RuntimeError do <del> Topic.transaction do <del> Topic.connection.rollback_db_transaction <del> Topic.connection.expects(:rollback_db_transaction).never <del> raise "Rails doesn't scale!" <del> end <del> end <del> end <del> <del> def test_open_transactions_count_is_reset_to_zero_if_no_transaction_active <del> Topic.transaction do <del> Topic.transaction do <del> Topic.connection.rollback_db_transaction <del> end <del> assert_equal 0, Topic.connection.open_transactions <del> end <del> assert_equal 0, Topic.connection.open_transactions <del> <del> Topic.transaction do <del> Topic.connection.rollback_db_transaction <del> end <del> assert_equal 0, Topic.connection.open_transactions <add> assert assert_deprecated { Topic.connection.outside_transaction? } <ide> end <ide> end <ide>
3
Python
Python
apply `imports` fixer
09a52ed47bb26498c97a579ce1147861df696d84
<ide><path>doc/cdoc/numpyfilter.py <ide> import os <ide> import textwrap <ide> import optparse <del>import cPickle as pickle <add> <add>if sys.version_info[0] >= 3: <add> import pickle <add>else: <add> import cPickle as pickle. <ide> <ide> CACHE_FILE = 'build/rst-cache.pck' <ide> <ide><path>doc/numpybook/runcode.py <ide> <ide> import sys <ide> import optparse <del>import cStringIO <add>import io <ide> import re <ide> import os <ide> <ide> def getoutput(tstr, dic): <ide> print "\n\nRunning..." <ide> print tstr, <del> tempstr = cStringIO.StringIO() <add> tempstr = io.StringIO() <ide> sys.stdout = tempstr <ide> code = compile(tstr, '<input>', 'exec') <ide> try: <ide> def getnewcodestr(substr, dic): <ide> <ide> def runpycode(lyxstr, name='MyCode'): <ide> schobj = re.compile(r"\\layout %s\s+>>> " % name) <del> outstr = cStringIO.StringIO() <add> outstr = io.StringIO() <ide> num = 0 <ide> indx = [] <ide> for it in schobj.finditer(lyxstr): <ide><path>doc/sphinxext/numpydoc/comment_eater.py <ide> if sys.version_info[0] >= 3: <ide> from io import StringIO <ide> else: <del> from cStringIO import StringIO <add> from io import StringIO <ide> <ide> import compiler <ide> import inspect <ide><path>doc/sphinxext/numpydoc/compiler_unparse.py <ide> if sys.version_info[0] >= 3: <ide> from io import StringIO <ide> else: <del> from cStringIO import StringIO <add> from io import StringIO <ide> <ide> def unparse(ast, single_line_functions=False): <ide> s = StringIO() <ide><path>doc/sphinxext/numpydoc/docscrape.py <ide> if sys.version_info[0] >= 3: <ide> from io import StringIO <ide> else: <del> from cStringIO import StringIO <add> from io import StringIO <ide> <ide> class Reader(object): <ide> """A line-based string reader. <ide><path>doc/sphinxext/numpydoc/plot_directive.py <ide> if sys.version_info[0] >= 3: <ide> from io import StringIO <ide> else: <del> from cStringIO import StringIO <add> from io import StringIO <ide> <ide> import warnings <ide> warnings.warn("A plot_directive module is also available under " <ide><path>numpy/__init__.py <ide> """ <ide> from __future__ import division, absolute_import <ide> <add>import sys <add> <ide> # We first need to detect if we're being called as part of the numpy setup <ide> # procedure itself in a reliable manner. <ide> try: <ide> def pkgload(*packages, **options): <ide> <ide> # Make these accessible from numpy name-space <ide> # but not imported in from numpy import * <del> from __builtin__ import bool, int, long, float, complex, \ <del> object, unicode, str <add> if sys.version_info[0] >= 3: <add> from builtins import bool, int, long, float, complex, object, unicode, str <add> else: <add> from __builtin__ import bool, int, long, float, complex, object, unicode, str <add> <ide> from .core import round, abs, max, min <ide> <ide> __all__.extend(['__version__', 'pkgload', 'PackageLoader', <ide><path>numpy/core/__init__.py <ide> def _ufunc_reduce(func): <ide> <ide> <ide> import sys <del>if sys.version_info[0] < 3: <del> import copy_reg as copyreg <del>else: <add>if sys.version_info[0] >= 3: <ide> import copyreg <add>else: <add> import copy_reg as copyreg <ide> <ide> copyreg.pickle(ufunc, _ufunc_reduce, _ufunc_reconstruct) <ide> # Unclutter namespace (must keep _ufunc_reconstruct for unpickling) <ide><path>numpy/core/numeric.py <ide> from __future__ import division, absolute_import <ide> <add>import sys <add>import warnings <add>from . import multiarray <add>from . import umath <add>from .umath import * <add>from . 
import numerictypes <add>from .numerictypes import * <add>import collections <add> <add>if sys.version_info[0] >= 3: <add> import pickle <add>else: <add> import cPickle as pickle <add> <add>loads = pickle.loads <add> <add> <ide> __all__ = ['newaxis', 'ndarray', 'flatiter', 'nditer', 'nested_iters', 'ufunc', <ide> 'arange', 'array', 'zeros', 'count_nonzero', <ide> 'empty', 'broadcast', 'dtype', 'fromstring', 'fromfile', <ide> 'CLIP', 'RAISE', 'WRAP', 'MAXDIMS', 'BUFSIZE', 'ALLOW_THREADS', <ide> 'ComplexWarning'] <ide> <del>import sys <del>import warnings <del>from . import multiarray <del>from . import umath <del>from .umath import * <del>from . import numerictypes <del>from .numerictypes import * <del>import collections <del> <del> <ide> if sys.version_info[0] < 3: <ide> __all__.extend(['getbuffer', 'newbuffer']) <ide> <add> <ide> class ComplexWarning(RuntimeWarning): <ide> """ <ide> The warning raised when casting a complex dtype to a real dtype. <ide> def base_repr(number, base=2, padding=0): <ide> res.append('-') <ide> return ''.join(reversed(res or '0')) <ide> <del>from cPickle import load, loads <del>_cload = load <del>_file = open <ide> <ide> def load(file): <ide> """ <ide> def load(file): <ide> <ide> """ <ide> if isinstance(file, type("")): <del> file = _file(file,"rb") <del> return _cload(file) <add> file = open(file, "rb") <add> return pickle.load(file) <ide> <ide> # These are all essentially abbreviations <ide> # These might wind up in a special abbreviations module <ide><path>numpy/core/numerictypes.py <ide> <ide> # we don't export these for import *, but we do want them accessible <ide> # as numerictypes.bool, etc. <del>from __builtin__ import bool, int, long, float, complex, object, unicode, str <add>if sys.version_info[0] >= 3: <add> from builtins import bool, int, long, float, complex, object, unicode, str <add>else: <add> from __builtin__ import bool, int, long, float, complex, object, unicode, str <add> <ide> from numpy.compat import bytes <ide> <ide> if sys.version_info[0] >= 3: <ide><path>numpy/core/records.py <ide> def fromrecords(recList, dtype=None, shape=None, formats=None, names=None, <ide> >>> r.col2 <ide> chararray(['dbe', 'de'], <ide> dtype='|S3') <del> >>> import cPickle <del> >>> print cPickle.loads(cPickle.dumps(r)) <add> >>> import pickle <add> >>> print pickle.loads(pickle.dumps(r)) <ide> [(456, 'dbe', 1.2) (2, 'de', 1.3)] <ide> """ <ide> <ide><path>numpy/core/setup.py <ide> import os <ide> import sys <ide> import shutil <add>import pickle <add>import copy <add>import warnings <add>import re <ide> from os.path import join <ide> from numpy.distutils import log <ide> from distutils.dep_util import newer <ide> from distutils.sysconfig import get_config_var <del>import warnings <del>import re <ide> <ide> from setup_common import * <ide> <ide> # configuration informations between extensions is not easy. <ide> # Using a pickled-based memoize does not work because config_cmd is an instance <ide> # method, which cPickle does not like. <del>try: <del> import cPickle as _pik <del>except ImportError: <del> import pickle as _pik <del>import copy <add># <add># Use pickle in all cases, as cPickle is gone in python3 and the difference <add># in time is only in build. 
-- Charles Harris, 2013-03-30 <ide> <ide> class CallOnceOnly(object): <ide> def __init__(self): <ide> def __init__(self): <ide> def check_types(self, *a, **kw): <ide> if self._check_types is None: <ide> out = check_types(*a, **kw) <del> self._check_types = _pik.dumps(out) <add> self._check_types = pickle.dumps(out) <ide> else: <del> out = copy.deepcopy(_pik.loads(self._check_types)) <add> out = copy.deepcopy(pickle.loads(self._check_types)) <ide> return out <ide> <ide> def check_ieee_macros(self, *a, **kw): <ide> if self._check_ieee_macros is None: <ide> out = check_ieee_macros(*a, **kw) <del> self._check_ieee_macros = _pik.dumps(out) <add> self._check_ieee_macros = pickle.dumps(out) <ide> else: <del> out = copy.deepcopy(_pik.loads(self._check_ieee_macros)) <add> out = copy.deepcopy(pickle.loads(self._check_ieee_macros)) <ide> return out <ide> <ide> def check_complex(self, *a, **kw): <ide> if self._check_complex is None: <ide> out = check_complex(*a, **kw) <del> self._check_complex = _pik.dumps(out) <add> self._check_complex = pickle.dumps(out) <ide> else: <del> out = copy.deepcopy(_pik.loads(self._check_complex)) <add> out = copy.deepcopy(pickle.loads(self._check_complex)) <ide> return out <ide> <ide> PYTHON_HAS_UNICODE_WIDE = True <ide><path>numpy/core/tests/test_print.py <ide> <ide> import locale <ide> import sys <del>from StringIO import StringIO <add> <add>if sys.version_info[0] >= 3: <add> from io import StringIO <add>else: <add> from StringIO import StringIO <ide> <ide> _REF = {np.inf: 'inf', -np.inf: '-inf', np.nan: 'nan'} <ide> <ide><path>numpy/core/tests/test_regression.py <ide> import copy <ide> import warnings <ide> import tempfile <del>from StringIO import StringIO <ide> from os import path <add>from io import BytesIO <add> <ide> import numpy as np <ide> from numpy.testing import ( <ide> run_module_suite, TestCase, assert_, assert_equal, <ide> from numpy.testing.utils import _assert_valid_refcount, WarningManager <ide> from numpy.compat import asbytes, asunicode, asbytes_nested <ide> <del>if sys.version_info[0] >= 3: <del> import io <del> StringIO = io.BytesIO <del> <ide> rlevel = 1 <ide> <ide> class TestRegression(TestCase): <ide> def test_mem_empty(self,level=rlevel): <ide> def test_pickle_transposed(self,level=rlevel): <ide> """Ticket #16""" <ide> a = np.transpose(np.array([[2,9],[7,0],[3,8]])) <del> f = StringIO() <add> f = BytesIO() <ide> pickle.dump(a,f) <ide> f.seek(0) <ide> b = pickle.load(f) <ide> def test_negative_nd_indexing(self,level=rlevel): <ide> <ide> def test_char_dump(self,level=rlevel): <ide> """Ticket #50""" <del> f = StringIO() <add> f = BytesIO() <ide> ca = np.char.array(np.arange(1000,1010),itemsize=4) <ide> ca.dump(f) <ide> f.seek(0) <ide> def assign(a, b, c): <ide> def test_unpickle_dtype_with_object(self,level=rlevel): <ide> """Implemented in r2840""" <ide> dt = np.dtype([('x',int),('y',np.object_),('z','O')]) <del> f = StringIO() <add> f = BytesIO() <ide> pickle.dump(dt,f) <ide> f.seek(0) <ide> dt_ = pickle.load(f) <ide> def test_lexsort(self,level=rlevel): <ide> <ide> def test_pickle_dtype(self,level=rlevel): <ide> """Ticket #251""" <del> import pickle <ide> pickle.dumps(np.float) <ide> <ide> def test_swap_real(self, level=rlevel): <ide> def rs(): <ide> <ide> def test_unicode_scalar(self, level=rlevel): <ide> """Ticket #600""" <del> import cPickle <ide> x = np.array(["DROND", "DROND1"], dtype="U6") <ide> el = x[1] <del> new = cPickle.loads(cPickle.dumps(el)) <add> new = pickle.loads(pickle.dumps(el)) <ide> assert_equal(new, el) <ide> <ide> def 
test_arange_non_native_dtype(self, level=rlevel): <ide><path>numpy/distutils/cpuinfo.py <ide> <ide> import sys, re, types <ide> import os <del>if sys.version_info[0] < 3: <del> from commands import getstatusoutput <del>else: <add> <add>if sys.version_info[0] >= 3: <ide> from subprocess import getstatusoutput <add>else: <add> from commands import getstatusoutput <add> <ide> import warnings <ide> import platform <ide> <ide> def __init__(self): <ide> info = [] <ide> try: <ide> #XXX: Bad style to use so long `try:...except:...`. Fix it! <del> import _winreg <add> if sys.version_info[0] >= 3: <add> import winreg <add> else: <add> import _winreg as winreg <add> <ide> prgx = re.compile(r"family\s+(?P<FML>\d+)\s+model\s+(?P<MDL>\d+)"\ <ide> "\s+stepping\s+(?P<STP>\d+)",re.IGNORECASE) <del> chnd=_winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, self.pkey) <add> chnd=winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, self.pkey) <ide> pnum=0 <ide> while 1: <ide> try: <del> proc=_winreg.EnumKey(chnd,pnum) <del> except _winreg.error: <add> proc=winreg.EnumKey(chnd,pnum) <add> except winreg.error: <ide> break <ide> else: <ide> pnum+=1 <ide> info.append({"Processor":proc}) <del> phnd=_winreg.OpenKey(chnd,proc) <add> phnd=winreg.OpenKey(chnd,proc) <ide> pidx=0 <ide> while True: <ide> try: <del> name,value,vtpe=_winreg.EnumValue(phnd,pidx) <del> except _winreg.error: <add> name,value,vtpe=winreg.EnumValue(phnd,pidx) <add> except winreg.error: <ide> break <ide> else: <ide> pidx=pidx+1 <ide><path>numpy/distutils/mingw32ccompiler.py <ide> from __future__ import division, absolute_import <ide> <ide> import os <del>import subprocess <ide> import sys <ide> import subprocess <ide> import re <ide><path>numpy/distutils/misc_util.py <ide> def get_info(pkgname, dirs=None): <ide> return info <ide> <ide> def is_bootstrapping(): <del> import __builtin__ <add> if sys.version_info[0] >= 3: <add> import builtins <add> else: <add> import __builtin__ as builtins <add> <ide> try: <del> __builtin__.__NUMPY_SETUP__ <add> builtins.__NUMPY_SETUP__ <ide> return True <ide> except AttributeError: <ide> return False <ide><path>numpy/distutils/npy_pkg_config.py <ide> from __future__ import division, absolute_import <ide> <ide> import sys <add>import re <add>import os <add>import shlex <add> <ide> if sys.version_info[0] < 3: <ide> from ConfigParser import SafeConfigParser, NoOptionError <ide> else: <ide> from configparser import ConfigParser, SafeConfigParser, NoOptionError <del>import re <del>import os <del>import shlex <ide> <ide> __all__ = ['FormatError', 'PkgNotFound', 'LibraryInfo', 'VariableSet', <ide> 'read_config', 'parse_flags'] <ide><path>numpy/distutils/system_info.py <ide> import copy <ide> import warnings <ide> from glob import glob <add> <ide> if sys.version_info[0] < 3: <ide> from ConfigParser import NoOptionError, ConfigParser <ide> else: <ide><path>numpy/distutils/tests/test_exec_command.py <ide> <ide> import os <ide> import sys <del>import StringIO <ide> from tempfile import TemporaryFile <ide> <ide> from numpy.distutils import exec_command <ide> <add># In python 3 stdout, stderr are text (unicode compliant) devices, so to <add># emulate them import StringIO from the io module. <add>if sys.version_info[0] >= 3: <add> from io import StringIO <add>else: <add> from StringIO import StringIO <ide> <ide> class redirect_stdout(object): <ide> """Context manager to redirect stdout for exec_command test.""" <ide> def test_exec_command_stdout(): <ide> # both that the special case works and that the generic code works. 
<ide> <ide> # Test posix version: <del> with redirect_stdout(StringIO.StringIO()): <add> with redirect_stdout(StringIO()): <ide> with redirect_stderr(TemporaryFile()): <ide> exec_command.exec_command("cd '.'") <ide> <ide> if os.name == 'posix': <ide> # Test general (non-posix) version: <ide> with emulate_nonposix(): <del> with redirect_stdout(StringIO.StringIO()): <add> with redirect_stdout(StringIO()): <ide> with redirect_stderr(TemporaryFile()): <ide> exec_command.exec_command("cd '.'") <ide> <ide> def test_exec_command_stderr(): <ide> # Test posix version: <ide> with redirect_stdout(TemporaryFile(mode='w+')): <del> with redirect_stderr(StringIO.StringIO()): <add> with redirect_stderr(StringIO()): <ide> exec_command.exec_command("cd '.'") <ide> <ide> if os.name == 'posix': <ide> # Test general (non-posix) version: <ide> with emulate_nonposix(): <ide> with redirect_stdout(TemporaryFile()): <del> with redirect_stderr(StringIO.StringIO()): <add> with redirect_stderr(StringIO()): <ide> exec_command.exec_command("cd '.'") <ide><path>numpy/f2py/__init__.py <ide> <ide> import os <ide> import sys <del>import commands <add>import subprocess <ide> <ide> from . import f2py2e <ide> from . import f2py_testing <ide><path>numpy/f2py/doc/collectinput.py <ide> import sys <ide> import fileinput <ide> import re <del>import commands <add> <add>if sys.version_info[0] >= 3: <add> from subprocess import getoutput <add>else: <add> from commands import getoutput <ide> <ide> try: fn=sys.argv[2] <ide> except: <ide> elif flag==1: <ide> sys.stderr.write(fn+'\n') <ide> print '%%%%% Begin of '+fn <del> print commands.getoutput(sys.argv[0]+' < '+fn) <add> print getoutput(sys.argv[0]+' < '+fn) <ide> print '%%%%% End of '+fn <ide> else: <ide> sys.stderr.write('Could not extract a file name from: '+l) <ide><path>numpy/lib/_datasource.py <ide> __docformat__ = "restructuredtext en" <ide> <ide> import os <add>import sys <ide> from shutil import rmtree, copyfile, copyfileobj <ide> <ide> _open = open <ide> def _isurl(self, path): <ide> """Test if path is a net location. Tests the scheme and netloc.""" <ide> <ide> # We do this here to reduce the 'import numpy' initial import time. <del> from urlparse import urlparse <add> if sys.version_info[0] >= 3: <add> from urllib.parse import urlparse <add> else: <add> from urlparse import urlparse <ide> <ide> # BUG : URLs require a scheme string ('http://') to be used. <ide> # www.google.com will fail. <ide> def abspath(self, path): <ide> <ide> """ <ide> # We do this here to reduce the 'import numpy' initial import time. <del> from urlparse import urlparse <del> <add> if sys.version_info[0] >= 3: <add> from urllib.parse import urlparse <add> else: <add> from urlparse import urlparse <ide> <ide> # TODO: This should be more robust. Handles case where path includes <ide> # the destpath, but not other sub-paths. 
Failing case: <ide><path>numpy/lib/_iotools.py <ide> import sys <ide> import numpy as np <ide> import numpy.core.numeric as nx <del>from __builtin__ import bool, int, long, float, complex, object, unicode, str <add> <add>if sys.version_info[0] >= 3: <add> from builtins import bool, int, long, float, complex, object, unicode, str <add>else: <add> from __builtin__ import bool, int, long, float, complex, object, unicode, str <ide> <ide> from numpy.compat import asbytes, bytes, asbytes_nested <ide> <ide><path>numpy/lib/format.py <ide> """ <ide> from __future__ import division, absolute_import <ide> <del>import cPickle <del> <ide> import numpy <ide> import sys <ide> from numpy.lib.utils import safe_eval <ide> from numpy.compat import asbytes, isfileobj <ide> <add>if sys.version_info[0] >= 3: <add> import pickle <add>else: <add> import cPickle as pickle <add> <ide> MAGIC_PREFIX = asbytes('\x93NUMPY') <ide> MAGIC_LEN = len(MAGIC_PREFIX) + 2 <ide> <ide> def write_array(fp, array, version=(1,0)): <ide> if array.dtype.hasobject: <ide> # We contain Python objects so we cannot write out the data directly. <ide> # Instead, we will pickle it out with version 2 of the pickle protocol. <del> cPickle.dump(array, fp, protocol=2) <add> pickle.dump(array, fp, protocol=2) <ide> elif array.flags.f_contiguous and not array.flags.c_contiguous: <ide> if isfileobj(fp): <ide> array.T.tofile(fp) <ide> def read_array(fp): <ide> # Now read the actual data. <ide> if dtype.hasobject: <ide> # The array contained Python objects. We need to unpickle the data. <del> array = cPickle.load(fp) <add> array = pickle.load(fp) <ide> else: <ide> if isfileobj(fp): <ide> # We can use the fast fromfile() function. <ide><path>numpy/lib/npyio.py <ide> from __future__ import division, absolute_import <ide> <del>__all__ = ['savetxt', 'loadtxt', 'genfromtxt', 'ndfromtxt', 'mafromtxt', <del> 'recfromtxt', 'recfromcsv', 'load', 'loads', 'save', 'savez', <del> 'savez_compressed', 'packbits', 'unpackbits', 'fromregex', 'DataSource'] <del> <ide> import numpy as np <ide> from . 
import format <ide> import sys <ide> import weakref <ide> from operator import itemgetter <ide> <del>from cPickle import load as _cload, loads <ide> from ._datasource import DataSource <ide> from ._compiled_base import packbits, unpackbits <ide> <ide> easy_dtype, _bytes_to_name <ide> <ide> from numpy.compat import asbytes, asstr, asbytes_nested, bytes <add>from io import BytesIO <ide> <ide> if sys.version_info[0] >= 3: <del> from io import BytesIO <add> import pickle <ide> else: <del> from cStringIO import StringIO as BytesIO <add> import cPickle as pickle <add> <add>loads = pickle.loads <add> <add>__all__ = ['savetxt', 'loadtxt', 'genfromtxt', 'ndfromtxt', 'mafromtxt', <add> 'recfromtxt', 'recfromcsv', 'load', 'loads', 'save', 'savez', <add> 'savez_compressed', 'packbits', 'unpackbits', 'fromregex', 'DataSource'] <ide> <ide> _string_like = _is_string_like <ide> <ide> def load(file, mmap_mode=None): <ide> return format.read_array(fid) <ide> else: # Try a pickle <ide> try: <del> return _cload(fid) <add> return pickle.load(fid) <ide> except: <ide> raise IOError( <ide> "Failed to interpret file %s as a pickle" % repr(file)) <ide><path>numpy/lib/tests/test__datasource.py <ide> from __future__ import division, absolute_import <ide> <ide> import os <add>import urllib2 <add>import sys <add>import numpy.lib._datasource as datasource <ide> from tempfile import mkdtemp, mkstemp, NamedTemporaryFile <ide> from shutil import rmtree <del>from urlparse import urlparse <ide> from urllib2 import URLError <del>import urllib2 <del> <add>from numpy.compat import asbytes <ide> from numpy.testing import * <ide> <del>from numpy.compat import asbytes <ide> <del>import numpy.lib._datasource as datasource <add>if sys.version_info[0] >= 3: <add> from urllib.parse import urlparse <add>else: <add> from urlparse import urlparse <ide> <ide> def urlopen_stub(url, data=None): <ide> '''Stub to replace urlopen for testing.''' <ide><path>numpy/lib/tests/test__iotools.py <ide> from __future__ import division, absolute_import <ide> <ide> import sys <del> <del>if sys.version_info[0] >= 3: <del> from io import BytesIO <del> def StringIO(s=""): <del> return BytesIO(asbytes(s)) <del>else: <del> from StringIO import StringIO <del> <del>from datetime import date <ide> import time <add>from datetime import date <ide> <ide> import numpy as np <ide> from numpy.lib._iotools import LineSplitter, NameValidator, StringConverter, \ <ide> has_nested_fields, easy_dtype, flatten_dtype <ide> from numpy.testing import * <del> <ide> from numpy.compat import asbytes, asbytes_nested <ide> <ide> class TestLineSplitter(TestCase): <ide><path>numpy/lib/tests/test_format.py <ide> Set up: <ide> <ide> >>> import sys <del> >>> if sys.version_info[0] >= 3: <del> ... from io import BytesIO as StringIO <del> ... else: <del> ... from cStringIO import StringIO <add> >>> from io import BytesIO <ide> >>> from numpy.lib import format <ide> >>> <ide> >>> scalars = [ <ide> <ide> Test the magic string reading. <ide> <del> >>> format.read_magic(StringIO(format.magic(1, 0))) <add> >>> format.read_magic(BytesIO(format.magic(1, 0))) <ide> (1, 0) <del> >>> format.read_magic(StringIO(format.magic(0, 0))) <add> >>> format.read_magic(BytesIO(format.magic(0, 0))) <ide> (0, 0) <del> >>> format.read_magic(StringIO(format.magic(255, 255))) <add> >>> format.read_magic(BytesIO(format.magic(255, 255))) <ide> (255, 255) <del> >>> format.read_magic(StringIO(format.magic(2, 5))) <add> >>> format.read_magic(BytesIO(format.magic(2, 5))) <ide> (2, 5) <ide> <ide> Test the header writing. 
<ide> <ide> >>> for arr in basic_arrays + record_arrays: <del> ... f = StringIO() <add> ... f = BytesIO() <ide> ... format.write_array_header_1_0(f, arr) # XXX: arr is not a dict, items gets called on it <ide> ... print repr(f.getvalue()) <ide> ... <ide> "\x16\x02{'descr': [('x', '>i4', (2,)),\n ('Info',\n [('value', '>c16'),\n ('y2', '>f8'),\n ('Info2',\n [('name', '|S2'),\n ('value', '>c16', (2,)),\n ('y3', '>f8', (2,)),\n ('z3', '>u4', (2,))]),\n ('name', '|S2'),\n ('z2', '|b1')]),\n ('color', '|S2'),\n ('info', [('Name', '>U8'), ('Value', '>c16')]),\n ('y', '>f8', (2, 2)),\n ('z', '|u1')],\n 'fortran_order': False,\n 'shape': (2,)} \n" <ide> ''' <ide> <del> <ide> import sys <ide> import os <ide> import shutil <ide> import tempfile <del> <del>if sys.version_info[0] >= 3: <del> from io import BytesIO as StringIO <del>else: <del> from cStringIO import StringIO <add>from io import BytesIO <ide> <ide> import numpy as np <ide> from numpy.testing import * <del> <ide> from numpy.lib import format <del> <ide> from numpy.compat import asbytes, asbytes_nested <ide> <ide> <ide> def teardown_module(): <ide> ] <ide> <ide> def roundtrip(arr): <del> f = StringIO() <add> f = BytesIO() <ide> format.write_array(f, arr) <del> f2 = StringIO(f.getvalue()) <add> f2 = BytesIO(f.getvalue()) <ide> arr2 = format.read_array(f2) <ide> return arr2 <ide> <ide> def test_memmap_roundtrip(): <ide> <ide> <ide> def test_write_version_1_0(): <del> f = StringIO() <add> f = BytesIO() <ide> arr = np.arange(1) <ide> # These should pass. <ide> format.write_array(f, arr, version=(1, 0)) <ide> def test_write_version_1_0(): <ide> <ide> def test_read_magic_bad_magic(): <ide> for magic in malformed_magic: <del> f = StringIO(magic) <add> f = BytesIO(magic) <ide> yield raises(ValueError)(format.read_magic), f <ide> <ide> def test_read_version_1_0_bad_magic(): <ide> for magic in bad_version_magic + malformed_magic: <del> f = StringIO(magic) <add> f = BytesIO(magic) <ide> yield raises(ValueError)(format.read_array), f <ide> <ide> def test_bad_magic_args(): <ide> def test_bad_magic_args(): <ide> assert_raises(ValueError, format.magic, 1, 256) <ide> <ide> def test_large_header(): <del> s = StringIO() <add> s = BytesIO() <ide> d = {'a':1,'b':2} <ide> format.write_array_header_1_0(s,d) <ide> <del> s = StringIO() <add> s = BytesIO() <ide> d = {'a':1,'b':2,'c':'x'*256*256} <ide> assert_raises(ValueError, format.write_array_header_1_0, s, d) <ide> <ide> def test_bad_header(): <ide> # header of length less than 2 should fail <del> s = StringIO() <add> s = BytesIO() <ide> assert_raises(ValueError, format.read_array_header_1_0, s) <del> s = StringIO(asbytes('1')) <add> s = BytesIO(asbytes('1')) <ide> assert_raises(ValueError, format.read_array_header_1_0, s) <ide> <ide> # header shorter than indicated size should fail <del> s = StringIO(asbytes('\x01\x00')) <add> s = BytesIO(asbytes('\x01\x00')) <ide> assert_raises(ValueError, format.read_array_header_1_0, s) <ide> <ide> # headers without the exact keys required should fail <ide> d = {"shape":(1,2), <ide> "descr":"x"} <del> s = StringIO() <add> s = BytesIO() <ide> format.write_array_header_1_0(s,d) <ide> assert_raises(ValueError, format.read_array_header_1_0, s) <ide> <ide> d = {"shape":(1,2), <ide> "fortran_order":False, <ide> "descr":"x", <ide> "extrakey":-1} <del> s = StringIO() <add> s = BytesIO() <ide> format.write_array_header_1_0(s,d) <ide> assert_raises(ValueError, format.read_array_header_1_0, s) <ide> <ide><path>numpy/lib/tests/test_io.py <ide> import gzip <ide> import os <ide> import 
threading <del>from tempfile import mkstemp, NamedTemporaryFile <ide> import time <del>from datetime import datetime <ide> import warnings <ide> import gc <add>from tempfile import mkstemp, NamedTemporaryFile <add>from io import BytesIO <add>from datetime import datetime <ide> from numpy.testing.utils import WarningManager <ide> <ide> import numpy as np <ide> import numpy.ma as ma <ide> from numpy.lib._iotools import ConverterError, ConverterLockError, \ <ide> ConversionWarning <del>from numpy.compat import asbytes, asbytes_nested, bytes <del> <add>from numpy.compat import asbytes, asbytes_nested, bytes, asstr <ide> from nose import SkipTest <ide> from numpy.ma.testutils import (TestCase, assert_equal, assert_array_equal, <ide> assert_raises, run_module_suite) <ide> from numpy.testing import assert_warns, assert_, build_err_msg <ide> <del>if sys.version_info[0] >= 3: <del> from io import BytesIO <del> def StringIO(s=""): <del> return BytesIO(asbytes(s)) <del>else: <del> from StringIO import StringIO <del> BytesIO = StringIO <add> <add>class TextIO(BytesIO): <add> """Helper IO class. <add> <add> Writes encode strings to bytes if needed, reads return bytes. <add> This makes it easier to emulate files opened in binary mode <add> without needing to explicitly convert strings to bytes in <add> setting up the test data. <add> <add> """ <add> def __init__(self, s=""): <add> BytesIO.__init__(self, asbytes(s)) <add> <add> def write(self, s): <add> BytesIO.write(self, asbytes(s)) <add> <add> def writelines(self, lines): <add> BytesIO.writelines(self, [asbytes(s) for s in lines]) <add> <ide> <ide> MAJVER, MINVER = sys.version_info[:2] <ide> <ide> def roundtrip(self, save_func, *args, **kwargs): <ide> target_file = NamedTemporaryFile() <ide> load_file = target_file.name <ide> else: <del> target_file = StringIO() <add> target_file = BytesIO() <ide> load_file = target_file <ide> <ide> arr = args <ide> def test_multiple_arrays(self): <ide> def test_named_arrays(self): <ide> a = np.array([[1, 2], [3, 4]], float) <ide> b = np.array([[1 + 2j, 2 + 7j], [3 - 6j, 4 + 12j]], complex) <del> c = StringIO() <add> c = BytesIO() <ide> np.savez(c, file_a=a, file_b=b) <ide> c.seek(0) <ide> l = np.load(c) <ide> class TestSaveTxt(TestCase): <ide> def test_array(self): <ide> a = np.array([[1, 2], [3, 4]], float) <ide> fmt = "%.18e" <del> c = StringIO() <add> c = BytesIO() <ide> np.savetxt(c, a, fmt=fmt) <ide> c.seek(0) <ide> assert_equal(c.readlines(), <del> asbytes_nested( <del> [(fmt + ' ' + fmt + '\n') % (1, 2), <del> (fmt + ' ' + fmt + '\n') % (3, 4)])) <add> [asbytes((fmt + ' ' + fmt + '\n') % (1, 2)), <add> asbytes((fmt + ' ' + fmt + '\n') % (3, 4))]) <ide> <ide> a = np.array([[1, 2], [3, 4]], int) <del> c = StringIO() <add> c = BytesIO() <ide> np.savetxt(c, a, fmt='%d') <ide> c.seek(0) <del> assert_equal(c.readlines(), asbytes_nested(['1 2\n', '3 4\n'])) <add> assert_equal(c.readlines(), [b'1 2\n', b'3 4\n']) <ide> <ide> def test_1D(self): <ide> a = np.array([1, 2, 3, 4], int) <del> c = StringIO() <add> c = BytesIO() <ide> np.savetxt(c, a, fmt='%d') <ide> c.seek(0) <ide> lines = c.readlines() <del> assert_equal(lines, asbytes_nested(['1\n', '2\n', '3\n', '4\n'])) <add> assert_equal(lines, [b'1\n', b'2\n', b'3\n', b'4\n']) <ide> <ide> def test_record(self): <ide> a = np.array([(1, 2), (3, 4)], dtype=[('x', 'i4'), ('y', 'i4')]) <del> c = StringIO() <add> c = BytesIO() <ide> np.savetxt(c, a, fmt='%d') <ide> c.seek(0) <del> assert_equal(c.readlines(), asbytes_nested(['1 2\n', '3 4\n'])) <add> 
assert_equal(c.readlines(), [b'1 2\n', b'3 4\n']) <ide> <ide> def test_delimiter(self): <ide> a = np.array([[1., 2.], [3., 4.]]) <del> c = StringIO() <del> np.savetxt(c, a, delimiter=asbytes(','), fmt='%d') <add> c = BytesIO() <add> np.savetxt(c, a, delimiter=',', fmt='%d') <ide> c.seek(0) <del> assert_equal(c.readlines(), asbytes_nested(['1,2\n', '3,4\n'])) <add> assert_equal(c.readlines(), [b'1,2\n', b'3,4\n']) <ide> <ide> def test_format(self): <ide> a = np.array([(1, 2), (3, 4)]) <del> c = StringIO() <add> c = BytesIO() <ide> # Sequence of formats <ide> np.savetxt(c, a, fmt=['%02d', '%3.1f']) <ide> c.seek(0) <del> assert_equal(c.readlines(), asbytes_nested(['01 2.0\n', '03 4.0\n'])) <add> assert_equal(c.readlines(), [b'01 2.0\n', b'03 4.0\n']) <ide> <ide> # A single multiformat string <del> c = StringIO() <add> c = BytesIO() <ide> np.savetxt(c, a, fmt='%02d : %3.1f') <ide> c.seek(0) <ide> lines = c.readlines() <del> assert_equal(lines, asbytes_nested(['01 : 2.0\n', '03 : 4.0\n'])) <add> assert_equal(lines, [b'01 : 2.0\n', b'03 : 4.0\n']) <ide> <ide> # Specify delimiter, should be overiden <del> c = StringIO() <add> c = BytesIO() <ide> np.savetxt(c, a, fmt='%02d : %3.1f', delimiter=',') <ide> c.seek(0) <ide> lines = c.readlines() <del> assert_equal(lines, asbytes_nested(['01 : 2.0\n', '03 : 4.0\n'])) <add> assert_equal(lines, [b'01 : 2.0\n', b'03 : 4.0\n']) <ide> <ide> def test_header_footer(self): <ide> """ <ide> Test the functionality of the header and footer keyword argument. <ide> """ <del> c = StringIO() <add> c = BytesIO() <ide> a = np.array([(1, 2), (3, 4)], dtype=np.int) <ide> test_header_footer = 'Test header / footer' <ide> # Test the header keyword argument <ide> np.savetxt(c, a, fmt='%1d', header=test_header_footer) <ide> c.seek(0) <ide> assert_equal(c.read(), <del> asbytes('# ' + test_header_footer +'\n1 2\n3 4\n' )) <add> asbytes('# ' + test_header_footer + '\n1 2\n3 4\n')) <ide> # Test the footer keyword argument <del> c = StringIO() <add> c = BytesIO() <ide> np.savetxt(c, a, fmt='%1d', footer=test_header_footer) <ide> c.seek(0) <ide> assert_equal(c.read(), <ide> asbytes('1 2\n3 4\n# ' + test_header_footer + '\n')) <ide> # Test the commentstr keyword argument used on the header <del> c = StringIO() <add> c = BytesIO() <ide> commentstr = '% ' <del> np.savetxt(c, a, fmt='%1d', header=test_header_footer, <del> comments=commentstr) <add> np.savetxt(c, a, fmt='%1d', <add> header=test_header_footer, comments=commentstr) <ide> c.seek(0) <ide> assert_equal(c.read(), <ide> asbytes(commentstr + test_header_footer + '\n' + '1 2\n3 4\n')) <ide> # Test the commentstr keyword argument used on the footer <del> c = StringIO() <add> c = BytesIO() <ide> commentstr = '% ' <del> np.savetxt(c, a, fmt='%1d', footer=test_header_footer, <del> comments=commentstr) <add> np.savetxt(c, a, fmt='%1d', <add> footer=test_header_footer, comments=commentstr) <ide> c.seek(0) <ide> assert_equal(c.read(), <ide> asbytes('1 2\n3 4\n' + commentstr + test_header_footer + '\n')) <ide> def test_complex_arrays(self): <ide> im = np.e <ide> a[:] = re + 1.0j * im <ide> # One format only <del> c = StringIO() <add> c = BytesIO() <ide> np.savetxt(c, a, fmt=' %+.3e') <ide> c.seek(0) <ide> lines = c.readlines() <del> _assert_floatstr_lines_equal(lines, asbytes_nested([ <del> ' ( +3.142e+00+ +2.718e+00j) ( +3.142e+00+ +2.718e+00j)\n', <del> ' ( +3.142e+00+ +2.718e+00j) ( +3.142e+00+ +2.718e+00j)\n'])) <add> _assert_floatstr_lines_equal(lines, <add> [b' ( +3.142e+00+ +2.718e+00j) ( +3.142e+00+ +2.718e+00j)\n', <add> b' ( 
+3.142e+00+ +2.718e+00j) ( +3.142e+00+ +2.718e+00j)\n']) <ide> # One format for each real and imaginary part <del> c = StringIO() <add> c = BytesIO() <ide> np.savetxt(c, a, fmt=' %+.3e' * 2 * ncols) <ide> c.seek(0) <ide> lines = c.readlines() <del> _assert_floatstr_lines_equal(lines, asbytes_nested([ <del> ' +3.142e+00 +2.718e+00 +3.142e+00 +2.718e+00\n', <del> ' +3.142e+00 +2.718e+00 +3.142e+00 +2.718e+00\n'])) <add> _assert_floatstr_lines_equal(lines, <add> [b' +3.142e+00 +2.718e+00 +3.142e+00 +2.718e+00\n', <add> b' +3.142e+00 +2.718e+00 +3.142e+00 +2.718e+00\n']) <ide> # One format for each complex number <del> c = StringIO() <add> c = BytesIO() <ide> np.savetxt(c, a, fmt=['(%.3e%+.3ej)'] * ncols) <ide> c.seek(0) <ide> lines = c.readlines() <del> _assert_floatstr_lines_equal(lines, asbytes_nested([ <del> '(3.142e+00+2.718e+00j) (3.142e+00+2.718e+00j)\n', <del> '(3.142e+00+2.718e+00j) (3.142e+00+2.718e+00j)\n'])) <add> _assert_floatstr_lines_equal(lines, <add> [b'(3.142e+00+2.718e+00j) (3.142e+00+2.718e+00j)\n', <add> b'(3.142e+00+2.718e+00j) (3.142e+00+2.718e+00j)\n']) <ide> <ide> <ide> def _assert_floatstr_lines_equal(actual_lines, expected_lines): <ide> def _assert_floatstr_lines_equal(actual_lines, expected_lines): <ide> <ide> class TestLoadTxt(TestCase): <ide> def test_record(self): <del> c = StringIO() <del> c.write(asbytes('1 2\n3 4')) <add> c = TextIO() <add> c.write('1 2\n3 4') <ide> c.seek(0) <ide> x = np.loadtxt(c, dtype=[('x', np.int32), ('y', np.int32)]) <ide> a = np.array([(1, 2), (3, 4)], dtype=[('x', 'i4'), ('y', 'i4')]) <ide> assert_array_equal(x, a) <ide> <del> d = StringIO() <del> d.write(asbytes('M 64.0 75.0\nF 25.0 60.0')) <add> d = TextIO() <add> d.write('M 64.0 75.0\nF 25.0 60.0') <ide> d.seek(0) <ide> mydescriptor = {'names': ('gender', 'age', 'weight'), <ide> 'formats': ('S1', <ide> def test_record(self): <ide> assert_array_equal(y, b) <ide> <ide> def test_array(self): <del> c = StringIO() <del> c.write(asbytes('1 2\n3 4')) <add> c = TextIO() <add> c.write('1 2\n3 4') <ide> <ide> c.seek(0) <ide> x = np.loadtxt(c, dtype=int) <ide> def test_array(self): <ide> assert_array_equal(x, a) <ide> <ide> def test_1D(self): <del> c = StringIO() <del> c.write(asbytes('1\n2\n3\n4\n')) <add> c = TextIO() <add> c.write('1\n2\n3\n4\n') <ide> c.seek(0) <ide> x = np.loadtxt(c, dtype=int) <ide> a = np.array([1, 2, 3, 4], int) <ide> assert_array_equal(x, a) <ide> <del> c = StringIO() <del> c.write(asbytes('1,2,3,4\n')) <add> c = TextIO() <add> c.write('1,2,3,4\n') <ide> c.seek(0) <ide> x = np.loadtxt(c, dtype=int, delimiter=',') <ide> a = np.array([1, 2, 3, 4], int) <ide> assert_array_equal(x, a) <ide> <ide> def test_missing(self): <del> c = StringIO() <del> c.write(asbytes('1,2,3,,5\n')) <add> c = TextIO() <add> c.write('1,2,3,,5\n') <ide> c.seek(0) <ide> x = np.loadtxt(c, dtype=int, delimiter=',', \ <ide> converters={3:lambda s: int(s or - 999)}) <ide> a = np.array([1, 2, 3, -999, 5], int) <ide> assert_array_equal(x, a) <ide> <ide> def test_converters_with_usecols(self): <del> c = StringIO() <del> c.write(asbytes('1,2,3,,5\n6,7,8,9,10\n')) <add> c = TextIO() <add> c.write('1,2,3,,5\n6,7,8,9,10\n') <ide> c.seek(0) <ide> x = np.loadtxt(c, dtype=int, delimiter=',', \ <ide> converters={3:lambda s: int(s or - 999)}, \ <ide> def test_converters_with_usecols(self): <ide> assert_array_equal(x, a) <ide> <ide> def test_comments(self): <del> c = StringIO() <del> c.write(asbytes('# comment\n1,2,3,5\n')) <add> c = TextIO() <add> c.write('# comment\n1,2,3,5\n') <ide> c.seek(0) <ide> x = 
np.loadtxt(c, dtype=int, delimiter=',', \ <ide> comments='#') <ide> a = np.array([1, 2, 3, 5], int) <ide> assert_array_equal(x, a) <ide> <ide> def test_skiprows(self): <del> c = StringIO() <del> c.write(asbytes('comment\n1,2,3,5\n')) <add> c = TextIO() <add> c.write('comment\n1,2,3,5\n') <ide> c.seek(0) <ide> x = np.loadtxt(c, dtype=int, delimiter=',', \ <ide> skiprows=1) <ide> a = np.array([1, 2, 3, 5], int) <ide> assert_array_equal(x, a) <ide> <del> c = StringIO() <del> c.write(asbytes('# comment\n1,2,3,5\n')) <add> c = TextIO() <add> c.write('# comment\n1,2,3,5\n') <ide> c.seek(0) <ide> x = np.loadtxt(c, dtype=int, delimiter=',', \ <ide> skiprows=1) <ide> def test_skiprows(self): <ide> <ide> def test_usecols(self): <ide> a = np.array([[1, 2], [3, 4]], float) <del> c = StringIO() <add> c = BytesIO() <ide> np.savetxt(c, a) <ide> c.seek(0) <ide> x = np.loadtxt(c, dtype=float, usecols=(1,)) <ide> assert_array_equal(x, a[:, 1]) <ide> <ide> a = np.array([[1, 2, 3], [3, 4, 5]], float) <del> c = StringIO() <add> c = BytesIO() <ide> np.savetxt(c, a) <ide> c.seek(0) <ide> x = np.loadtxt(c, dtype=float, usecols=(1, 2)) <ide> def test_usecols(self): <ide> data = '''JOE 70.1 25.3 <ide> BOB 60.5 27.9 <ide> ''' <del> c = StringIO(data) <add> c = TextIO(data) <ide> names = ['stid', 'temp'] <ide> dtypes = ['S4', 'f8'] <ide> arr = np.loadtxt(c, usecols=(0, 2), dtype=zip(names, dtypes)) <del> assert_equal(arr['stid'], asbytes_nested(["JOE", "BOB"])) <add> assert_equal(arr['stid'], [b"JOE", b"BOB"]) <ide> assert_equal(arr['temp'], [25.3, 27.9]) <ide> <ide> def test_fancy_dtype(self): <del> c = StringIO() <del> c.write(asbytes('1,2,3.0\n4,5,6.0\n')) <add> c = TextIO() <add> c.write('1,2,3.0\n4,5,6.0\n') <ide> c.seek(0) <ide> dt = np.dtype([('x', int), ('y', [('t', int), ('s', float)])]) <ide> x = np.loadtxt(c, dtype=dt, delimiter=',') <ide> a = np.array([(1, (2, 3.0)), (4, (5, 6.0))], dt) <ide> assert_array_equal(x, a) <ide> <ide> def test_shaped_dtype(self): <del> c = StringIO("aaaa 1.0 8.0 1 2 3 4 5 6") <add> c = TextIO("aaaa 1.0 8.0 1 2 3 4 5 6") <ide> dt = np.dtype([('name', 'S4'), ('x', float), ('y', float), <ide> ('block', int, (2, 3))]) <ide> x = np.loadtxt(c, dtype=dt) <ide> def test_shaped_dtype(self): <ide> assert_array_equal(x, a) <ide> <ide> def test_3d_shaped_dtype(self): <del> c = StringIO("aaaa 1.0 8.0 1 2 3 4 5 6 7 8 9 10 11 12") <add> c = TextIO("aaaa 1.0 8.0 1 2 3 4 5 6 7 8 9 10 11 12") <ide> dt = np.dtype([('name', 'S4'), ('x', float), ('y', float), <ide> ('block', int, (2, 2, 3))]) <ide> x = np.loadtxt(c, dtype=dt) <ide> def test_empty_file(self): <ide> try: <ide> warnings.filterwarnings("ignore", <ide> message="loadtxt: Empty input file:") <del> c = StringIO() <add> c = TextIO() <ide> x = np.loadtxt(c) <ide> assert_equal(x.shape, (0,)) <ide> x = np.loadtxt(c, dtype=np.int64) <ide> def test_empty_file(self): <ide> <ide> <ide> def test_unused_converter(self): <del> c = StringIO() <del> c.writelines([asbytes('1 21\n'), asbytes('3 42\n')]) <add> c = TextIO() <add> c.writelines(['1 21\n', '3 42\n']) <ide> c.seek(0) <ide> data = np.loadtxt(c, usecols=(1,), <ide> converters={0: lambda s: int(s, 16)}) <ide> def test_dtype_with_object(self): <ide> "Test using an explicit dtype with an object" <ide> from datetime import date <ide> import time <del> data = asbytes(""" 1; 2001-01-01 <del> 2; 2002-01-31 """) <add> data = """ 1; 2001-01-01 <add> 2; 2002-01-31 """ <ide> ndtype = [('idx', int), ('code', np.object)] <ide> func = lambda s: strptime(s.strip(), "%Y-%m-%d") <ide> converters = {1: func} 
<del> test = np.loadtxt(StringIO(data), delimiter=";", dtype=ndtype, <add> test = np.loadtxt(TextIO(data), delimiter=";", dtype=ndtype, <ide> converters=converters) <ide> control = np.array([(1, datetime(2001, 1, 1)), (2, datetime(2002, 1, 31))], <ide> dtype=ndtype) <ide> assert_equal(test, control) <ide> <ide> def test_uint64_type(self): <ide> tgt = (9223372043271415339, 9223372043271415853) <del> c = StringIO() <del> c.write(asbytes("%s %s" % tgt)) <add> c = TextIO() <add> c.write("%s %s" % tgt) <ide> c.seek(0) <ide> res = np.loadtxt(c, dtype=np.uint64) <ide> assert_equal(res, tgt) <ide> <ide> def test_int64_type(self): <ide> tgt = (-9223372036854775807, 9223372036854775807) <del> c = StringIO() <del> c.write(asbytes("%s %s" % tgt)) <add> c = TextIO() <add> c.write("%s %s" % tgt) <ide> c.seek(0) <ide> res = np.loadtxt(c, dtype=np.int64) <ide> assert_equal(res, tgt) <ide> <ide> def test_universal_newline(self): <ide> f, name = mkstemp() <del> os.write(f, asbytes('1 21\r3 42\r')) <add> os.write(f, b'1 21\r3 42\r') <ide> os.close(f) <ide> <ide> try: <ide> def test_universal_newline(self): <ide> os.unlink(name) <ide> <ide> def test_empty_field_after_tab(self): <del> c = StringIO() <del> c.write(asbytes('1 \t2 \t3\tstart \n4\t5\t6\t \n7\t8\t9.5\t')) <add> c = TextIO() <add> c.write('1 \t2 \t3\tstart \n4\t5\t6\t \n7\t8\t9.5\t') <ide> c.seek(0) <ide> dt = { 'names': ('x', 'y', 'z', 'comment'), <ide> 'formats': ('<i4', '<i4', '<f4', '|S8')} <ide> x = np.loadtxt(c, dtype=dt, delimiter='\t') <del> a = np.array([asbytes('start '), asbytes(' '), asbytes('')]) <add> a = np.array([b'start ', b' ', b'']) <ide> assert_array_equal(x['comment'], a) <ide> <ide> def test_structure_unpack(self): <del> txt = StringIO(asbytes("M 21 72\nF 35 58")) <add> txt = TextIO("M 21 72\nF 35 58") <ide> dt = { 'names': ('a', 'b', 'c'), 'formats': ('|S1', '<i4', '<f4')} <ide> a, b, c = np.loadtxt(txt, dtype=dt, unpack=True) <ide> assert_(a.dtype.str == '|S1') <ide> assert_(b.dtype.str == '<i4') <ide> assert_(c.dtype.str == '<f4') <del> assert_array_equal(a, np.array([asbytes('M'), asbytes('F')])) <add> assert_array_equal(a, np.array([b'M', b'F'])) <ide> assert_array_equal(b, np.array([21, 35])) <ide> assert_array_equal(c, np.array([ 72., 58.])) <ide> <ide> def test_ndmin_keyword(self): <del> c = StringIO() <del> c.write(asbytes('1,2,3\n4,5,6')) <add> c = TextIO() <add> c.write('1,2,3\n4,5,6') <ide> c.seek(0) <ide> assert_raises(ValueError, np.loadtxt, c, ndmin=3) <ide> c.seek(0) <ide> def test_ndmin_keyword(self): <ide> x = np.loadtxt(c, dtype=int, delimiter=',', ndmin=1) <ide> a = np.array([[1, 2, 3], [4, 5, 6]]) <ide> assert_array_equal(x, a) <del> d = StringIO() <del> d.write(asbytes('0,1,2')) <add> <add> d = TextIO() <add> d.write('0,1,2') <ide> d.seek(0) <ide> x = np.loadtxt(d, dtype=int, delimiter=',', ndmin=2) <ide> assert_(x.shape == (1, 3)) <ide> def test_ndmin_keyword(self): <ide> d.seek(0) <ide> x = np.loadtxt(d, dtype=int, delimiter=',', ndmin=0) <ide> assert_(x.shape == (3,)) <del> e = StringIO() <del> e.write(asbytes('0\n1\n2')) <add> <add> e = TextIO() <add> e.write('0\n1\n2') <ide> e.seek(0) <ide> x = np.loadtxt(e, dtype=int, delimiter=',', ndmin=2) <ide> assert_(x.shape == (3, 1)) <ide> def test_ndmin_keyword(self): <ide> try: <ide> warnings.filterwarnings("ignore", <ide> message="loadtxt: Empty input file:") <del> f = StringIO() <add> f = TextIO() <ide> assert_(np.loadtxt(f, ndmin=2).shape == (0, 1,)) <ide> assert_(np.loadtxt(f, ndmin=1).shape == (0,)) <ide> finally: <ide> def test_ndmin_keyword(self): 
<ide> def test_generator_source(self): <ide> def count(): <ide> for i in range(10): <del> yield asbytes("%d" % i) <add> yield "%d" % i <ide> <ide> res = np.loadtxt(count()) <ide> assert_array_equal(res, np.arange(10)) <ide> <ide> class Testfromregex(TestCase): <add> # np.fromregex expects files opened in binary mode. <ide> def test_record(self): <del> c = StringIO() <del> c.write(asbytes('1.312 foo\n1.534 bar\n4.444 qux')) <add> c = TextIO() <add> c.write('1.312 foo\n1.534 bar\n4.444 qux') <ide> c.seek(0) <ide> <ide> dt = [('num', np.float64), ('val', 'S3')] <ide> def test_record(self): <ide> assert_array_equal(x, a) <ide> <ide> def test_record_2(self): <del> c = StringIO() <del> c.write(asbytes('1312 foo\n1534 bar\n4444 qux')) <add> c = TextIO() <add> c.write('1312 foo\n1534 bar\n4444 qux') <ide> c.seek(0) <ide> <ide> dt = [('num', np.int32), ('val', 'S3')] <ide> def test_record_2(self): <ide> assert_array_equal(x, a) <ide> <ide> def test_record_3(self): <del> c = StringIO() <del> c.write(asbytes('1312 foo\n1534 bar\n4444 qux')) <add> c = TextIO() <add> c.write('1312 foo\n1534 bar\n4444 qux') <ide> c.seek(0) <ide> <ide> dt = [('num', np.float64)] <ide> class TestFromTxt(TestCase): <ide> # <ide> def test_record(self): <ide> "Test w/ explicit dtype" <del> data = StringIO(asbytes('1 2\n3 4')) <add> data = TextIO('1 2\n3 4') <ide> # data.seek(0) <ide> test = np.ndfromtxt(data, dtype=[('x', np.int32), ('y', np.int32)]) <ide> control = np.array([(1, 2), (3, 4)], dtype=[('x', 'i4'), ('y', 'i4')]) <ide> assert_equal(test, control) <ide> # <del> data = StringIO('M 64.0 75.0\nF 25.0 60.0') <add> data = TextIO('M 64.0 75.0\nF 25.0 60.0') <ide> # data.seek(0) <ide> descriptor = {'names': ('gender', 'age', 'weight'), <ide> 'formats': ('S1', 'i4', 'f4')} <ide> def test_record(self): <ide> <ide> def test_array(self): <ide> "Test outputing a standard ndarray" <del> data = StringIO('1 2\n3 4') <add> data = TextIO('1 2\n3 4') <ide> control = np.array([[1, 2], [3, 4]], dtype=int) <ide> test = np.ndfromtxt(data, dtype=int) <ide> assert_array_equal(test, control) <ide> def test_1D(self): <ide> "Test squeezing to 1D" <ide> control = np.array([1, 2, 3, 4], int) <ide> # <del> data = StringIO('1\n2\n3\n4\n') <add> data = TextIO('1\n2\n3\n4\n') <ide> test = np.ndfromtxt(data, dtype=int) <ide> assert_array_equal(test, control) <ide> # <del> data = StringIO('1,2,3,4\n') <del> test = np.ndfromtxt(data, dtype=int, delimiter=asbytes(',')) <add> data = TextIO('1,2,3,4\n') <add> test = np.ndfromtxt(data, dtype=int, delimiter=',') <ide> assert_array_equal(test, control) <ide> <ide> def test_comments(self): <ide> "Test the stripping of comments" <ide> control = np.array([1, 2, 3, 5], int) <ide> # Comment on its own line <del> data = StringIO('# comment\n1,2,3,5\n') <del> test = np.ndfromtxt(data, dtype=int, delimiter=asbytes(','), comments=asbytes('#')) <add> data = TextIO('# comment\n1,2,3,5\n') <add> test = np.ndfromtxt(data, dtype=int, delimiter=',', comments='#') <ide> assert_equal(test, control) <ide> # Comment at the end of a line <del> data = StringIO('1,2,3,5# comment\n') <del> test = np.ndfromtxt(data, dtype=int, delimiter=asbytes(','), comments=asbytes('#')) <add> data = TextIO('1,2,3,5# comment\n') <add> test = np.ndfromtxt(data, dtype=int, delimiter=',', comments='#') <ide> assert_equal(test, control) <ide> <ide> def test_skiprows(self): <ide> "Test row skipping" <ide> control = np.array([1, 2, 3, 5], int) <del> kwargs = dict(dtype=int, delimiter=asbytes(',')) <add> kwargs = dict(dtype=int, delimiter=',') <ide> 
# <del> data = StringIO('comment\n1,2,3,5\n') <add> data = TextIO('comment\n1,2,3,5\n') <ide> test = np.ndfromtxt(data, skip_header=1, **kwargs) <ide> assert_equal(test, control) <ide> # <del> data = StringIO('# comment\n1,2,3,5\n') <add> data = TextIO('# comment\n1,2,3,5\n') <ide> test = np.loadtxt(data, skiprows=1, **kwargs) <ide> assert_equal(test, control) <ide> <ide> def test_skip_footer(self): <ide> data.extend(["%i,%3.1f,%03s" % (i, i, i) for i in range(51)]) <ide> data[-1] = "99,99" <ide> kwargs = dict(delimiter=",", names=True, skip_header=5, skip_footer=10) <del> test = np.genfromtxt(StringIO(asbytes("\n".join(data))), **kwargs) <add> test = np.genfromtxt(TextIO("\n".join(data)), **kwargs) <ide> ctrl = np.array([("%f" % i, "%f" % i, "%f" % i) for i in range(41)], <ide> dtype=[(_, float) for _ in "ABC"]) <ide> assert_equal(test, ctrl) <ide> def test_skip_footer_with_invalid(self): <ide> warnings.filterwarnings("ignore") <ide> # Footer too small to get rid of all invalid values <ide> assert_raises(ValueError, np.genfromtxt, <del> StringIO(basestr), skip_footer=1) <add> TextIO(basestr), skip_footer=1) <ide> # except ValueError: <ide> # pass <del> a = np.genfromtxt(StringIO(basestr), skip_footer=1, invalid_raise=False) <add> a = np.genfromtxt(TextIO(basestr), skip_footer=1, invalid_raise=False) <ide> assert_equal(a, np.array([[1., 1.], [2., 2.], [3., 3.], [4., 4.]])) <ide> # <del> a = np.genfromtxt(StringIO(basestr), skip_footer=3) <add> a = np.genfromtxt(TextIO(basestr), skip_footer=3) <ide> assert_equal(a, np.array([[1., 1.], [2., 2.], [3., 3.], [4., 4.]])) <ide> # <ide> basestr = '1 1\n2 \n3 3\n4 4\n5 \n6 6\n7 7\n' <del> a = np.genfromtxt(StringIO(basestr), skip_footer=1, invalid_raise=False) <add> a = np.genfromtxt(TextIO(basestr), skip_footer=1, invalid_raise=False) <ide> assert_equal(a, np.array([[1., 1.], [3., 3.], [4., 4.], [6., 6.]])) <del> a = np.genfromtxt(StringIO(basestr), skip_footer=3, invalid_raise=False) <add> a = np.genfromtxt(TextIO(basestr), skip_footer=3, invalid_raise=False) <ide> assert_equal(a, np.array([[1., 1.], [3., 3.], [4., 4.]])) <ide> finally: <ide> warn_ctx.__exit__() <ide> <ide> <ide> def test_header(self): <ide> "Test retrieving a header" <del> data = StringIO('gender age weight\nM 64.0 75.0\nF 25.0 60.0') <add> data = TextIO('gender age weight\nM 64.0 75.0\nF 25.0 60.0') <ide> test = np.ndfromtxt(data, dtype=None, names=True) <del> control = {'gender': np.array(asbytes_nested(['M', 'F'])), <add> control = {'gender': np.array([b'M', b'F']), <ide> 'age': np.array([64.0, 25.0]), <ide> 'weight': np.array([75.0, 60.0])} <ide> assert_equal(test['gender'], control['gender']) <ide> def test_header(self): <ide> <ide> def test_auto_dtype(self): <ide> "Test the automatic definition of the output dtype" <del> data = StringIO('A 64 75.0 3+4j True\nBCD 25 60.0 5+6j False') <add> data = TextIO('A 64 75.0 3+4j True\nBCD 25 60.0 5+6j False') <ide> test = np.ndfromtxt(data, dtype=None) <del> control = [np.array(asbytes_nested(['A', 'BCD'])), <add> control = [np.array([b'A', b'BCD']), <ide> np.array([64, 25]), <ide> np.array([75.0, 60.0]), <ide> np.array([3 + 4j, 5 + 6j]), <ide> def test_auto_dtype(self): <ide> <ide> def test_auto_dtype_uniform(self): <ide> "Tests whether the output dtype can be uniformized" <del> data = StringIO('1 2 3 4\n5 6 7 8\n') <add> data = TextIO('1 2 3 4\n5 6 7 8\n') <ide> test = np.ndfromtxt(data, dtype=None) <ide> control = np.array([[1, 2, 3, 4], [5, 6, 7, 8]]) <ide> assert_equal(test, control) <ide> <ide> <ide> def test_fancy_dtype(self): 
<ide> "Check that a nested dtype isn't MIA" <del> data = StringIO('1,2,3.0\n4,5,6.0\n') <add> data = TextIO('1,2,3.0\n4,5,6.0\n') <ide> fancydtype = np.dtype([('x', int), ('y', [('t', int), ('s', float)])]) <ide> test = np.ndfromtxt(data, dtype=fancydtype, delimiter=',') <ide> control = np.array([(1, (2, 3.0)), (4, (5, 6.0))], dtype=fancydtype) <ide> def test_names_overwrite(self): <ide> "Test overwriting the names of the dtype" <ide> descriptor = {'names': ('g', 'a', 'w'), <ide> 'formats': ('S1', 'i4', 'f4')} <del> data = StringIO('M 64.0 75.0\nF 25.0 60.0') <add> data = TextIO(b'M 64.0 75.0\nF 25.0 60.0') <ide> names = ('gender', 'age', 'weight') <ide> test = np.ndfromtxt(data, dtype=descriptor, names=names) <ide> descriptor['names'] = names <ide> def test_names_overwrite(self): <ide> <ide> def test_commented_header(self): <ide> "Check that names can be retrieved even if the line is commented out." <del> data = StringIO(""" <add> data = TextIO(""" <ide> #gender age weight <ide> M 21 72.100000 <ide> F 35 58.330000 <ide> def test_commented_header(self): <ide> dtype=[('gender', '|S1'), ('age', int), ('weight', float)]) <ide> assert_equal(test, ctrl) <ide> # Ditto, but we should get rid of the first element <del> data = StringIO(""" <add> data = TextIO(b""" <ide> # gender age weight <ide> M 21 72.100000 <ide> F 35 58.330000 <ide> def test_commented_header(self): <ide> <ide> def test_autonames_and_usecols(self): <ide> "Tests names and usecols" <del> data = StringIO('A B C D\n aaaa 121 45 9.1') <add> data = TextIO('A B C D\n aaaa 121 45 9.1') <ide> test = np.ndfromtxt(data, usecols=('A', 'C', 'D'), <ide> names=True, dtype=None) <ide> control = np.array(('aaaa', 45, 9.1), <ide> def test_autonames_and_usecols(self): <ide> <ide> def test_converters_with_usecols(self): <ide> "Test the combination user-defined converters and usecol" <del> data = StringIO('1,2,3,,5\n6,7,8,9,10\n') <add> data = TextIO('1,2,3,,5\n6,7,8,9,10\n') <ide> test = np.ndfromtxt(data, dtype=int, delimiter=',', <ide> converters={3:lambda s: int(s or - 999)}, <ide> usecols=(1, 3,)) <ide> def test_converters_with_usecols(self): <ide> <ide> def test_converters_with_usecols_and_names(self): <ide> "Tests names and usecols" <del> data = StringIO('A B C D\n aaaa 121 45 9.1') <add> data = TextIO('A B C D\n aaaa 121 45 9.1') <ide> test = np.ndfromtxt(data, usecols=('A', 'C', 'D'), names=True, <ide> dtype=None, converters={'C':lambda s: 2 * int(s)}) <ide> control = np.array(('aaaa', 90, 9.1), <ide> def test_converters_with_usecols_and_names(self): <ide> def test_converters_cornercases(self): <ide> "Test the conversion to datetime." <ide> converter = {'date': lambda s: strptime(s, '%Y-%m-%d %H:%M:%SZ')} <del> data = StringIO('2009-02-03 12:00:00Z, 72214.0') <add> data = TextIO('2009-02-03 12:00:00Z, 72214.0') <ide> test = np.ndfromtxt(data, delimiter=',', dtype=None, <ide> names=['date', 'stid'], converters=converter) <del> control = np.array((datetime(2009, 02, 03), 72214.), <add> control = np.array((datetime(2009, 2, 3), 72214.), <ide> dtype=[('date', np.object_), ('stid', float)]) <ide> assert_equal(test, control) <ide> <ide> def test_converters_cornercases2(self): <ide> "Test the conversion to datetime64." 
<ide> converter = {'date': lambda s: np.datetime64(strptime(s, '%Y-%m-%d %H:%M:%SZ'))} <del> data = StringIO('2009-02-03 12:00:00Z, 72214.0') <add> data = TextIO('2009-02-03 12:00:00Z, 72214.0') <ide> test = np.ndfromtxt(data, delimiter=',', dtype=None, <ide> names=['date', 'stid'], converters=converter) <del> control = np.array((datetime(2009, 02, 03), 72214.), <add> control = np.array((datetime(2009, 2, 3), 72214.), <ide> dtype=[('date', 'datetime64[us]'), ('stid', float)]) <ide> assert_equal(test, control) <ide> <ide> def test_unused_converter(self): <ide> "Test whether unused converters are forgotten" <del> data = StringIO("1 21\n 3 42\n") <add> data = TextIO("1 21\n 3 42\n") <ide> test = np.ndfromtxt(data, usecols=(1,), <ide> converters={0: lambda s: int(s, 16)}) <ide> assert_equal(test, [21, 42]) <ide> def test_unused_converter(self): <ide> <ide> <ide> def test_invalid_converter(self): <del> strip_rand = lambda x : float((asbytes('r') in x.lower() and x.split()[-1]) or <del> (not asbytes('r') in x.lower() and x.strip() or 0.0)) <del> strip_per = lambda x : float((asbytes('%') in x.lower() and x.split()[0]) or <del> (not asbytes('%') in x.lower() and x.strip() or 0.0)) <del> s = StringIO("D01N01,10/1/2003 ,1 %,R 75,400,600\r\n" \ <add> strip_rand = lambda x : float((b'r' in x.lower() and x.split()[-1]) or <add> (b'r' not in x.lower() and x.strip() or 0.0)) <add> strip_per = lambda x : float((b'%' in x.lower() and x.split()[0]) or <add> (b'%' not in x.lower() and x.strip() or 0.0)) <add> s = TextIO("D01N01,10/1/2003 ,1 %,R 75,400,600\r\n" <ide> "L24U05,12/5/2003, 2 %,1,300, 150.5\r\n" <ide> "D02N03,10/10/2004,R 1,,7,145.55") <ide> kwargs = dict(converters={2 : strip_per, 3 : strip_rand}, delimiter=",", <ide> def test_invalid_converter(self): <ide> <ide> def test_tricky_converter_bug1666(self): <ide> "Test some corner case" <del> s = StringIO('q1,2\nq3,4') <add> s = TextIO('q1,2\nq3,4') <ide> cnv = lambda s:float(s[1:]) <ide> test = np.genfromtxt(s, delimiter=',', converters={0:cnv}) <ide> control = np.array([[1., 2.], [3., 4.]]) <ide> def test_tricky_converter_bug1666(self): <ide> <ide> def test_dtype_with_converters(self): <ide> dstr = "2009; 23; 46" <del> test = np.ndfromtxt(StringIO(dstr,), <add> test = np.ndfromtxt(TextIO(dstr,), <ide> delimiter=";", dtype=float, converters={0:bytes}) <ide> control = np.array([('2009', 23., 46)], <ide> dtype=[('f0', '|S4'), ('f1', float), ('f2', float)]) <ide> assert_equal(test, control) <del> test = np.ndfromtxt(StringIO(dstr,), <add> test = np.ndfromtxt(TextIO(dstr,), <ide> delimiter=";", dtype=float, converters={0:float}) <ide> control = np.array([2009., 23., 46],) <ide> assert_equal(test, control) <ide> def test_dtype_with_object(self): <ide> "Test using an explicit dtype with an object" <ide> from datetime import date <ide> import time <del> data = asbytes(""" 1; 2001-01-01 <del> 2; 2002-01-31 """) <add> data = """ 1; 2001-01-01 <add> 2; 2002-01-31 """ <ide> ndtype = [('idx', int), ('code', np.object)] <ide> func = lambda s: strptime(s.strip(), "%Y-%m-%d") <ide> converters = {1: func} <del> test = np.genfromtxt(StringIO(data), delimiter=";", dtype=ndtype, <add> test = np.genfromtxt(TextIO(data), delimiter=";", dtype=ndtype, <ide> converters=converters) <ide> control = np.array([(1, datetime(2001, 1, 1)), (2, datetime(2002, 1, 31))], <ide> dtype=ndtype) <ide> assert_equal(test, control) <ide> # <ide> ndtype = [('nest', [('idx', int), ('code', np.object)])] <ide> try: <del> test = np.genfromtxt(StringIO(data), delimiter=";", <add> test = 
np.genfromtxt(TextIO(data), delimiter=";", <ide> dtype=ndtype, converters=converters) <ide> except NotImplementedError: <ide> pass <ide> def test_dtype_with_object(self): <ide> <ide> def test_userconverters_with_explicit_dtype(self): <ide> "Test user_converters w/ explicit (standard) dtype" <del> data = StringIO('skip,skip,2001-01-01,1.0,skip') <add> data = TextIO('skip,skip,2001-01-01,1.0,skip') <ide> test = np.genfromtxt(data, delimiter=",", names=None, dtype=float, <ide> usecols=(2, 3), converters={2: bytes}) <ide> control = np.array([('2001-01-01', 1.)], <ide> def test_userconverters_with_explicit_dtype(self): <ide> <ide> def test_spacedelimiter(self): <ide> "Test space delimiter" <del> data = StringIO("1 2 3 4 5\n6 7 8 9 10") <add> data = TextIO("1 2 3 4 5\n6 7 8 9 10") <ide> test = np.ndfromtxt(data) <ide> control = np.array([[ 1., 2., 3., 4., 5.], <ide> [ 6., 7., 8., 9., 10.]]) <ide> def test_spacedelimiter(self): <ide> def test_integer_delimiter(self): <ide> "Test using an integer for delimiter" <ide> data = " 1 2 3\n 4 5 67\n890123 4" <del> test = np.genfromtxt(StringIO(data), delimiter=3) <add> test = np.genfromtxt(TextIO(data), delimiter=3) <ide> control = np.array([[1, 2, 3], [4, 5, 67], [890, 123, 4]]) <ide> assert_equal(test, control) <ide> <ide> <ide> def test_missing(self): <del> data = StringIO('1,2,3,,5\n') <add> data = TextIO('1,2,3,,5\n') <ide> test = np.ndfromtxt(data, dtype=int, delimiter=',', \ <ide> converters={3:lambda s: int(s or - 999)}) <ide> control = np.array([1, 2, 3, -999, 5], int) <ide> def test_missing(self): <ide> def test_missing_with_tabs(self): <ide> "Test w/ a delimiter tab" <ide> txt = "1\t2\t3\n\t2\t\n1\t\t3" <del> test = np.genfromtxt(StringIO(txt), delimiter="\t", <add> test = np.genfromtxt(TextIO(txt), delimiter="\t", <ide> usemask=True,) <ide> ctrl_d = np.array([(1, 2, 3), (np.nan, 2, np.nan), (1, np.nan, 3)],) <ide> ctrl_m = np.array([(0, 0, 0), (1, 0, 1), (0, 1, 0)], dtype=bool) <ide> def test_usecols(self): <ide> "Test the selection of columns" <ide> # Select 1 column <ide> control = np.array([[1, 2], [3, 4]], float) <del> data = StringIO() <add> data = TextIO() <ide> np.savetxt(data, control) <ide> data.seek(0) <ide> test = np.ndfromtxt(data, dtype=float, usecols=(1,)) <ide> assert_equal(test, control[:, 1]) <ide> # <ide> control = np.array([[1, 2, 3], [3, 4, 5]], float) <del> data = StringIO() <add> data = TextIO() <ide> np.savetxt(data, control) <ide> data.seek(0) <ide> test = np.ndfromtxt(data, dtype=float, usecols=(1, 2)) <ide> def test_usecols(self): <ide> def test_usecols_as_css(self): <ide> "Test giving usecols with a comma-separated string" <ide> data = "1 2 3\n4 5 6" <del> test = np.genfromtxt(StringIO(data), <add> test = np.genfromtxt(TextIO(data), <ide> names="a, b, c", usecols="a, c") <ide> ctrl = np.array([(1, 3), (4, 6)], dtype=[(_, float) for _ in "ac"]) <ide> assert_equal(test, ctrl) <ide> <ide> def test_usecols_with_structured_dtype(self): <ide> "Test usecols with an explicit structured dtype" <del> data = StringIO("""JOE 70.1 25.3\nBOB 60.5 27.9""") <add> data = TextIO("JOE 70.1 25.3\nBOB 60.5 27.9") <ide> names = ['stid', 'temp'] <ide> dtypes = ['S4', 'f8'] <ide> test = np.ndfromtxt(data, usecols=(0, 2), dtype=zip(names, dtypes)) <del> assert_equal(test['stid'], asbytes_nested(["JOE", "BOB"])) <add> assert_equal(test['stid'], [b"JOE", b"BOB"]) <ide> assert_equal(test['temp'], [25.3, 27.9]) <ide> <ide> def test_usecols_with_integer(self): <ide> "Test usecols with an integer" <del> test = np.genfromtxt(StringIO("1 2 3\n4 5 
6"), usecols=0) <add> test = np.genfromtxt(TextIO(b"1 2 3\n4 5 6"), usecols=0) <ide> assert_equal(test, np.array([1., 4.])) <ide> <ide> def test_usecols_with_named_columns(self): <ide> "Test usecols with named columns" <ide> ctrl = np.array([(1, 3), (4, 6)], dtype=[('a', float), ('c', float)]) <ide> data = "1 2 3\n4 5 6" <ide> kwargs = dict(names="a, b, c") <del> test = np.genfromtxt(StringIO(data), usecols=(0, -1), **kwargs) <add> test = np.genfromtxt(TextIO(data), usecols=(0, -1), **kwargs) <ide> assert_equal(test, ctrl) <del> test = np.genfromtxt(StringIO(data), <add> test = np.genfromtxt(TextIO(data), <ide> usecols=('a', 'c'), **kwargs) <ide> assert_equal(test, ctrl) <ide> <ide> def test_empty_file(self): <ide> warn_ctx.__enter__() <ide> try: <ide> warnings.filterwarnings("ignore", message="genfromtxt: Empty input file:") <del> data = StringIO() <add> data = TextIO() <ide> test = np.genfromtxt(data) <ide> assert_equal(test, np.array([])) <ide> finally: <ide> warn_ctx.__exit__() <ide> <ide> def test_fancy_dtype_alt(self): <ide> "Check that a nested dtype isn't MIA" <del> data = StringIO('1,2,3.0\n4,5,6.0\n') <add> data = TextIO('1,2,3.0\n4,5,6.0\n') <ide> fancydtype = np.dtype([('x', int), ('y', [('t', int), ('s', float)])]) <ide> test = np.mafromtxt(data, dtype=fancydtype, delimiter=',') <ide> control = ma.array([(1, (2, 3.0)), (4, (5, 6.0))], dtype=fancydtype) <ide> assert_equal(test, control) <ide> <ide> <ide> def test_shaped_dtype(self): <del> c = StringIO("aaaa 1.0 8.0 1 2 3 4 5 6") <add> c = TextIO("aaaa 1.0 8.0 1 2 3 4 5 6") <ide> dt = np.dtype([('name', 'S4'), ('x', float), ('y', float), <ide> ('block', int, (2, 3))]) <ide> x = np.ndfromtxt(c, dtype=dt) <ide> def test_shaped_dtype(self): <ide> assert_array_equal(x, a) <ide> <ide> def test_withmissing(self): <del> data = StringIO('A,B\n0,1\n2,N/A') <add> data = TextIO('A,B\n0,1\n2,N/A') <ide> kwargs = dict(delimiter=",", missing_values="N/A", names=True) <ide> test = np.mafromtxt(data, dtype=None, **kwargs) <ide> control = ma.array([(0, 1), (2, -1)], <ide> def test_user_missing_values(self): <ide> basekwargs = dict(dtype=None, delimiter=",", names=True,) <ide> mdtype = [('A', int), ('B', float), ('C', complex)] <ide> # <del> test = np.mafromtxt(StringIO(data), missing_values="N/A", <add> test = np.mafromtxt(TextIO(data), missing_values="N/A", <ide> **basekwargs) <ide> control = ma.array([(0, 0.0, 0j), (1, -999, 1j), <ide> (-9, 2.2, -999j), (3, -99, 3j)], <ide> def test_user_missing_values(self): <ide> assert_equal(test, control) <ide> # <ide> basekwargs['dtype'] = mdtype <del> test = np.mafromtxt(StringIO(data), <add> test = np.mafromtxt(TextIO(data), <ide> missing_values={0:-9, 1:-99, 2:-999j}, **basekwargs) <ide> control = ma.array([(0, 0.0, 0j), (1, -999, 1j), <ide> (-9, 2.2, -999j), (3, -99, 3j)], <ide> mask=[(0, 0, 0), (0, 1, 0), (1, 0, 1), (0, 1, 0)], <ide> dtype=mdtype) <ide> assert_equal(test, control) <ide> # <del> test = np.mafromtxt(StringIO(data), <add> test = np.mafromtxt(TextIO(data), <ide> missing_values={0:-9, 'B':-99, 'C':-999j}, <ide> **basekwargs) <ide> control = ma.array([(0, 0.0, 0j), (1, -999, 1j), <ide> def test_user_filling_values(self): <ide> names="a,b,c", <ide> missing_values={0:"N/A", 'b':" ", 2:"???"}, <ide> filling_values={0:0, 'b':0, 2:-999}) <del> test = np.genfromtxt(StringIO(data), **kwargs) <add> test = np.genfromtxt(TextIO(data), **kwargs) <ide> ctrl = np.array([(0, 2, 3), (4, 0, -999)], <ide> dtype=[(_, int) for _ in "abc"]) <ide> assert_equal(test, ctrl) <ide> # <del> test = 
np.genfromtxt(StringIO(data), usecols=(0, -1), **kwargs) <add> test = np.genfromtxt(TextIO(data), usecols=(0, -1), **kwargs) <ide> ctrl = np.array([(0, 3), (4, -999)], dtype=[(_, int) for _ in "ac"]) <ide> assert_equal(test, ctrl) <ide> <ide> <ide> def test_withmissing_float(self): <del> data = StringIO('A,B\n0,1.5\n2,-999.00') <add> data = TextIO('A,B\n0,1.5\n2,-999.00') <ide> test = np.mafromtxt(data, dtype=None, delimiter=',', <ide> missing_values='-999.0', names=True,) <ide> control = ma.array([(0, 1.5), (2, -1.)], <ide> def test_withmissing_float(self): <ide> <ide> def test_with_masked_column_uniform(self): <ide> "Test masked column" <del> data = StringIO('1 2 3\n4 5 6\n') <add> data = TextIO('1 2 3\n4 5 6\n') <ide> test = np.genfromtxt(data, dtype=None, <ide> missing_values='2,5', usemask=True) <ide> control = ma.array([[1, 2, 3], [4, 5, 6]], mask=[[0, 1, 0], [0, 1, 0]]) <ide> assert_equal(test, control) <ide> <ide> def test_with_masked_column_various(self): <ide> "Test masked column" <del> data = StringIO('True 2 3\nFalse 5 6\n') <add> data = TextIO('True 2 3\nFalse 5 6\n') <ide> test = np.genfromtxt(data, dtype=None, <ide> missing_values='2,5', usemask=True) <ide> control = ma.array([(1, 2, 3), (0, 5, 6)], <ide> def test_invalid_raise(self): <ide> for i in range(5): <ide> data[10 * i] = "2, 2, 2, 2 2" <ide> data.insert(0, "a, b, c, d, e") <del> mdata = StringIO("\n".join(data)) <add> mdata = TextIO("\n".join(data)) <ide> # <ide> kwargs = dict(delimiter=",", dtype=None, names=True) <ide> # XXX: is there a better way to get the return value of the callable in <ide> def test_invalid_raise_with_usecols(self): <ide> for i in range(5): <ide> data[10 * i] = "2, 2, 2, 2 2" <ide> data.insert(0, "a, b, c, d, e") <del> mdata = StringIO("\n".join(data)) <add> mdata = TextIO("\n".join(data)) <ide> kwargs = dict(delimiter=",", dtype=None, names=True, <ide> invalid_raise=False) <ide> # XXX: is there a better way to get the return value of the callable in <ide> def f(_ret={}): <ide> def test_inconsistent_dtype(self): <ide> "Test inconsistent dtype" <ide> data = ["1, 1, 1, 1, -1.1"] * 50 <del> mdata = StringIO("\n".join(data)) <add> mdata = TextIO("\n".join(data)) <ide> <ide> converters = {4: lambda x:"(%s)" % x} <ide> kwargs = dict(delimiter=",", converters=converters, <ide> def test_inconsistent_dtype(self): <ide> def test_default_field_format(self): <ide> "Test default format" <ide> data = "0, 1, 2.3\n4, 5, 6.7" <del> mtest = np.ndfromtxt(StringIO(data), <add> mtest = np.ndfromtxt(TextIO(data), <ide> delimiter=",", dtype=None, defaultfmt="f%02i") <ide> ctrl = np.array([(0, 1, 2.3), (4, 5, 6.7)], <ide> dtype=[("f00", int), ("f01", int), ("f02", float)]) <ide> def test_default_field_format(self): <ide> def test_single_dtype_wo_names(self): <ide> "Test single dtype w/o names" <ide> data = "0, 1, 2.3\n4, 5, 6.7" <del> mtest = np.ndfromtxt(StringIO(data), <add> mtest = np.ndfromtxt(TextIO(data), <ide> delimiter=",", dtype=float, defaultfmt="f%02i") <ide> ctrl = np.array([[0., 1., 2.3], [4., 5., 6.7]], dtype=float) <ide> assert_equal(mtest, ctrl) <ide> <ide> def test_single_dtype_w_explicit_names(self): <ide> "Test single dtype w explicit names" <ide> data = "0, 1, 2.3\n4, 5, 6.7" <del> mtest = np.ndfromtxt(StringIO(data), <add> mtest = np.ndfromtxt(TextIO(data), <ide> delimiter=",", dtype=float, names="a, b, c") <ide> ctrl = np.array([(0., 1., 2.3), (4., 5., 6.7)], <ide> dtype=[(_, float) for _ in "abc"]) <ide> def test_single_dtype_w_explicit_names(self): <ide> def 
test_single_dtype_w_implicit_names(self): <ide> "Test single dtype w implicit names" <ide> data = "a, b, c\n0, 1, 2.3\n4, 5, 6.7" <del> mtest = np.ndfromtxt(StringIO(data), <add> mtest = np.ndfromtxt(TextIO(data), <ide> delimiter=",", dtype=float, names=True) <ide> ctrl = np.array([(0., 1., 2.3), (4., 5., 6.7)], <ide> dtype=[(_, float) for _ in "abc"]) <ide> def test_single_dtype_w_implicit_names(self): <ide> def test_easy_structured_dtype(self): <ide> "Test easy structured dtype" <ide> data = "0, 1, 2.3\n4, 5, 6.7" <del> mtest = np.ndfromtxt(StringIO(data), delimiter=",", <add> mtest = np.ndfromtxt(TextIO(data), delimiter=",", <ide> dtype=(int, float, float), defaultfmt="f_%02i") <ide> ctrl = np.array([(0, 1., 2.3), (4, 5., 6.7)], <ide> dtype=[("f_00", int), ("f_01", float), ("f_02", float)]) <ide> def test_autostrip(self): <ide> "Test autostrip" <ide> data = "01/01/2003 , 1.3, abcde" <ide> kwargs = dict(delimiter=",", dtype=None) <del> mtest = np.ndfromtxt(StringIO(data), **kwargs) <add> mtest = np.ndfromtxt(TextIO(data), **kwargs) <ide> ctrl = np.array([('01/01/2003 ', 1.3, ' abcde')], <ide> dtype=[('f0', '|S12'), ('f1', float), ('f2', '|S8')]) <ide> assert_equal(mtest, ctrl) <del> mtest = np.ndfromtxt(StringIO(data), autostrip=True, **kwargs) <add> mtest = np.ndfromtxt(TextIO(data), autostrip=True, **kwargs) <ide> ctrl = np.array([('01/01/2003', 1.3, 'abcde')], <ide> dtype=[('f0', '|S10'), ('f1', float), ('f2', '|S5')]) <ide> assert_equal(mtest, ctrl) <ide> def test_replace_space(self): <ide> "Test the 'replace_space' option" <ide> txt = "A.A, B (B), C:C\n1, 2, 3.14" <ide> # Test default: replace ' ' by '_' and delete non-alphanum chars <del> test = np.genfromtxt(StringIO(txt), <add> test = np.genfromtxt(TextIO(txt), <ide> delimiter=",", names=True, dtype=None) <ide> ctrl_dtype = [("AA", int), ("B_B", int), ("CC", float)] <ide> ctrl = np.array((1, 2, 3.14), dtype=ctrl_dtype) <ide> assert_equal(test, ctrl) <ide> # Test: no replace, no delete <del> test = np.genfromtxt(StringIO(txt), <add> test = np.genfromtxt(TextIO(txt), <ide> delimiter=",", names=True, dtype=None, <ide> replace_space='', deletechars='') <ide> ctrl_dtype = [("A.A", int), ("B (B)", int), ("C:C", float)] <ide> ctrl = np.array((1, 2, 3.14), dtype=ctrl_dtype) <ide> assert_equal(test, ctrl) <ide> # Test: no delete (spaces are replaced by _) <del> test = np.genfromtxt(StringIO(txt), <add> test = np.genfromtxt(TextIO(txt), <ide> delimiter=",", names=True, dtype=None, <ide> deletechars='') <ide> ctrl_dtype = [("A.A", int), ("B_(B)", int), ("C:C", float)] <ide> def test_incomplete_names(self): <ide> # w/ dtype=None <ide> ctrl = np.array([(0, 1, 2), (3, 4, 5)], <ide> dtype=[(_, int) for _ in ('A', 'f0', 'C')]) <del> test = np.ndfromtxt(StringIO(data), dtype=None, **kwargs) <add> test = np.ndfromtxt(TextIO(data), dtype=None, **kwargs) <ide> assert_equal(test, ctrl) <ide> # w/ default dtype <ide> ctrl = np.array([(0, 1, 2), (3, 4, 5)], <ide> dtype=[(_, float) for _ in ('A', 'f0', 'C')]) <del> test = np.ndfromtxt(StringIO(data), **kwargs) <add> test = np.ndfromtxt(TextIO(data), **kwargs) <ide> <ide> def test_names_auto_completion(self): <ide> "Make sure that names are properly completed" <ide> data = "1 2 3\n 4 5 6" <del> test = np.genfromtxt(StringIO(data), <add> test = np.genfromtxt(TextIO(data), <ide> dtype=(int, float, int), names="a") <ide> ctrl = np.array([(1, 2, 3), (4, 5, 6)], <ide> dtype=[('a', int), ('f0', float), ('f1', int)]) <ide> def test_names_with_usecols_bug1636(self): <ide> "Make sure we pick up the right names w/ 
usecols" <ide> data = "A,B,C,D,E\n0,1,2,3,4\n0,1,2,3,4\n0,1,2,3,4" <ide> ctrl_names = ("A", "C", "E") <del> test = np.genfromtxt(StringIO(data), <add> test = np.genfromtxt(TextIO(data), <ide> dtype=(int, int, int), delimiter=",", <ide> usecols=(0, 2, 4), names=True) <ide> assert_equal(test.dtype.names, ctrl_names) <ide> # <del> test = np.genfromtxt(StringIO(data), <add> test = np.genfromtxt(TextIO(data), <ide> dtype=(int, int, int), delimiter=",", <ide> usecols=("A", "C", "E"), names=True) <ide> assert_equal(test.dtype.names, ctrl_names) <ide> # <del> test = np.genfromtxt(StringIO(data), <add> test = np.genfromtxt(TextIO(data), <ide> dtype=int, delimiter=",", <ide> usecols=("A", "C", "E"), names=True) <ide> assert_equal(test.dtype.names, ctrl_names) <ide> def test_fixed_width_names(self): <ide> kwargs = dict(delimiter=(5, 5, 4), names=True, dtype=None) <ide> ctrl = np.array([(0, 1, 2.3), (45, 67, 9.)], <ide> dtype=[('A', int), ('B', int), ('C', float)]) <del> test = np.ndfromtxt(StringIO(data), **kwargs) <add> test = np.ndfromtxt(TextIO(data), **kwargs) <ide> assert_equal(test, ctrl) <ide> # <ide> kwargs = dict(delimiter=5, names=True, dtype=None) <ide> ctrl = np.array([(0, 1, 2.3), (45, 67, 9.)], <ide> dtype=[('A', int), ('B', int), ('C', float)]) <del> test = np.ndfromtxt(StringIO(data), **kwargs) <add> test = np.ndfromtxt(TextIO(data), **kwargs) <ide> assert_equal(test, ctrl) <ide> <ide> def test_filling_values(self): <ide> "Test missing values" <del> data = "1, 2, 3\n1, , 5\n0, 6, \n" <add> data = b"1, 2, 3\n1, , 5\n0, 6, \n" <ide> kwargs = dict(delimiter=",", dtype=None, filling_values= -999) <ide> ctrl = np.array([[1, 2, 3], [1, -999, 5], [0, 6, -999]], dtype=int) <del> test = np.ndfromtxt(StringIO(data), **kwargs) <add> test = np.ndfromtxt(TextIO(data), **kwargs) <ide> assert_equal(test, ctrl) <ide> <ide> def test_comments_is_none(self): <ide> # Github issue 329 (None was previously being converted to 'None'). 
<del> test = np.genfromtxt(StringIO("test1,testNonetherestofthedata"), <add> test = np.genfromtxt(TextIO("test1,testNonetherestofthedata"), <ide> dtype=None, comments=None, delimiter=',') <del> assert_equal(test[1], asbytes('testNonetherestofthedata')) <del> test = np.genfromtxt(StringIO("test1, testNonetherestofthedata"), <add> assert_equal(test[1], b'testNonetherestofthedata') <add> test = np.genfromtxt(TextIO("test1, testNonetherestofthedata"), <ide> dtype=None, comments=None, delimiter=',') <del> assert_equal(test[1], asbytes(' testNonetherestofthedata')) <add> assert_equal(test[1], b' testNonetherestofthedata') <ide> <ide> def test_recfromtxt(self): <ide> # <del> data = StringIO('A,B\n0,1\n2,3') <add> data = TextIO('A,B\n0,1\n2,3') <ide> kwargs = dict(delimiter=",", missing_values="N/A", names=True) <ide> test = np.recfromtxt(data, **kwargs) <ide> control = np.array([(0, 1), (2, 3)], <ide> dtype=[('A', np.int), ('B', np.int)]) <ide> self.assertTrue(isinstance(test, np.recarray)) <ide> assert_equal(test, control) <ide> # <del> data = StringIO('A,B\n0,1\n2,N/A') <add> data = TextIO('A,B\n0,1\n2,N/A') <ide> test = np.recfromtxt(data, dtype=None, usemask=True, **kwargs) <ide> control = ma.array([(0, 1), (2, -1)], <ide> mask=[(False, False), (False, True)], <ide> def test_recfromtxt(self): <ide> <ide> def test_recfromcsv(self): <ide> # <del> data = StringIO('A,B\n0,1\n2,3') <add> data = TextIO('A,B\n0,1\n2,3') <ide> kwargs = dict(missing_values="N/A", names=True, case_sensitive=True) <ide> test = np.recfromcsv(data, dtype=None, **kwargs) <ide> control = np.array([(0, 1), (2, 3)], <ide> dtype=[('A', np.int), ('B', np.int)]) <ide> self.assertTrue(isinstance(test, np.recarray)) <ide> assert_equal(test, control) <ide> # <del> data = StringIO('A,B\n0,1\n2,N/A') <add> data = TextIO('A,B\n0,1\n2,N/A') <ide> test = np.recfromcsv(data, dtype=None, usemask=True, **kwargs) <ide> control = ma.array([(0, 1), (2, -1)], <ide> mask=[(False, False), (False, True)], <ide> def test_recfromcsv(self): <ide> assert_equal(test.mask, control.mask) <ide> assert_equal(test.A, [0, 2]) <ide> # <del> data = StringIO('A,B\n0,1\n2,3') <add> data = TextIO('A,B\n0,1\n2,3') <ide> test = np.recfromcsv(data, missing_values='N/A',) <ide> control = np.array([(0, 1), (2, 3)], <ide> dtype=[('a', np.int), ('b', np.int)]) <ide> def test_gft_using_filename(self): <ide> os.unlink(name) <ide> <ide> def test_gft_using_generator(self): <add> # gft doesn't work with unicode. <ide> def count(): <ide> for i in range(10): <ide> yield asbytes("%d" % i) <ide> def count(): <ide> def test_gzip_load(): <ide> a = np.random.random((5, 5)) <ide> <del> s = StringIO() <add> s = BytesIO() <ide> f = gzip.GzipFile(fileobj=s, mode="w") <ide> <ide> np.save(f, a) <ide> def test_gzip_loadtxt(): <ide> # reopened by another open call. 
So we first put the gzipped string <ide> # of the test reference array, write it to a securely opened file, <ide> # which is then read from by the loadtxt function <del> s = StringIO() <add> s = BytesIO() <ide> g = gzip.GzipFile(fileobj=s, mode='w') <del> g.write(asbytes('1 2 3\n')) <add> g.write(b'1 2 3\n') <ide> g.close() <ide> s.seek(0) <ide> <ide> def test_gzip_loadtxt(): <ide> os.unlink(name) <ide> <ide> def test_gzip_loadtxt_from_string(): <del> s = StringIO() <add> s = BytesIO() <ide> f = gzip.GzipFile(fileobj=s, mode="w") <del> f.write(asbytes('1 2 3\n')) <add> f.write(b'1 2 3\n') <ide> f.close() <ide> s.seek(0) <ide> <ide> f = gzip.GzipFile(fileobj=s, mode="r") <ide> assert_array_equal(np.loadtxt(f), [1, 2, 3]) <ide> <ide> def test_npzfile_dict(): <del> s = StringIO() <add> s = BytesIO() <ide> x = np.zeros((3, 3)) <ide> y = np.zeros((3, 3)) <ide> <ide> def test_load_refcount(): <ide> # Check that objects returned by np.load are directly freed based on <ide> # their refcount, rather than needing the gc to collect them. <ide> <del> f = StringIO() <add> f = BytesIO() <ide> np.savez(f, [1, 2, 3]) <ide> f.seek(0) <ide> <ide><path>numpy/lib/tests/test_regression.py <ide> from __future__ import division, absolute_import <ide> <add>import sys <ide> from numpy.testing import * <ide> from numpy.testing.utils import _assert_valid_refcount <ide> import numpy as np <ide> def test_append_fields_dtype_list(self): <ide> <ide> def test_loadtxt_fields_subarrays(self): <ide> # For ticket #1936 <del> from StringIO import StringIO <add> if sys.version_info[0] >= 3: <add> from io import StringIO <add> else: <add> from StringIO import StringIO <add> <ide> dt = [("a", 'u1', 2), ("b", 'u1', 2)] <ide> x = np.loadtxt(StringIO("0 1 2 3"), dtype=dt) <ide> assert_equal(x, np.array([((0, 1), (2, 3))], dtype=dt)) <ide><path>numpy/lib/tests/test_utils.py <ide> from __future__ import division, absolute_import <ide> <add>import sys <ide> from numpy.testing import * <ide> import numpy.lib.utils as utils <ide> from numpy.lib import deprecate <ide> <del>from StringIO import StringIO <add>if sys.version_info[0] >= 3: <add> from io import StringIO <add>else: <add> from StringIO import StringIO <ide> <ide> def test_lookfor(): <ide> out = StringIO() <ide><path>numpy/lib/utils.py <ide> def _lookfor_generate_cache(module, import_modules, regenerate): <ide> global _lookfor_caches <ide> # Local import to speed up numpy's import time. <ide> import inspect <del> from cStringIO import StringIO <add> <add> if sys.version_info[0] >= 3: <add> # In Python3 stderr, stdout are text files. 
<add> from io import StringIO <add> else: <add> from StringIO import StringIO <ide> <ide> if module is None: <ide> module = "numpy" <ide><path>numpy/linalg/lapack_lite/clapack_scrub.py <del>#!/usr/bin/env python2.4 <add>#!/usr/bin/env python <ide> from __future__ import division, absolute_import <ide> <ide> import sys, os <del>from cStringIO import StringIO <add>from io import StringIO <ide> import re <ide> <ide> from Plex import * <ide><path>numpy/ma/core.py <ide> # pylint: disable-msg=E1002 <ide> from __future__ import division, absolute_import <ide> <add>import sys <add>import warnings <add> <add>import numpy as np <add>import numpy.core.umath as umath <add>import numpy.core.numerictypes as ntypes <add>from numpy import ndarray, amax, amin, iscomplexobj, bool_ <add>from numpy import array as narray <add>from numpy.lib.function_base import angle <add>from numpy.compat import getargspec, formatargspec <add>from numpy import expand_dims as n_expand_dims <add> <add>if sys.version_info[0] >= 3: <add> from functools import reduce <add> import pickle <add>else: <add> import cPickle as pickle <ide> <ide> __author__ = "Pierre GF Gerard-Marchant" <ide> __docformat__ = "restructuredtext en" <ide> 'var', 'where', <ide> 'zeros'] <ide> <del>import cPickle <del> <del>import numpy as np <del>from numpy import ndarray, amax, amin, iscomplexobj, bool_ <del>from numpy import array as narray <del> <del>import numpy.core.umath as umath <del>from numpy.lib.function_base import angle <del>import numpy.core.numerictypes as ntypes <del>from numpy.compat import getargspec, formatargspec <del>from numpy import expand_dims as n_expand_dims <del>import warnings <del> <del>import sys <del>if sys.version_info[0] >= 3: <del> from functools import reduce <del> <ide> MaskType = np.bool_ <ide> nomask = MaskType(0) <ide> <ide> def dump(a, F): <ide> """ <ide> if not hasattr(F, 'readline'): <ide> F = open(F, 'w') <del> return cPickle.dump(a, F) <add> return pickle.dump(a, F) <ide> <ide> def dumps(a): <ide> """ <ide> def dumps(a): <ide> returned. <ide> <ide> """ <del> return cPickle.dumps(a) <add> return pickle.dumps(a) <ide> <ide> def load(F): <ide> """ <ide> def load(F): <ide> """ <ide> if not hasattr(F, 'readline'): <ide> F = open(F, 'r') <del> return cPickle.load(F) <add> return pickle.load(F) <ide> <ide> def loads(strg): <ide> """ <ide> def loads(strg): <ide> dumps : Return a string corresponding to the pickling of a masked array. 
<ide> <ide> """ <del> return cPickle.loads(strg) <add> return pickle.loads(strg) <ide> <ide> ################################################################################ <ide> def fromfile(file, dtype=float, count= -1, sep=''): <ide><path>numpy/ma/tests/test_core.py <ide> <ide> import types <ide> import warnings <add>import sys <add>import pickle <ide> <ide> import numpy as np <add>import numpy.ma.core <ide> import numpy.core.fromnumeric as fromnumeric <ide> from numpy import ndarray <ide> from numpy.ma.testutils import * <del> <del>import numpy.ma.core <ide> from numpy.ma.core import * <del> <ide> from numpy.compat import asbytes, asbytes_nested <ide> from numpy.testing.utils import WarningManager <ide> <ide> pi = np.pi <ide> <del>import sys <ide> if sys.version_info[0] >= 3: <ide> from functools import reduce <ide> <ide> def test_deepcopy(self): <ide> <ide> def test_pickling(self): <ide> "Tests pickling" <del> import cPickle <ide> a = arange(10) <ide> a[::3] = masked <ide> a.fill_value = 999 <del> a_pickled = cPickle.loads(a.dumps()) <add> a_pickled = pickle.loads(a.dumps()) <ide> assert_equal(a_pickled._mask, a._mask) <ide> assert_equal(a_pickled._data, a._data) <ide> assert_equal(a_pickled.fill_value, 999) <ide> <ide> def test_pickling_subbaseclass(self): <ide> "Test pickling w/ a subclass of ndarray" <del> import cPickle <ide> a = array(np.matrix(list(range(10))), mask=[1, 0, 1, 0, 0] * 2) <del> a_pickled = cPickle.loads(a.dumps()) <add> a_pickled = pickle.loads(a.dumps()) <ide> assert_equal(a_pickled._mask, a._mask) <ide> assert_equal(a_pickled, a) <ide> self.assertTrue(isinstance(a_pickled._data, np.matrix)) <ide> <ide> def test_pickling_maskedconstant(self): <ide> "Test pickling MaskedConstant" <ide> <del> import cPickle <ide> mc = np.ma.masked <del> mc_pickled = cPickle.loads(mc.dumps()) <add> mc_pickled = pickle.loads(mc.dumps()) <ide> assert_equal(mc_pickled._baseclass, mc._baseclass) <ide> assert_equal(mc_pickled._mask, mc._mask) <ide> assert_equal(mc_pickled._data, mc._data) <ide> <ide> def test_pickling_wstructured(self): <ide> "Tests pickling w/ structured array" <del> import cPickle <ide> a = array([(1, 1.), (2, 2.)], mask=[(0, 0), (0, 1)], <ide> dtype=[('a', int), ('b', float)]) <del> a_pickled = cPickle.loads(a.dumps()) <add> a_pickled = pickle.loads(a.dumps()) <ide> assert_equal(a_pickled._mask, a._mask) <ide> assert_equal(a_pickled, a) <ide> <ide> def test_pickling_keepalignment(self): <ide> "Tests pickling w/ F_CONTIGUOUS arrays" <del> import cPickle <ide> a = arange(10) <ide> a.shape = (-1, 2) <ide> b = a.T <del> test = cPickle.loads(cPickle.dumps(b)) <add> test = pickle.loads(pickle.dumps(b)) <ide> assert_equal(test, b) <ide> <del># def test_pickling_oddity(self): <del># "Test some pickling oddity" <del># import cPickle <del># a = array([{'a':1}, {'b':2}, 3], dtype=object) <del># test = cPickle.loads(cPickle.dumps(a)) <del># assert_equal(test, a) <ide> <ide> def test_single_element_subscript(self): <ide> "Tests single element subscripts of Maskedarrays." 
<ide><path>numpy/ma/tests/test_mrecords.py <ide> """ <ide> from __future__ import division, absolute_import <ide> <del>__author__ = "Pierre GF Gerard-Marchant ($Author: jarrod.millman $)" <del>__revision__ = "$Revision: 3473 $" <del>__date__ = '$Date: 2007-10-29 17:18:13 +0200 (Mon, 29 Oct 2007) $' <del> <ide> import sys <add>import warnings <add>import pickle <add> <ide> import numpy as np <add>import numpy.ma.testutils <add>import numpy.ma as ma <ide> from numpy import recarray <ide> from numpy.core.records import fromrecords as recfromrecords, \ <ide> fromarrays as recfromarrays <ide> <ide> from numpy.compat import asbytes, asbytes_nested <del> <del>import numpy.ma.testutils <ide> from numpy.ma.testutils import * <del> <del>import numpy.ma as ma <ide> from numpy.ma import masked, nomask <del> <del>import warnings <ide> from numpy.testing.utils import WarningManager <del> <ide> from numpy.ma.mrecords import MaskedRecords, mrecarray, fromarrays, \ <ide> fromtextfile, fromrecords, addfield <ide> <add> <add>__author__ = "Pierre GF Gerard-Marchant ($Author: jarrod.millman $)" <add>__revision__ = "$Revision: 3473 $" <add>__date__ = '$Date: 2007-10-29 17:18:13 +0200 (Mon, 29 Oct 2007) $' <add> <add> <ide> #.............................................................................. <ide> class TestMRecords(TestCase): <ide> "Base test class for MaskedArrays." <ide> def test_hardmask(self): <ide> # <ide> def test_pickling(self): <ide> "Test pickling" <del> import cPickle <ide> base = self.base.copy() <ide> mrec = base.view(mrecarray) <del> _ = cPickle.dumps(mrec) <del> mrec_ = cPickle.loads(_) <add> _ = pickle.dumps(mrec) <add> mrec_ = pickle.loads(_) <ide> assert_equal(mrec_.dtype, mrec.dtype) <ide> assert_equal_records(mrec_._data, mrec._data) <ide> assert_equal(mrec_._mask, mrec._mask) <ide><path>numpy/numarray/functions.py <ide> ] <ide> <ide> import copy <del>import copy_reg <ide> import types <ide> import os <ide> import sys <ide> <ide> if sys.version_info[0] >= 3: <ide> import copyreg as copy_reg <add>else: <add> import copy_reg <ide> <ide> isBigEndian = sys.byteorder != 'little' <ide> value = tcode = 'f' <ide><path>numpy/oldnumeric/compat.py <ide> """ <ide> from __future__ import division, absolute_import <ide> <add>import sys <add>import copy <add>import pickle <add>from pickle import dump, dumps <add> <add>import numpy.core.multiarray as multiarray <add>import numpy.core.umath as um <add>from numpy.core.numeric import array <add>from . import functions <add> <add> <ide> __all__ = ['NewAxis', <ide> 'UFuncType', 'UfuncType', 'ArrayType', 'arraytype', <ide> 'LittleEndian', 'arrayrange', 'matrixmultiply', <ide> 'Unpickler', 'Pickler' <ide> ] <ide> <del>import numpy.core.multiarray as multiarray <del>import numpy.core.umath as um <del>from numpy.core.numeric import array <del>from . 
import functions <del>import sys <del> <del>from cPickle import dump, dumps <ide> <ide> mu = multiarray <ide> <ide> def DumpArray(m, fp): <ide> m.dump(fp) <ide> <ide> def LoadArray(fp): <del> import cPickle <del> return cPickle.load(fp) <add> return pickle.load(fp) <ide> <ide> def array_constructor(shape, typecode, thestr, Endian=LittleEndian): <ide> if typecode == "O": <ide> def pickle_array(a): <ide> (a.shape, a.dtype.char, a.tostring(), LittleEndian)) <ide> <ide> def loads(astr): <del> import cPickle <del> arr = cPickle.loads(astr.replace('Numeric', 'numpy.oldnumeric')) <add> arr = pickle.loads(astr.replace('Numeric', 'numpy.oldnumeric')) <ide> return arr <ide> <ide> def load(fp): <ide> def _LoadArray(fp): <ide> else: <ide> return m <ide> <del>import pickle, copy <ide> if sys.version_info[0] >= 3: <ide> class Unpickler(pickle.Unpickler): <ide> # XXX: should we implement this? It's not completely straightforward <ide><path>numpy/oldnumeric/misc.py <ide> 'dot', 'outerproduct', 'innerproduct', 'insert'] <ide> <ide> import types <del>import StringIO <ide> import pickle <ide> import math <ide> import copy <del>import copy_reg <del> <ide> import sys <add> <ide> if sys.version_info[0] >= 3: <del> import copyreg <del> import io <del> StringIO = io.BytesIO <del> copy_reg = copyreg <add> import copyreg as copy_reg <add> from io import BytesIO as StringIO <add>else: <add> import copy_reg <add> from StringIO import StringIO <ide> <ide> from numpy import sort, clip, rank, sign, shape, putmask, allclose, size,\ <ide> choose, swapaxes, array_str, array_repr, e, pi, put, \ <ide><path>numpy/testing/utils.py <ide> import warnings <ide> from .nosetester import import_nose <ide> <add>if sys.version_info[0] >= 3: <add> from io import StringIO <add>else: <add> from StringIO import StringIO <add> <ide> __all__ = ['assert_equal', 'assert_almost_equal','assert_approx_equal', <ide> 'assert_array_equal', 'assert_array_less', 'assert_string_equal', <ide> 'assert_array_almost_equal', 'assert_raises', 'build_err_msg', <ide> def print_assert_equal(test_string,actual,desired): <ide> import pprint <ide> <ide> if not (actual == desired): <del> import cStringIO <del> msg = cStringIO.StringIO() <add> msg = StringIO() <ide> msg.write(test_string) <ide> msg.write(' failed\nACTUAL: \n') <ide> pprint.pprint(actual,msg) <ide><path>setup.py <ide> import re <ide> import subprocess <ide> <del>if sys.version_info[0] < 3: <del> import __builtin__ as builtins <del>else: <add>if sys.version_info[0] >= 3: <ide> import builtins <add>else: <add> import __builtin__ as builtins <ide> <ide> CLASSIFIERS = """\ <ide> Development Status :: 5 - Production/Stable <ide><path>tools/py3tool.py <ide> 'raw_input', <ide> 'xreadlines', <ide> 'xrange', <del> 'import' <add> 'import', <add> 'imports' <ide> ] <ide> <ide> skip_fixes= [] <ide><path>tools/win32build/build.py <ide> def move_binary(arch, pyver): <ide> os.path.join("binaries", get_binary_name(arch))) <ide> <ide> def get_numpy_version(): <del> import __builtin__ <del> __builtin__.__NUMPY_SETUP__ = True <add> if sys.version_info[0] >= 3: <add> import builtins <add> else: <add> import __builtin__ as builtins <add> <add> builtins.__NUMPY_SETUP__ = True <ide> from numpy.version import version <ide> return version <ide>
44
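The numpy patch above systematically swaps StringIO for a TextIO helper (and BytesIO where raw bytes are required) so the same test literals work under both Python 2 and 3. As a rough illustration only — the real TextIO lives in numpy's test utilities and its exact definition may differ — such a helper can be sketched as a BytesIO that silently encodes str input; the latin1 choice below is an assumption, while the genfromtxt call mirrors one of the patched tests.

```python
# Rough sketch of a TextIO-style helper (not numpy's actual implementation):
# a BytesIO that also accepts str, so test data written as text literals
# still reaches np.genfromtxt as bytes on Python 3. latin1 is assumed here.
from io import BytesIO

import numpy as np


class TextIO(BytesIO):
    """BytesIO that transparently encodes str arguments (latin1 assumed)."""

    def __init__(self, s=b""):
        super().__init__(s.encode("latin1") if isinstance(s, str) else s)

    def write(self, s):
        super().write(s.encode("latin1") if isinstance(s, str) else s)


# Mirrors one of the patched tests above:
data = TextIO("1 2 3\n4 5 6")
print(np.genfromtxt(data, usecols=0))  # [1. 4.]
```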
PHP
PHP
remove use of non-existent controller property
667220ab9269e0d37d78ee2d553fe7c0aa7931b9
<ide><path>src/Controller/Component/RequestHandlerComponent.php <ide> public function renderAs(Controller $controller, $type, $options = array()) { <ide> $controller->layout = $this->ajaxLayout; <ide> return $this->respondAs('html', $options); <ide> } <del> $controller->ext = '.ctp'; <ide> <ide> $viewClassMap = $this->viewClassMap(); <ide> if (array_key_exists($type, $viewClassMap)) { <ide><path>tests/TestCase/Controller/Component/RequestHandlerComponentTest.php <ide> public function testDisabling() { <ide> $this->assertEquals(true, $this->Controller->request->params['isAjax']); <ide> } <ide> <del>/** <del> * testAutoResponseType method <del> * <del> * @return void <del> */ <del> public function testAutoResponseType() { <del> $event = new Event('Controller.startup', $this->Controller); <del> $this->Controller->ext = '.thtml'; <del> $this->Controller->request->params['_ext'] = 'rss'; <del> $this->RequestHandler->initialize($event); <del> $this->RequestHandler->startup($event); <del> $this->assertEquals('.ctp', $this->Controller->ext); <del> } <del> <ide> /** <ide> * testAutoAjaxLayout method <ide> *
2
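The CakePHP change above simply drops an assignment to a Controller property that no longer exists. A standalone sketch (plain PHP, not CakePHP's real classes) of why such a line is dead weight: writing to an undeclared property only creates a dynamic property that nothing reads, which is exactly what the removed `$controller->ext = '.ctp';` did.

```php
<?php
// Plain-PHP sketch, unrelated to CakePHP's actual classes: assigning to an
// undeclared property just creates a dynamic property on the instance
// (deprecated in PHP 8.2+), so it configures nothing if the framework never
// declares or reads that property.
class Controller
{
    public $layout = 'default'; // declared, so assignments here are meaningful
}

$controller = new Controller();
$controller->ext = '.ctp'; // silently becomes an unused dynamic property

var_dump(isset($controller->ext));      // bool(true), but nothing consumes it
var_dump(get_object_vars($controller)); // ['layout' => 'default', 'ext' => '.ctp']
```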
Javascript
Javascript
replace assert.equal with assert.strictEqual
18016d3b3fd27fd2744a17a54f15631af6206df1
<ide><path>test/addons/async-hello-world/test.js <ide> var assert = require('assert'); <ide> const binding = require(`./build/${common.buildType}/binding`); <ide> <ide> binding(5, common.mustCall(function(err, val) { <del> assert.equal(null, err); <del> assert.equal(10, val); <add> assert.strictEqual(err, null); <add> assert.strictEqual(val, 10); <ide> process.nextTick(common.mustCall(function() {})); <ide> })); <ide><path>test/addons/hello-world-function-export/test.js <ide> const common = require('../../common'); <ide> var assert = require('assert'); <ide> const binding = require(`./build/${common.buildType}/binding`); <del>assert.equal('world', binding()); <add>assert.strictEqual(binding(), 'world'); <ide> console.log('binding.hello() =', binding()); <ide><path>test/addons/hello-world/test.js <ide> const common = require('../../common'); <ide> var assert = require('assert'); <ide> const binding = require(`./build/${common.buildType}/binding`); <del>assert.equal('world', binding.hello()); <add>assert.strictEqual(binding.hello(), 'world'); <ide> console.log('binding.hello() =', binding.hello()); <ide><path>test/addons/load-long-path/test.js <ide> fs.writeFileSync(addonDestinationPath, contents); <ide> // Attempt to load at long path destination <ide> var addon = require(addonDestinationPath); <ide> assert.notEqual(addon, null); <del>assert.equal(addon.hello(), 'world'); <add>assert.strictEqual(addon.hello(), 'world'); <ide><path>test/addons/stringbytes-external-exceed-max/test-stringbytes-external-at-max.js <ide> if (!binding.ensureAllocation(2 * kStringMaxLength)) { <ide> } <ide> <ide> const maxString = buf.toString('latin1'); <del>assert.equal(maxString.length, kStringMaxLength); <add>assert.strictEqual(maxString.length, kStringMaxLength); <ide><path>test/addons/stringbytes-external-exceed-max/test-stringbytes-external-exceed-max-by-1-binary.js <ide> assert.throws(function() { <ide> }, /"toString\(\)" failed/); <ide> <ide> var maxString = buf.toString('latin1', 1); <del>assert.equal(maxString.length, kStringMaxLength); <add>assert.strictEqual(maxString.length, kStringMaxLength); <ide> // Free the memory early instead of at the end of the next assignment <ide> maxString = undefined; <ide> <ide> maxString = buf.toString('latin1', 0, kStringMaxLength); <del>assert.equal(maxString.length, kStringMaxLength); <add>assert.strictEqual(maxString.length, kStringMaxLength); <ide><path>test/addons/stringbytes-external-exceed-max/test-stringbytes-external-exceed-max-by-2.js <ide> if (!binding.ensureAllocation(2 * kStringMaxLength)) { <ide> } <ide> <ide> const maxString = buf.toString('utf16le'); <del>assert.equal(maxString.length, (kStringMaxLength + 2) / 2); <add>assert.strictEqual(maxString.length, (kStringMaxLength + 2) / 2);
7
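The Node addon test changes above replace assert.equal with assert.strictEqual and put the actual value first. A small standalone sketch (sample values invented here) of the behavioural difference that motivates the swap:

```js
// Standalone sketch with made-up values: assert.equal compares with loose
// equality (==), so type-coerced "matches" pass; assert.strictEqual uses
// strict equality (===) and rejects them. Argument order is (actual, expected).
'use strict';
const assert = require('assert');

assert.equal('10', 10);   // passes: '10' == 10 after coercion
assert.equal(0, false);   // passes: 0 == false

assert.strictEqual(10, 10);                 // passes: same type and value
assert.throws(() => assert.strictEqual('10', 10),
              assert.AssertionError);       // coercion no longer saves it

console.log('strictEqual rejected the values that equal let through');
```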
Javascript
Javascript
improve the changelog script
07a58dd7669431d33b61f8c3213c31eff744d02a
<ide><path>changelog.js <ide> var parseRawCommit = function(raw) { <ide> msg.breaks = []; <ide> <ide> lines.forEach(function(line) { <del> match = line.match(/Closes\s#(\d+)/); <add> match = line.match(/(?:Closes|Fixes)\s#(\d+)/); <ide> if (match) msg.closes.push(parseInt(match[1])); <ide> }); <del> <add> <ide> match = raw.match(/BREAKING CHANGE:([\s\S]*)/); <ide> if (match) { <del> console.log('found!!!') <del> msg.breaks.push(match[1]); <add> msg.breaking = match[1]; <ide> } <del> <add> <ide> <ide> msg.body = lines.join('\n'); <ide> match = msg.subject.match(/^(.*)\((.*)\)\:\s(.*)$/); <ide> var currentDate = function() { <ide> }; <ide> <ide> <del>var printSection = function(stream, title, section) { <add>var printSection = function(stream, title, section, printCommitLinks) { <add> printCommitLinks = printCommitLinks === undefined ? true : printCommitLinks; <ide> var components = Object.getOwnPropertyNames(section).sort(); <ide> <ide> if (!components.length) return; <ide> var printSection = function(stream, title, section) { <ide> } <ide> <ide> section[name].forEach(function(commit) { <del> stream.write(util.format('%s %s (%s', prefix, commit.subject, linkToCommit(commit.hash))); <del> if (commit.closes.length) { <del> stream.write(', closes ' + commit.closes.map(linkToIssue).join(', ')); <add> if (printCommitLinks) { <add> stream.write(util.format('%s %s\n (%s', prefix, commit.subject, linkToCommit(commit.hash))); <add> if (commit.closes.length) { <add> stream.write(',\n ' + commit.closes.map(linkToIssue).join(', ')); <add> } <add> stream.write(')\n'); <add> } else { <add> stream.write(util.format('%s %s', prefix, commit.subject)); <ide> } <del> stream.write(')\n'); <ide> }); <ide> }); <ide> <ide> var printSection = function(stream, title, section) { <ide> <ide> <ide> var readGitLog = function(grep, from) { <del> var deffered = q.defer(); <add> var deferred = q.defer(); <ide> <ide> // TODO(vojta): if it's slow, use spawn and stream it instead <ide> child.exec(util.format(GIT_LOG_CMD, grep, '%H%n%s%n%b%n==END==', from), function(code, stdout, stderr) { <ide> var readGitLog = function(grep, from) { <ide> if (commit) commits.push(commit); <ide> }); <ide> <del> deffered.resolve(commits); <add> deferred.resolve(commits); <ide> }); <ide> <del> return deffered.promise; <add> return deferred.promise; <ide> }; <ide> <ide> <ide> var writeChangelog = function(stream, commits, version) { <ide> section[component].push(commit); <ide> } <ide> <del> commit.breaks.forEach(function(breakMsg) { <del> sections.breaks[EMPTY_COMPONENT].push({ <del> subject: breakMsg, <add> if (commit.breaking) { <add> sections.breaks[component] = sections.breaks[component] || []; <add> sections.breaks[component].push({ <add> subject: util.format("due to %s,\n %s", linkToCommit(commit.hash), commit.breaking), <ide> hash: commit.hash, <ide> closes: [] <ide> }); <del> }); <add> }; <ide> }); <ide> <ide> stream.write(util.format(HEADER_TPL, version, version, currentDate())); <ide> printSection(stream, 'Bug Fixes', sections.fix); <ide> printSection(stream, 'Features', sections.feat); <del> printSection(stream, 'Breaking Changes', sections.breaks); <add> printSection(stream, 'Breaking Changes', sections.breaks, false); <ide> } <ide> <ide> <ide> var getPreviousTag = function() { <del> var deffered = q.defer(); <add> var deferred = q.defer(); <ide> child.exec(GIT_TAG_CMD, function(code, stdout, stderr) { <del> if (code) deffered.reject('Cannot get the previous tag.'); <del> else deffered.resolve(stdout.replace('\n', '')); <add> if 
(code) deferred.reject('Cannot get the previous tag.'); <add> else deferred.resolve(stdout.replace('\n', '')); <ide> }); <del> return deffered.promise; <add> return deferred.promise; <ide> }; <ide> <ide> <ide><path>changelog.spec.js <ide> describe('changelog.js', function() { <ide> '13f31602f396bc269076ab4d389cfd8ca94b20ba\n' + <ide> 'feat(ng-list): Allow custom separator\n' + <ide> 'bla bla bla\n\n' + <del> 'Breaks first breaking change\nsomething else\n' + <del> 'Breaks another breaking change\n'); <add> 'BREAKING CHANGE: first breaking change\nsomething else\n' + <add> 'another line with more info\n'); <ide> <del> expect(msg.breaks).toEqual(['first breaking change', 'another breaking change']); <add> expect(msg.breaking).toEqual(' first breaking change\nsomething else\nanother line with more info\n'); <ide> }); <ide> }); <ide> });
2
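The changelog.js patch above broadens issue matching to both "Closes #N" and "Fixes #N" and captures everything after "BREAKING CHANGE:" into a single breaking field instead of a list. A trimmed-down sketch of just that parsing logic — the helper name and the sample commit body below are invented for illustration:

```js
// Trimmed-down sketch of the parsing rules the patched changelog.js applies;
// the helper name and the sample commit body are made up.
var parseCommitBody = function (raw) {
  var msg = { closes: [], breaking: undefined };

  raw.split('\n').forEach(function (line) {
    var match = line.match(/(?:Closes|Fixes)\s#(\d+)/);
    if (match) msg.closes.push(parseInt(match[1], 10));
  });

  var breaking = raw.match(/BREAKING CHANGE:([\s\S]*)/);
  if (breaking) msg.breaking = breaking[1];

  return msg;
};

console.log(parseCommitBody(
  'feat(scope): subject\n\nFixes #42\nBREAKING CHANGE: renamed the foo option'
));
// -> { closes: [ 42 ], breaking: ' renamed the foo option' }
```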
PHP
PHP
check main mail config for 'from'
fa463b0998853faadc302ed21c4dd704c99144cc
<ide><path>src/Illuminate/Mail/MailManager.php <ide> protected function guzzle(array $config) <ide> */ <ide> protected function setGlobalAddress($mailer, array $config, string $type) <ide> { <del> $address = Arr::get($config, $type); <add> $address = Arr::get($config, $type, $this->app['config']['mail.'.$type]); <ide> <ide> if (is_array($address) && isset($address['address'])) { <ide> $mailer->{'always'.Str::studly($type)}($address['address'], $address['name']);
1
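The one-line Laravel change above makes the per-mailer global address lookup fall back to the top-level mail config when a mailer's own config block lacks the key. A dependency-free sketch of that fallback — the array shapes and the function name are assumptions for illustration, not Laravel's actual structure:

```php
<?php
// Dependency-free sketch of the fallback the patch introduces: prefer the
// per-mailer address, otherwise use the global mail config for the same key.
// Array shapes and the function name are illustrative assumptions.
function resolveGlobalAddress(array $mailerConfig, array $globalMailConfig, string $type): ?array
{
    $address = $mailerConfig[$type] ?? $globalMailConfig[$type] ?? null;

    return (is_array($address) && isset($address['address'])) ? $address : null;
}

$global = ['from' => ['address' => 'hello@example.com', 'name' => 'Example']];
$mailer = ['transport' => 'smtp']; // no per-mailer "from" entry

var_dump(resolveGlobalAddress($mailer, $global, 'from'));
// -> the global from address/name, as the patched MailManager would now use
```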
Java
Java
remove unnecessary observable constructor
c968effdbba6e9d8e885f8316c288ae36cb9e86b
<ide><path>rxjava-core/src/main/java/rx/Observable.java <ide> * <p> <ide> * <img width="640" src="https://github.com/Netflix/RxJava/wiki/images/rx-operators/legend.png"> <ide> * <p> <del> * For more information see the <a href="https://github.com/Netflix/RxJava/wiki/Observable">RxJava <del> * Wiki</a> <add> * For more information see the <a href="https://github.com/Netflix/RxJava/wiki/Observable">RxJava Wiki</a> <ide> * <ide> * @param <T> <ide> */ <ide> public class Observable<T> { <ide> <del> //TODO use a consistent parameter naming scheme (for example: for all operators that modify a source Observable, the parameter representing that source Observable should have the same name, e.g. "source" -- currently such parameters are named any of "sequence", "that", "source", "items", or "observable") <del> <del> private final static RxJavaObservableExecutionHook hook = RxJavaPlugins.getInstance().getObservableExecutionHook(); <del> <add> /** <add> * Executed when 'subscribe' is invoked. <add> */ <ide> private final OnSubscribeFunc<T> onSubscribe; <ide> <ide> /** <ide> public Subscription onSubscribe(Observer<? super T> t1); <ide> <ide> } <del> <add> <ide> /** <ide> * Observable with Function to execute when subscribed to. <ide> * <p> <ide> protected Observable(OnSubscribeFunc<T> onSubscribe) { <ide> this.onSubscribe = onSubscribe; <ide> } <ide> <del> protected Observable() { <del> this(null); <del> //TODO should this be made private to prevent it? It really serves no good purpose and only confuses things. Unit tests are incorrectly using it today <del> } <add> private final static RxJavaObservableExecutionHook hook = RxJavaPlugins.getInstance().getObservableExecutionHook(); <add> <ide> <ide> /** <ide> * An {@link Observer} must call an Observable's {@code subscribe} method in order to <ide><path>rxjava-core/src/main/java/rx/observables/BlockingObservable.java <ide> protected BlockingObservable(OnSubscribeFunc<T> onSubscribe) { <ide> super(onSubscribe); <ide> } <ide> <del> /** <del> * Used to prevent public instantiation <del> */ <del> @SuppressWarnings("unused") <del> private BlockingObservable() { <del> // prevent public instantiation <del> } <del> <ide> /** <ide> * Convert an Observable into a BlockingObservable. 
<ide> */ <ide><path>rxjava-core/src/main/java/rx/operators/OperationCombineLatest.java <ide> public void testCombineLatestWithFunctionThatThrowsAnException() { <ide> TestObservable w1 = new TestObservable(); <ide> TestObservable w2 = new TestObservable(); <ide> <del> Observable<String> combined = Observable.create(combineLatest(w1, w2, new Func2<String, String, String>() { <add> Observable<String> combined = Observable.create(combineLatest(Observable.create(w1), Observable.create(w2), new Func2<String, String, String>() { <ide> @Override <ide> public String call(String v1, String v2) { <ide> throw new RuntimeException("I don't work."); <ide> public void testCombineLatestDifferentLengthObservableSequences1() { <ide> TestObservable w2 = new TestObservable(); <ide> TestObservable w3 = new TestObservable(); <ide> <del> Observable<String> combineLatestW = Observable.create(combineLatest(w1, w2, w3, getConcat3StringsCombineLatestFunction())); <add> Observable<String> combineLatestW = Observable.create(combineLatest(Observable.create(w1), Observable.create(w2), Observable.create(w3), getConcat3StringsCombineLatestFunction())); <ide> combineLatestW.subscribe(w); <ide> <ide> /* simulate sending data */ <ide> public void testCombineLatestDifferentLengthObservableSequences2() { <ide> TestObservable w2 = new TestObservable(); <ide> TestObservable w3 = new TestObservable(); <ide> <del> Observable<String> combineLatestW = Observable.create(combineLatest(w1, w2, w3, getConcat3StringsCombineLatestFunction())); <add> Observable<String> combineLatestW = Observable.create(combineLatest(Observable.create(w1), Observable.create(w2), Observable.create(w3), getConcat3StringsCombineLatestFunction())); <ide> combineLatestW.subscribe(w); <ide> <ide> /* simulate sending data */ <ide> public void testCombineLatestWithInterleavingSequences() { <ide> TestObservable w2 = new TestObservable(); <ide> TestObservable w3 = new TestObservable(); <ide> <del> Observable<String> combineLatestW = Observable.create(combineLatest(w1, w2, w3, getConcat3StringsCombineLatestFunction())); <add> Observable<String> combineLatestW = Observable.create(combineLatest(Observable.create(w1), Observable.create(w2), Observable.create(w3), getConcat3StringsCombineLatestFunction())); <ide> combineLatestW.subscribe(w); <ide> <ide> /* simulate sending data */ <ide> private static String getStringValue(Object o) { <ide> } <ide> } <ide> <del> private static class TestObservable extends Observable<String> { <add> private static class TestObservable implements OnSubscribeFunc<String> { <ide> <ide> Observer<? super String> observer; <ide> <ide> @Override <del> public Subscription subscribe(Observer<? super String> observer) { <add> public Subscription onSubscribe(Observer<? 
super String> observer) { <ide> // just store the variable where it can be accessed so we can manually trigger it <ide> this.observer = observer; <ide> return Subscriptions.empty(); <ide><path>rxjava-core/src/main/java/rx/operators/OperationConcat.java <ide> public void testSimpleAsyncConcat() { <ide> TestObservable<String> o1 = new TestObservable<String>("one", "two", "three"); <ide> TestObservable<String> o2 = new TestObservable<String>("four", "five", "six"); <ide> <del> Observable.concat(o1, o2).subscribe(observer); <add> Observable.concat(Observable.create(o1), Observable.create(o2)).subscribe(observer); <ide> <ide> try { <ide> // wait for async observables to complete <ide> public void run() { <ide> // emit first <ide> if (!s.isUnsubscribed()) { <ide> System.out.println("Emit o1"); <del> observer.onNext(o1); <add> observer.onNext(Observable.create(o1)); <ide> } <ide> // emit second <ide> if (!s.isUnsubscribed()) { <ide> System.out.println("Emit o2"); <del> observer.onNext(o2); <add> observer.onNext(Observable.create(o2)); <ide> } <ide> <ide> // wait until sometime later and emit third <ide> public void run() { <ide> } <ide> if (!s.isUnsubscribed()) { <ide> System.out.println("Emit o3"); <del> observer.onNext(o3); <add> observer.onNext(Observable.create(o3)); <ide> } <ide> <ide> } catch (Throwable e) { <ide> public void testBlockedObservableOfObservables() { <ide> final CountDownLatch callOnce = new CountDownLatch(1); <ide> final CountDownLatch okToContinue = new CountDownLatch(1); <ide> TestObservable<Observable<String>> observableOfObservables = new TestObservable<Observable<String>>(callOnce, okToContinue, odds, even); <del> OnSubscribeFunc<String> concatF = concat(observableOfObservables); <add> OnSubscribeFunc<String> concatF = concat(Observable.create(observableOfObservables)); <ide> Observable<String> concat = Observable.create(concatF); <ide> concat.subscribe(observer); <ide> try { <ide> public void testConcatConcurrentWithInfinity() { <ide> @SuppressWarnings("unchecked") <ide> Observer<String> aObserver = mock(Observer.class); <ide> @SuppressWarnings("unchecked") <del> TestObservable<Observable<String>> observableOfObservables = new TestObservable<Observable<String>>(w1, w2); <del> OnSubscribeFunc<String> concatF = concat(observableOfObservables); <add> TestObservable<Observable<String>> observableOfObservables = new TestObservable<Observable<String>>(Observable.create(w1), Observable.create(w2)); <add> OnSubscribeFunc<String> concatF = concat(Observable.create(observableOfObservables)); <ide> <ide> Observable<String> concat = Observable.create(concatF); <ide> <ide> public void testConcatNonBlockingObservables() { <ide> @Override <ide> public Subscription onSubscribe(Observer<? 
super Observable<String>> observer) { <ide> // simulate what would happen in an observable <del> observer.onNext(w1); <del> observer.onNext(w2); <add> observer.onNext(Observable.create(w1)); <add> observer.onNext(Observable.create(w2)); <ide> observer.onCompleted(); <ide> <ide> return new Subscription() { <ide> public void testConcatUnsubscribe() { <ide> @SuppressWarnings("unchecked") <ide> final Observer<String> aObserver = mock(Observer.class); <ide> @SuppressWarnings("unchecked") <del> final Observable<String> concat = Observable.create(concat(w1, w2)); <add> final Observable<String> concat = Observable.create(concat(Observable.create(w1), Observable.create(w2))); <ide> final SafeObservableSubscription s1 = new SafeObservableSubscription(); <ide> <ide> try { <ide> public void testConcatUnsubscribeConcurrent() { <ide> @SuppressWarnings("unchecked") <ide> Observer<String> aObserver = mock(Observer.class); <ide> @SuppressWarnings("unchecked") <del> TestObservable<Observable<String>> observableOfObservables = new TestObservable<Observable<String>>(w1, w2); <del> OnSubscribeFunc<String> concatF = concat(observableOfObservables); <add> TestObservable<Observable<String>> observableOfObservables = new TestObservable<Observable<String>>(Observable.create(w1), Observable.create(w2)); <add> OnSubscribeFunc<String> concatF = concat(Observable.create(observableOfObservables)); <ide> <ide> Observable<String> concat = Observable.create(concatF); <ide> <ide> public void testConcatUnsubscribeConcurrent() { <ide> verify(aObserver, never()).onError(any(Throwable.class)); <ide> } <ide> <del> private static class TestObservable<T> extends Observable<T> { <add> private static class TestObservable<T> implements OnSubscribeFunc<T> { <ide> <ide> private final Subscription s = new Subscription() { <ide> <ide> public TestObservable(T seed, int size) { <ide> } <ide> <ide> @Override <del> public Subscription subscribe(final Observer<? super T> observer) { <add> public Subscription onSubscribe(final Observer<? 
super T> observer) { <ide> t = new Thread(new Runnable() { <ide> <ide> @Override <ide><path>rxjava-core/src/main/java/rx/operators/OperationMaterialize.java <ide> public void testMaterialize1() { <ide> final TestAsyncErrorObservable o1 = new TestAsyncErrorObservable("one", "two", null, "three"); <ide> <ide> TestObserver Observer = new TestObserver(); <del> Observable<Notification<String>> m = Observable.create(materialize(o1)); <add> Observable<Notification<String>> m = Observable.create(materialize(Observable.create(o1))); <ide> m.subscribe(Observer); <ide> <ide> try { <ide> public void testMaterialize2() { <ide> final TestAsyncErrorObservable o1 = new TestAsyncErrorObservable("one", "two", "three"); <ide> <ide> TestObserver Observer = new TestObserver(); <del> Observable<Notification<String>> m = Observable.create(materialize(o1)); <add> Observable<Notification<String>> m = Observable.create(materialize(Observable.create(o1))); <ide> m.subscribe(Observer); <ide> <ide> try { <ide> public void testMaterialize2() { <ide> public void testMultipleSubscribes() throws InterruptedException, ExecutionException { <ide> final TestAsyncErrorObservable o = new TestAsyncErrorObservable("one", "two", null, "three"); <ide> <del> Observable<Notification<String>> m = Observable.create(materialize(o)); <add> Observable<Notification<String>> m = Observable.create(materialize(Observable.create(o))); <ide> <ide> assertEquals(3, m.toList().toBlockingObservable().toFuture().get().size()); <ide> assertEquals(3, m.toList().toBlockingObservable().toFuture().get().size()); <ide> public void onNext(Notification<String> value) { <ide> <ide> } <ide> <del> private static class TestAsyncErrorObservable extends Observable<String> { <add> private static class TestAsyncErrorObservable implements OnSubscribeFunc<String> { <ide> <ide> String[] valuesToReturn; <ide> <ide> private static class TestAsyncErrorObservable extends Observable<String> { <ide> volatile Thread t; <ide> <ide> @Override <del> public Subscription subscribe(final Observer<? super String> observer) { <add> public Subscription onSubscribe(final Observer<? 
super String> observer) { <ide> t = new Thread(new Runnable() { <ide> <ide> @Override <ide><path>rxjava-core/src/main/java/rx/operators/OperationMerge.java <ide> public void before() { <ide> <ide> @Test <ide> public void testMergeObservableOfObservables() { <del> final Observable<String> o1 = new TestSynchronousObservable(); <del> final Observable<String> o2 = new TestSynchronousObservable(); <add> final Observable<String> o1 = Observable.create(new TestSynchronousObservable()); <add> final Observable<String> o2 = Observable.create(new TestSynchronousObservable()); <ide> <ide> Observable<Observable<String>> observableOfObservables = Observable.create(new OnSubscribeFunc<Observable<String>>() { <ide> <ide> public void unsubscribe() { <ide> <ide> @Test <ide> public void testMergeArray() { <del> final Observable<String> o1 = new TestSynchronousObservable(); <del> final Observable<String> o2 = new TestSynchronousObservable(); <add> final Observable<String> o1 = Observable.create(new TestSynchronousObservable()); <add> final Observable<String> o2 = Observable.create(new TestSynchronousObservable()); <ide> <ide> @SuppressWarnings("unchecked") <ide> Observable<String> m = Observable.create(merge(o1, o2)); <ide> public void testMergeArray() { <ide> <ide> @Test <ide> public void testMergeList() { <del> final Observable<String> o1 = new TestSynchronousObservable(); <del> final Observable<String> o2 = new TestSynchronousObservable(); <add> final Observable<String> o1 = Observable.create(new TestSynchronousObservable()); <add> final Observable<String> o2 = Observable.create(new TestSynchronousObservable()); <ide> List<Observable<String>> listOfObservables = new ArrayList<Observable<String>>(); <ide> listOfObservables.add(o1); <ide> listOfObservables.add(o2); <ide> public void testUnSubscribe() { <ide> TestObservable tB = new TestObservable(); <ide> <ide> @SuppressWarnings("unchecked") <del> Observable<String> m = Observable.create(merge(tA, tB)); <add> Observable<String> m = Observable.create(merge(Observable.create(tA), Observable.create(tB))); <ide> Subscription s = m.subscribe(stringObserver); <ide> <ide> tA.sendOnNext("Aone"); <ide> public void testMergeArrayWithThreading() { <ide> final TestASynchronousObservable o2 = new TestASynchronousObservable(); <ide> <ide> @SuppressWarnings("unchecked") <del> Observable<String> m = Observable.create(merge(o1, o2)); <add> Observable<String> m = Observable.create(merge(Observable.create(o1), Observable.create(o2))); <ide> m.subscribe(stringObserver); <ide> <ide> try { <ide> public void testSynchronizationOfMultipleSequences() throws Throwable { <ide> final AtomicInteger totalCounter = new AtomicInteger(); <ide> <ide> @SuppressWarnings("unchecked") <del> Observable<String> m = Observable.create(merge(o1, o2)); <add> Observable<String> m = Observable.create(merge(Observable.create(o1), Observable.create(o2))); <ide> m.subscribe(new Observer<String>() { <ide> <ide> @Override <ide> public void onNext(String v) { <ide> @Test <ide> public void testError1() { <ide> // we are using synchronous execution to test this exactly rather than non-deterministic concurrent behavior <del> final Observable<String> o1 = new TestErrorObservable("four", null, "six"); // we expect to lose "six" <del> final Observable<String> o2 = new TestErrorObservable("one", "two", "three"); // we expect to lose all of these since o1 is done first and fails <add> final Observable<String> o1 = Observable.create(new TestErrorObservable("four", null, "six")); // we expect to lose "six" <add> final 
Observable<String> o2 = Observable.create(new TestErrorObservable("one", "two", "three")); // we expect to lose all of these since o1 is done first and fails <ide> <ide> @SuppressWarnings("unchecked") <ide> Observable<String> m = Observable.create(merge(o1, o2)); <ide> public void testError1() { <ide> @Test <ide> public void testError2() { <ide> // we are using synchronous execution to test this exactly rather than non-deterministic concurrent behavior <del> final Observable<String> o1 = new TestErrorObservable("one", "two", "three"); <del> final Observable<String> o2 = new TestErrorObservable("four", null, "six"); // we expect to lose "six" <del> final Observable<String> o3 = new TestErrorObservable("seven", "eight", null);// we expect to lose all of these since o2 is done first and fails <del> final Observable<String> o4 = new TestErrorObservable("nine");// we expect to lose all of these since o2 is done first and fails <add> final Observable<String> o1 = Observable.create(new TestErrorObservable("one", "two", "three")); <add> final Observable<String> o2 = Observable.create(new TestErrorObservable("four", null, "six")); // we expect to lose "six" <add> final Observable<String> o3 = Observable.create(new TestErrorObservable("seven", "eight", null));// we expect to lose all of these since o2 is done first and fails <add> final Observable<String> o4 = Observable.create(new TestErrorObservable("nine"));// we expect to lose all of these since o2 is done first and fails <ide> <ide> @SuppressWarnings("unchecked") <ide> Observable<String> m = Observable.create(merge(o1, o2, o3, o4)); <ide> public void testError2() { <ide> verify(stringObserver, times(0)).onNext("nine"); <ide> } <ide> <del> private static class TestSynchronousObservable extends Observable<String> { <add> private static class TestSynchronousObservable implements OnSubscribeFunc<String> { <ide> <ide> @Override <del> public Subscription subscribe(Observer<? super String> observer) { <add> public Subscription onSubscribe(Observer<? super String> observer) { <ide> <ide> observer.onNext("hello"); <ide> observer.onCompleted(); <ide> public void unsubscribe() { <ide> } <ide> } <ide> <del> private static class TestASynchronousObservable extends Observable<String> { <add> private static class TestASynchronousObservable implements OnSubscribeFunc<String> { <ide> Thread t; <ide> final CountDownLatch onNextBeingSent = new CountDownLatch(1); <ide> <ide> @Override <del> public Subscription subscribe(final Observer<? super String> observer) { <add> public Subscription onSubscribe(final Observer<? super String> observer) { <ide> t = new Thread(new Runnable() { <ide> <ide> @Override <ide> public void unsubscribe() { <ide> /** <ide> * A Observable that doesn't do the right thing on UnSubscribe/Error/etc in that it will keep sending events down the pipe regardless of what happens. <ide> */ <del> private static class TestObservable extends Observable<String> { <add> private static class TestObservable implements OnSubscribeFunc<String> { <ide> <ide> Observer<? super String> observer = null; <ide> volatile boolean unsubscribed = false; <ide> public void sendOnError(Throwable e) { <ide> } <ide> <ide> @Override <del> public Subscription subscribe(final Observer<? super String> observer) { <add> public Subscription onSubscribe(final Observer<? 
super String> observer) { <ide> this.observer = observer; <ide> return s; <ide> } <ide> } <ide> <del> private static class TestErrorObservable extends Observable<String> { <add> private static class TestErrorObservable implements OnSubscribeFunc<String> { <ide> <ide> String[] valuesToReturn; <ide> <ide> private static class TestErrorObservable extends Observable<String> { <ide> } <ide> <ide> @Override <del> public Subscription subscribe(Observer<? super String> observer) { <add> public Subscription onSubscribe(Observer<? super String> observer) { <ide> <ide> for (String s : valuesToReturn) { <ide> if (s == null) { <ide><path>rxjava-core/src/main/java/rx/operators/OperationMergeDelayError.java <ide> public void before() { <ide> <ide> @Test <ide> public void testErrorDelayed1() { <del> final Observable<String> o1 = new TestErrorObservable("four", null, "six"); // we expect to lose "six" from the source (and it should never be sent by the source since onError was called <del> final Observable<String> o2 = new TestErrorObservable("one", "two", "three"); <add> final Observable<String> o1 = Observable.create(new TestErrorObservable("four", null, "six")); // we expect to lose "six" from the source (and it should never be sent by the source since onError was called <add> final Observable<String> o2 = Observable.create(new TestErrorObservable("one", "two", "three")); <ide> <ide> @SuppressWarnings("unchecked") <ide> Observable<String> m = Observable.create(mergeDelayError(o1, o2)); <ide> public void testErrorDelayed1() { <ide> <ide> @Test <ide> public void testErrorDelayed2() { <del> final Observable<String> o1 = new TestErrorObservable("one", "two", "three"); <del> final Observable<String> o2 = new TestErrorObservable("four", null, "six"); // we expect to lose "six" from the source (and it should never be sent by the source since onError was called <del> final Observable<String> o3 = new TestErrorObservable("seven", "eight", null); <del> final Observable<String> o4 = new TestErrorObservable("nine"); <add> final Observable<String> o1 = Observable.create(new TestErrorObservable("one", "two", "three")); <add> final Observable<String> o2 = Observable.create(new TestErrorObservable("four", null, "six")); // we expect to lose "six" from the source (and it should never be sent by the source since onError was called <add> final Observable<String> o3 = Observable.create(new TestErrorObservable("seven", "eight", null)); <add> final Observable<String> o4 = Observable.create(new TestErrorObservable("nine")); <ide> <ide> @SuppressWarnings("unchecked") <ide> Observable<String> m = Observable.create(mergeDelayError(o1, o2, o3, o4)); <ide> public void testErrorDelayed2() { <ide> <ide> @Test <ide> public void testErrorDelayed3() { <del> final Observable<String> o1 = new TestErrorObservable("one", "two", "three"); <del> final Observable<String> o2 = new TestErrorObservable("four", "five", "six"); <del> final Observable<String> o3 = new TestErrorObservable("seven", "eight", null); <del> final Observable<String> o4 = new TestErrorObservable("nine"); <add> final Observable<String> o1 = Observable.create(new TestErrorObservable("one", "two", "three")); <add> final Observable<String> o2 = Observable.create(new TestErrorObservable("four", "five", "six")); <add> final Observable<String> o3 = Observable.create(new TestErrorObservable("seven", "eight", null)); <add> final Observable<String> o4 = Observable.create(new TestErrorObservable("nine")); <ide> <ide> @SuppressWarnings("unchecked") <ide> Observable<String> m = 
Observable.create(mergeDelayError(o1, o2, o3, o4)); <ide> public void testErrorDelayed3() { <ide> <ide> @Test <ide> public void testErrorDelayed4() { <del> final Observable<String> o1 = new TestErrorObservable("one", "two", "three"); <del> final Observable<String> o2 = new TestErrorObservable("four", "five", "six"); <del> final Observable<String> o3 = new TestErrorObservable("seven", "eight"); <del> final Observable<String> o4 = new TestErrorObservable("nine", null); <add> final Observable<String> o1 = Observable.create(new TestErrorObservable("one", "two", "three")); <add> final Observable<String> o2 = Observable.create(new TestErrorObservable("four", "five", "six")); <add> final Observable<String> o3 = Observable.create(new TestErrorObservable("seven", "eight")); <add> final Observable<String> o4 = Observable.create(new TestErrorObservable("nine", null)); <ide> <ide> @SuppressWarnings("unchecked") <ide> Observable<String> m = Observable.create(mergeDelayError(o1, o2, o3, o4)); <ide> public void testErrorDelayed4WithThreading() { <ide> final TestAsyncErrorObservable o4 = new TestAsyncErrorObservable("nine", null); <ide> <ide> @SuppressWarnings("unchecked") <del> Observable<String> m = Observable.create(mergeDelayError(o1, o2, o3, o4)); <add> Observable<String> m = Observable.create(mergeDelayError(Observable.create(o1), Observable.create(o2), Observable.create(o3), Observable.create(o4))); <ide> m.subscribe(stringObserver); <ide> <ide> try { <ide> public void testErrorDelayed4WithThreading() { <ide> <ide> @Test <ide> public void testCompositeErrorDelayed1() { <del> final Observable<String> o1 = new TestErrorObservable("four", null, "six"); // we expect to lose "six" from the source (and it should never be sent by the source since onError was called <del> final Observable<String> o2 = new TestErrorObservable("one", "two", null); <add> final Observable<String> o1 = Observable.create(new TestErrorObservable("four", null, "six")); // we expect to lose "six" from the source (and it should never be sent by the source since onError was called <add> final Observable<String> o2 = Observable.create(new TestErrorObservable("one", "two", null)); <ide> <ide> @SuppressWarnings("unchecked") <ide> Observable<String> m = Observable.create(mergeDelayError(o1, o2)); <ide> public void testCompositeErrorDelayed1() { <ide> <ide> @Test <ide> public void testCompositeErrorDelayed2() { <del> final Observable<String> o1 = new TestErrorObservable("four", null, "six"); // we expect to lose "six" from the source (and it should never be sent by the source since onError was called <del> final Observable<String> o2 = new TestErrorObservable("one", "two", null); <add> final Observable<String> o1 = Observable.create(new TestErrorObservable("four", null, "six")); // we expect to lose "six" from the source (and it should never be sent by the source since onError was called <add> final Observable<String> o2 = Observable.create(new TestErrorObservable("one", "two", null)); <ide> <ide> @SuppressWarnings("unchecked") <ide> Observable<String> m = Observable.create(mergeDelayError(o1, o2)); <ide> public void testCompositeErrorDelayed2() { <ide> <ide> @Test <ide> public void testMergeObservableOfObservables() { <del> final Observable<String> o1 = new TestSynchronousObservable(); <del> final Observable<String> o2 = new TestSynchronousObservable(); <add> final Observable<String> o1 = Observable.create(new TestSynchronousObservable()); <add> final Observable<String> o2 = Observable.create(new TestSynchronousObservable()); <ide> <ide> 
Observable<Observable<String>> observableOfObservables = Observable.create(new OnSubscribeFunc<Observable<String>>() { <ide> <ide> public void unsubscribe() { <ide> <ide> @Test <ide> public void testMergeArray() { <del> final Observable<String> o1 = new TestSynchronousObservable(); <del> final Observable<String> o2 = new TestSynchronousObservable(); <add> final Observable<String> o1 = Observable.create(new TestSynchronousObservable()); <add> final Observable<String> o2 = Observable.create(new TestSynchronousObservable()); <ide> <ide> @SuppressWarnings("unchecked") <ide> Observable<String> m = Observable.create(mergeDelayError(o1, o2)); <ide> public void testMergeArray() { <ide> <ide> @Test <ide> public void testMergeList() { <del> final Observable<String> o1 = new TestSynchronousObservable(); <del> final Observable<String> o2 = new TestSynchronousObservable(); <add> final Observable<String> o1 = Observable.create(new TestSynchronousObservable()); <add> final Observable<String> o2 = Observable.create(new TestSynchronousObservable()); <ide> List<Observable<String>> listOfObservables = new ArrayList<Observable<String>>(); <ide> listOfObservables.add(o1); <ide> listOfObservables.add(o2); <ide> public void testUnSubscribe() { <ide> TestObservable tB = new TestObservable(); <ide> <ide> @SuppressWarnings("unchecked") <del> Observable<String> m = Observable.create(mergeDelayError(tA, tB)); <add> Observable<String> m = Observable.create(mergeDelayError(Observable.create(tA), Observable.create(tB))); <ide> Subscription s = m.subscribe(stringObserver); <ide> <ide> tA.sendOnNext("Aone"); <ide> public void testMergeArrayWithThreading() { <ide> final TestASynchronousObservable o2 = new TestASynchronousObservable(); <ide> <ide> @SuppressWarnings("unchecked") <del> Observable<String> m = Observable.create(mergeDelayError(o1, o2)); <add> Observable<String> m = Observable.create(mergeDelayError(Observable.create(o1), Observable.create(o2))); <ide> m.subscribe(stringObserver); <ide> <ide> try { <ide> public void testMergeArrayWithThreading() { <ide> verify(stringObserver, times(1)).onCompleted(); <ide> } <ide> <del> private static class TestSynchronousObservable extends Observable<String> { <add> private static class TestSynchronousObservable implements OnSubscribeFunc<String> { <ide> <ide> @Override <del> public Subscription subscribe(Observer<? super String> observer) { <add> public Subscription onSubscribe(Observer<? super String> observer) { <ide> <ide> observer.onNext("hello"); <ide> observer.onCompleted(); <ide> public void unsubscribe() { <ide> } <ide> } <ide> <del> private static class TestASynchronousObservable extends Observable<String> { <add> private static class TestASynchronousObservable implements OnSubscribeFunc<String> { <ide> Thread t; <ide> <ide> @Override <del> public Subscription subscribe(final Observer<? super String> observer) { <add> public Subscription onSubscribe(final Observer<? super String> observer) { <ide> t = new Thread(new Runnable() { <ide> <ide> @Override <ide> public void unsubscribe() { <ide> /** <ide> * A Observable that doesn't do the right thing on UnSubscribe/Error/etc in that it will keep sending events down the pipe regardless of what happens. <ide> */ <del> private static class TestObservable extends Observable<String> { <add> private static class TestObservable implements OnSubscribeFunc<String> { <ide> <ide> Observer<? 
super String> observer = null; <ide> volatile boolean unsubscribed = false; <ide> public void sendOnError(Throwable e) { <ide> } <ide> <ide> @Override <del> public Subscription subscribe(final Observer<? super String> observer) { <add> public Subscription onSubscribe(final Observer<? super String> observer) { <ide> this.observer = observer; <ide> return s; <ide> } <ide> } <ide> <del> private static class TestErrorObservable extends Observable<String> { <add> private static class TestErrorObservable implements OnSubscribeFunc<String> { <ide> <ide> String[] valuesToReturn; <ide> <ide> private static class TestErrorObservable extends Observable<String> { <ide> } <ide> <ide> @Override <del> public Subscription subscribe(Observer<? super String> observer) { <add> public Subscription onSubscribe(Observer<? super String> observer) { <ide> boolean errorThrown = false; <ide> for (String s : valuesToReturn) { <ide> if (s == null) { <ide> public void unsubscribe() { <ide> } <ide> } <ide> <del> private static class TestAsyncErrorObservable extends Observable<String> { <add> private static class TestAsyncErrorObservable implements OnSubscribeFunc<String> { <ide> <ide> String[] valuesToReturn; <ide> <ide> private static class TestAsyncErrorObservable extends Observable<String> { <ide> Thread t; <ide> <ide> @Override <del> public Subscription subscribe(final Observer<? super String> observer) { <add> public Subscription onSubscribe(final Observer<? super String> observer) { <ide> t = new Thread(new Runnable() { <ide> <ide> @Override <ide><path>rxjava-core/src/main/java/rx/operators/OperationMostRecent.java <ide> import rx.Observable; <ide> import rx.Observer; <ide> import rx.Subscription; <add>import rx.subjects.PublishSubject; <add>import rx.subjects.Subject; <ide> import rx.util.Exceptions; <ide> <ide> /** <ide> private T getRecentValue() { <ide> public static class UnitTest { <ide> @Test <ide> public void testMostRecent() { <del> Subscription s = mock(Subscription.class); <del> TestObservable observable = new TestObservable(s); <add> Subject<String, String> observable = PublishSubject.create(); <ide> <ide> Iterator<String> it = mostRecent(observable, "default").iterator(); <ide> <ide> assertTrue(it.hasNext()); <ide> assertEquals("default", it.next()); <ide> assertEquals("default", it.next()); <ide> <del> observable.sendOnNext("one"); <add> observable.onNext("one"); <ide> assertTrue(it.hasNext()); <ide> assertEquals("one", it.next()); <ide> assertEquals("one", it.next()); <ide> <del> observable.sendOnNext("two"); <add> observable.onNext("two"); <ide> assertTrue(it.hasNext()); <ide> assertEquals("two", it.next()); <ide> assertEquals("two", it.next()); <ide> <del> observable.sendOnCompleted(); <add> observable.onCompleted(); <ide> assertFalse(it.hasNext()); <ide> <ide> } <ide> <ide> @Test(expected = TestException.class) <ide> public void testMostRecentWithException() { <del> Subscription s = mock(Subscription.class); <del> TestObservable observable = new TestObservable(s); <add> Subject<String, String> observable = PublishSubject.create(); <ide> <ide> Iterator<String> it = mostRecent(observable, "default").iterator(); <ide> <ide> assertTrue(it.hasNext()); <ide> assertEquals("default", it.next()); <ide> assertEquals("default", it.next()); <ide> <del> observable.sendOnError(new TestException()); <add> observable.onError(new TestException()); <ide> assertTrue(it.hasNext()); <ide> <ide> it.next(); <ide> } <ide> <del> private static class TestObservable extends Observable<String> { <del> <del> Observer<? 
super String> observer = null; <del> Subscription s; <del> <del> public TestObservable(Subscription s) { <del> this.s = s; <del> } <del> <del> /* used to simulate subscription */ <del> public void sendOnCompleted() { <del> observer.onCompleted(); <del> } <del> <del> /* used to simulate subscription */ <del> public void sendOnNext(String value) { <del> observer.onNext(value); <del> } <del> <del> /* used to simulate subscription */ <del> public void sendOnError(Throwable e) { <del> observer.onError(e); <del> } <del> <del> @Override <del> public Subscription subscribe(final Observer<? super String> observer) { <del> this.observer = observer; <del> return s; <del> } <del> } <del> <ide> private static class TestException extends RuntimeException { <ide> private static final long serialVersionUID = 1L; <ide> } <ide><path>rxjava-core/src/main/java/rx/operators/OperationMulticast.java <ide> public static class UnitTest { <ide> <ide> @Test <ide> public void testMulticast() { <del> TestObservable source = new TestObservable(); <add> Subject<String, String> source = PublishSubject.create(); <ide> <ide> ConnectableObservable<String> multicasted = OperationMulticast.multicast(source, <ide> PublishSubject.<String>create()); <ide> public void testMulticast() { <ide> Observer<String> observer = mock(Observer.class); <ide> multicasted.subscribe(observer); <ide> <del> source.sendOnNext("one"); <del> source.sendOnNext("two"); <add> source.onNext("one"); <add> source.onNext("two"); <ide> <ide> multicasted.connect(); <ide> <del> source.sendOnNext("three"); <del> source.sendOnNext("four"); <del> source.sendOnCompleted(); <add> source.onNext("three"); <add> source.onNext("four"); <add> source.onCompleted(); <ide> <ide> verify(observer, never()).onNext("one"); <ide> verify(observer, never()).onNext("two"); <ide> public void testMulticast() { <ide> <ide> @Test <ide> public void testMulticastConnectTwice() { <del> TestObservable source = new TestObservable(); <add> Subject<String, String> source = PublishSubject.create(); <ide> <ide> ConnectableObservable<String> multicasted = OperationMulticast.multicast(source, <ide> PublishSubject.<String>create()); <ide> public void testMulticastConnectTwice() { <ide> Observer<String> observer = mock(Observer.class); <ide> multicasted.subscribe(observer); <ide> <del> source.sendOnNext("one"); <add> source.onNext("one"); <ide> <ide> multicasted.connect(); <ide> multicasted.connect(); <ide> <del> source.sendOnNext("two"); <del> source.sendOnCompleted(); <add> source.onNext("two"); <add> source.onCompleted(); <ide> <ide> verify(observer, never()).onNext("one"); <ide> verify(observer, times(1)).onNext("two"); <ide> public void testMulticastConnectTwice() { <ide> <ide> @Test <ide> public void testMulticastDisconnect() { <del> TestObservable source = new TestObservable(); <add> Subject<String, String> source = PublishSubject.create(); <ide> <ide> ConnectableObservable<String> multicasted = OperationMulticast.multicast(source, <ide> PublishSubject.<String>create()); <ide> public void testMulticastDisconnect() { <ide> Observer<String> observer = mock(Observer.class); <ide> multicasted.subscribe(observer); <ide> <del> source.sendOnNext("one"); <add> source.onNext("one"); <ide> <ide> Subscription connection = multicasted.connect(); <del> source.sendOnNext("two"); <add> source.onNext("two"); <ide> <ide> connection.unsubscribe(); <del> source.sendOnNext("three"); <add> source.onNext("three"); <ide> <ide> multicasted.connect(); <del> source.sendOnNext("four"); <del> 
source.sendOnCompleted(); <add> source.onNext("four"); <add> source.onCompleted(); <ide> <ide> verify(observer, never()).onNext("one"); <ide> verify(observer, times(1)).onNext("two"); <ide> public void testMulticastDisconnect() { <ide> <ide> } <ide> <del> <del> private static class TestObservable extends Observable<String> { <del> <del> Observer<? super String> observer = new Observer<String>() { <del> @Override <del> public void onCompleted() { <del> // Do nothing <del> } <del> <del> @Override <del> public void onError(Throwable e) { <del> // Do nothing <del> } <del> <del> @Override <del> public void onNext(String args) { <del> // Do nothing <del> } <del> }; <del> Subscription s = new Subscription() { <del> @Override <del> public void unsubscribe() { <del> observer = new Observer<String>() { <del> @Override <del> public void onCompleted() { <del> // Do nothing <del> } <del> <del> @Override <del> public void onError(Throwable e) { <del> // Do nothing <del> } <del> <del> @Override <del> public void onNext(String args) { <del> // Do nothing <del> } <del> }; <del> } <del> }; <del> <del> public TestObservable() { <del> } <del> <del> /* used to simulate subscription */ <del> public void sendOnCompleted() { <del> observer.onCompleted(); <del> } <del> <del> /* used to simulate subscription */ <del> public void sendOnNext(String value) { <del> observer.onNext(value); <del> } <del> <del> @Override <del> public Subscription subscribe(final Observer<? super String> observer) { <del> this.observer = observer; <del> return s; <del> } <del> <del> } <del> <ide> } <ide> } <ide><path>rxjava-core/src/main/java/rx/operators/OperationNext.java <ide> import rx.Observable.OnSubscribeFunc; <ide> import rx.Observer; <ide> import rx.Subscription; <add>import rx.subjects.PublishSubject; <add>import rx.subjects.Subject; <ide> import rx.subscriptions.Subscriptions; <ide> import rx.util.Exceptions; <ide> <ide> public static class UnitTest { <ide> <ide> @Test <ide> public void testNext() throws Throwable { <del> Subscription s = mock(Subscription.class); <del> final TestObservable obs = new TestObservable(s); <add> Subject<String, String> obs = PublishSubject.create(); <ide> <ide> Iterator<String> it = next(obs).iterator(); <ide> <ide> assertTrue(it.hasNext()); <ide> <ide> Future<String> next = nextAsync(it); <ide> Thread.sleep(100); <del> obs.sendOnNext("one"); <add> obs.onNext("one"); <ide> assertEquals("one", next.get()); <ide> <ide> assertTrue(it.hasNext()); <ide> <ide> next = nextAsync(it); <ide> Thread.sleep(100); <del> obs.sendOnNext("two"); <add> obs.onNext("two"); <ide> assertEquals("two", next.get()); <ide> <ide> assertTrue(it.hasNext()); <ide> <del> obs.sendOnCompleted(); <add> obs.onCompleted(); <ide> <ide> assertFalse(it.hasNext()); <ide> } <ide> <ide> @Test(expected = TestException.class) <ide> public void testOnError() throws Throwable { <del> Subscription s = mock(Subscription.class); <del> final TestObservable obs = new TestObservable(s); <add> Subject<String, String> obs = PublishSubject.create();; <ide> <ide> Iterator<String> it = next(obs).iterator(); <ide> <ide> assertTrue(it.hasNext()); <ide> <ide> Future<String> next = nextAsync(it); <ide> Thread.sleep(100); <del> obs.sendOnNext("one"); <add> obs.onNext("one"); <ide> assertEquals("one", next.get()); <ide> <ide> assertTrue(it.hasNext()); <ide> <ide> next = nextAsync(it); <ide> Thread.sleep(100); <del> obs.sendOnError(new TestException()); <add> obs.onError(new TestException()); <ide> <ide> try { <ide> next.get(); <ide> public void testOnError() 
throws Throwable { <ide> <ide> @Test <ide> public void testOnErrorViaHasNext() throws Throwable { <del> Subscription s = mock(Subscription.class); <del> final TestObservable obs = new TestObservable(s); <add> Subject<String, String> obs = PublishSubject.create(); <ide> <ide> Iterator<String> it = next(obs).iterator(); <ide> <ide> assertTrue(it.hasNext()); <ide> <ide> Future<String> next = nextAsync(it); <ide> Thread.sleep(100); <del> obs.sendOnNext("one"); <add> obs.onNext("one"); <ide> assertEquals("one", next.get()); <ide> <ide> assertTrue(it.hasNext()); <ide> <ide> next = nextAsync(it); <ide> Thread.sleep(100); <del> obs.sendOnError(new TestException()); <add> obs.onError(new TestException()); <ide> <ide> // this should not throw an exception but instead just return false <ide> try { <ide> public String call() throws Exception { <ide> }); <ide> } <ide> <del> private static class TestObservable extends Observable<String> { <del> <del> Observer<? super String> observer = null; <del> Subscription s; <del> <del> public TestObservable(Subscription s) { <del> this.s = s; <del> } <del> <del> /* used to simulate subscription */ <del> public void sendOnCompleted() { <del> observer.onCompleted(); <del> } <del> <del> /* used to simulate subscription */ <del> public void sendOnNext(String value) { <del> observer.onNext(value); <del> } <del> <del> /* used to simulate subscription */ <del> public void sendOnError(Throwable e) { <del> observer.onError(e); <del> } <del> <del> @Override <del> public Subscription subscribe(final Observer<? super String> observer) { <del> this.observer = observer; <del> return s; <del> } <del> <del> } <del> <ide> @SuppressWarnings("serial") <ide> private static class TestException extends RuntimeException { <ide> <ide><path>rxjava-core/src/main/java/rx/operators/OperationOnErrorResumeNextViaFunction.java <ide> public Observable<String> call(Throwable t1) { <ide> } <ide> <ide> }; <del> Observable<String> observable = Observable.create(onErrorResumeNextViaFunction(w, resume)); <add> Observable<String> observable = Observable.create(onErrorResumeNextViaFunction(Observable.create(w), resume)); <ide> <ide> @SuppressWarnings("unchecked") <ide> Observer<String> aObserver = mock(Observer.class); <ide> public Observable<String> call(Throwable t1) { <ide> } <ide> <ide> }; <del> Observable<String> observable = Observable.create(onErrorResumeNextViaFunction(w, resume)); <add> Observable<String> observable = Observable.create(onErrorResumeNextViaFunction(Observable.create(w), resume)); <ide> <ide> @SuppressWarnings("unchecked") <ide> Observer<String> aObserver = mock(Observer.class); <ide> public Observable<String> call(Throwable t1) { <ide> verify(aObserver, times(0)).onCompleted(); <ide> } <ide> <del> private static class TestObservable extends Observable<String> { <add> private static class TestObservable implements OnSubscribeFunc<String> { <ide> <ide> final Subscription s; <ide> final String[] values; <ide> public TestObservable(Subscription s, String... values) { <ide> } <ide> <ide> @Override <del> public Subscription subscribe(final Observer<? super String> observer) { <add> public Subscription onSubscribe(final Observer<? 
super String> observer) { <ide> System.out.println("TestObservable subscribed to ..."); <ide> t = new Thread(new Runnable() { <ide> <ide><path>rxjava-core/src/main/java/rx/operators/OperationOnErrorResumeNextViaObservable.java <ide> public static class UnitTest { <ide> public void testResumeNext() { <ide> Subscription s = mock(Subscription.class); <ide> // Trigger failure on second element <del> TestObservable w = new TestObservable(s, "one", "fail", "two", "three"); <add> TestObservable f = new TestObservable(s, "one", "fail", "two", "three"); <add> Observable<String> w = Observable.create(f); <ide> Observable<String> resume = Observable.from("twoResume", "threeResume"); <ide> Observable<String> observable = Observable.create(onErrorResumeNextViaObservable(w, resume)); <ide> <ide> public void testResumeNext() { <ide> observable.subscribe(aObserver); <ide> <ide> try { <del> w.t.join(); <add> f.t.join(); <ide> } catch (InterruptedException e) { <ide> fail(e.getMessage()); <ide> } <ide> public void testMapResumeAsyncNext() { <ide> // Trigger multiple failures <ide> Observable<String> w = Observable.from("one", "fail", "two", "three", "fail"); <ide> // Resume Observable is async <del> TestObservable resume = new TestObservable(sr, "twoResume", "threeResume"); <add> TestObservable f = new TestObservable(sr, "twoResume", "threeResume"); <add> Observable<String> resume = Observable.create(f); <ide> <ide> // Introduce map function that fails intermittently (Map does not prevent this when the observer is a <ide> // rx.operator incl onErrorResumeNextViaObservable) <ide> public String call(String s) { <ide> observable.subscribe(aObserver); <ide> <ide> try { <del> resume.t.join(); <add> f.t.join(); <ide> } catch (InterruptedException e) { <ide> fail(e.getMessage()); <ide> } <ide> public String call(String s) { <ide> verify(aObserver, times(1)).onNext("threeResume"); <ide> } <ide> <del> private static class TestObservable extends Observable<String> { <add> private static class TestObservable implements OnSubscribeFunc<String> { <ide> <ide> final Subscription s; <ide> final String[] values; <ide> public TestObservable(Subscription s, String... values) { <ide> } <ide> <ide> @Override <del> public Subscription subscribe(final Observer<? super String> observer) { <add> public Subscription onSubscribe(final Observer<? 
super String> observer) { <ide> System.out.println("TestObservable subscribed to ..."); <ide> t = new Thread(new Runnable() { <ide> <ide><path>rxjava-core/src/main/java/rx/operators/OperationOnErrorReturn.java <ide> public static class UnitTest { <ide> @Test <ide> public void testResumeNext() { <ide> Subscription s = mock(Subscription.class); <del> TestObservable w = new TestObservable(s, "one"); <add> TestObservable f = new TestObservable(s, "one"); <add> Observable<String> w = Observable.create(f); <ide> final AtomicReference<Throwable> capturedException = new AtomicReference<Throwable>(); <ide> <ide> Observable<String> observable = Observable.create(onErrorReturn(w, new Func1<Throwable, String>() { <ide> public String call(Throwable e) { <ide> observable.subscribe(aObserver); <ide> <ide> try { <del> w.t.join(); <add> f.t.join(); <ide> } catch (InterruptedException e) { <ide> fail(e.getMessage()); <ide> } <ide> public String call(Throwable e) { <ide> @Test <ide> public void testFunctionThrowsError() { <ide> Subscription s = mock(Subscription.class); <del> TestObservable w = new TestObservable(s, "one"); <add> TestObservable f = new TestObservable(s, "one"); <add> Observable<String> w = Observable.create(f); <ide> final AtomicReference<Throwable> capturedException = new AtomicReference<Throwable>(); <ide> <ide> Observable<String> observable = Observable.create(onErrorReturn(w, new Func1<Throwable, String>() { <ide> public String call(Throwable e) { <ide> observable.subscribe(aObserver); <ide> <ide> try { <del> w.t.join(); <add> f.t.join(); <ide> } catch (InterruptedException e) { <ide> fail(e.getMessage()); <ide> } <ide> public String call(Throwable e) { <ide> assertNotNull(capturedException.get()); <ide> } <ide> <del> private static class TestObservable extends Observable<String> { <add> private static class TestObservable implements OnSubscribeFunc<String> { <ide> <ide> final Subscription s; <ide> final String[] values; <ide> public TestObservable(Subscription s, String... values) { <ide> } <ide> <ide> @Override <del> public Subscription subscribe(final Observer<? super String> observer) { <add> public Subscription onSubscribe(final Observer<? 
super String> observer) { <ide> System.out.println("TestObservable subscribed to ..."); <ide> t = new Thread(new Runnable() { <ide> <ide><path>rxjava-core/src/main/java/rx/operators/OperationOnExceptionResumeNextViaObservable.java <ide> public static class UnitTest { <ide> public void testResumeNextWithException() { <ide> Subscription s = mock(Subscription.class); <ide> // Trigger failure on second element <del> TestObservable w = new TestObservable(s, "one", "EXCEPTION", "two", "three"); <add> TestObservable f = new TestObservable(s, "one", "EXCEPTION", "two", "three"); <add> Observable<String> w = Observable.create(f); <ide> Observable<String> resume = Observable.from("twoResume", "threeResume"); <ide> Observable<String> observable = Observable.create(onExceptionResumeNextViaObservable(w, resume)); <ide> <ide> public void testResumeNextWithException() { <ide> observable.subscribe(aObserver); <ide> <ide> try { <del> w.t.join(); <add> f.t.join(); <ide> } catch (InterruptedException e) { <ide> fail(e.getMessage()); <ide> } <ide> public void testResumeNextWithException() { <ide> public void testResumeNextWithRuntimeException() { <ide> Subscription s = mock(Subscription.class); <ide> // Trigger failure on second element <del> TestObservable w = new TestObservable(s, "one", "RUNTIMEEXCEPTION", "two", "three"); <add> TestObservable f = new TestObservable(s, "one", "RUNTIMEEXCEPTION", "two", "three"); <add> Observable<String> w = Observable.create(f); <ide> Observable<String> resume = Observable.from("twoResume", "threeResume"); <ide> Observable<String> observable = Observable.create(onExceptionResumeNextViaObservable(w, resume)); <ide> <ide> public void testResumeNextWithRuntimeException() { <ide> observable.subscribe(aObserver); <ide> <ide> try { <del> w.t.join(); <add> f.t.join(); <ide> } catch (InterruptedException e) { <ide> fail(e.getMessage()); <ide> } <ide> public void testResumeNextWithRuntimeException() { <ide> public void testThrowablePassesThru() { <ide> Subscription s = mock(Subscription.class); <ide> // Trigger failure on second element <del> TestObservable w = new TestObservable(s, "one", "THROWABLE", "two", "three"); <add> TestObservable f = new TestObservable(s, "one", "THROWABLE", "two", "three"); <add> Observable<String> w = Observable.create(f); <ide> Observable<String> resume = Observable.from("twoResume", "threeResume"); <ide> Observable<String> observable = Observable.create(onExceptionResumeNextViaObservable(w, resume)); <ide> <ide> public void testThrowablePassesThru() { <ide> observable.subscribe(aObserver); <ide> <ide> try { <del> w.t.join(); <add> f.t.join(); <ide> } catch (InterruptedException e) { <ide> fail(e.getMessage()); <ide> } <ide> public void testThrowablePassesThru() { <ide> public void testErrorPassesThru() { <ide> Subscription s = mock(Subscription.class); <ide> // Trigger failure on second element <del> TestObservable w = new TestObservable(s, "one", "ERROR", "two", "three"); <add> TestObservable f = new TestObservable(s, "one", "ERROR", "two", "three"); <add> Observable<String> w = Observable.create(f); <ide> Observable<String> resume = Observable.from("twoResume", "threeResume"); <ide> Observable<String> observable = Observable.create(onExceptionResumeNextViaObservable(w, resume)); <ide> <ide> public void testErrorPassesThru() { <ide> observable.subscribe(aObserver); <ide> <ide> try { <del> w.t.join(); <add> f.t.join(); <ide> } catch (InterruptedException e) { <ide> fail(e.getMessage()); <ide> } <ide> public void testMapResumeAsyncNext() { <ide> // 
Trigger multiple failures <ide> Observable<String> w = Observable.from("one", "fail", "two", "three", "fail"); <ide> // Resume Observable is async <del> TestObservable resume = new TestObservable(sr, "twoResume", "threeResume"); <add> TestObservable f = new TestObservable(sr, "twoResume", "threeResume"); <add> Observable<String> resume = Observable.create(f); <ide> <ide> // Introduce map function that fails intermittently (Map does not prevent this when the observer is a <ide> // rx.operator incl onErrorResumeNextViaObservable) <ide> public String call(String s) { <ide> <ide> try { <ide> // if the thread gets started (which it shouldn't if it's working correctly) <del> if (resume.t != null) { <del> resume.t.join(); <add> if (f.t != null) { <add> f.t.join(); <ide> } <ide> } catch (InterruptedException e) { <ide> fail(e.getMessage()); <ide> public String call(String s) { <ide> verify(aObserver, times(1)).onCompleted(); <ide> } <ide> <del> private static class TestObservable extends Observable<String> { <add> private static class TestObservable implements OnSubscribeFunc<String> { <ide> <ide> final Subscription s; <ide> final String[] values; <ide> public TestObservable(Subscription s, String... values) { <ide> } <ide> <ide> @Override <del> public Subscription subscribe(final Observer<? super String> observer) { <add> public Subscription onSubscribe(final Observer<? super String> observer) { <ide> System.out.println("TestObservable subscribed to ..."); <ide> t = new Thread(new Runnable() { <ide> <ide><path>rxjava-core/src/main/java/rx/operators/OperationSynchronize.java <ide> public static class UnitTest { <ide> @Test <ide> public void testOnCompletedAfterUnSubscribe() { <ide> TestObservable t = new TestObservable(null); <del> Observable<String> st = Observable.create(synchronize(t)); <add> Observable<String> st = Observable.create(synchronize(Observable.create(t))); <ide> <ide> @SuppressWarnings("unchecked") <ide> Observer<String> w = mock(Observer.class); <ide> public void testOnCompletedAfterUnSubscribe() { <ide> @Test <ide> public void testOnNextAfterUnSubscribe() { <ide> TestObservable t = new TestObservable(null); <del> Observable<String> st = Observable.create(synchronize(t)); <add> Observable<String> st = Observable.create(synchronize(Observable.create(t))); <ide> <ide> @SuppressWarnings("unchecked") <ide> Observer<String> w = mock(Observer.class); <ide> public void testOnNextAfterUnSubscribe() { <ide> @Test <ide> public void testOnErrorAfterUnSubscribe() { <ide> TestObservable t = new TestObservable(null); <del> Observable<String> st = Observable.create(synchronize(t)); <add> Observable<String> st = Observable.create(synchronize(Observable.create(t))); <ide> <ide> @SuppressWarnings("unchecked") <ide> Observer<String> w = mock(Observer.class); <ide> public void testOnErrorAfterUnSubscribe() { <ide> @Test <ide> public void testOnNextAfterOnError() { <ide> TestObservable t = new TestObservable(null); <del> Observable<String> st = Observable.create(synchronize(t)); <add> Observable<String> st = Observable.create(synchronize(Observable.create(t))); <ide> <ide> @SuppressWarnings("unchecked") <ide> Observer<String> w = mock(Observer.class); <ide> public void testOnNextAfterOnError() { <ide> @Test <ide> public void testOnCompletedAfterOnError() { <ide> TestObservable t = new TestObservable(null); <del> Observable<String> st = Observable.create(synchronize(t)); <add> Observable<String> st = Observable.create(synchronize(Observable.create(t))); <ide> <ide> @SuppressWarnings("unchecked") <ide> 
Observer<String> w = mock(Observer.class); <ide> public void testOnCompletedAfterOnError() { <ide> @Test <ide> public void testOnNextAfterOnCompleted() { <ide> TestObservable t = new TestObservable(null); <del> Observable<String> st = Observable.create(synchronize(t)); <add> Observable<String> st = Observable.create(synchronize(Observable.create(t))); <ide> <ide> @SuppressWarnings("unchecked") <ide> Observer<String> w = mock(Observer.class); <ide> public void testOnNextAfterOnCompleted() { <ide> @Test <ide> public void testOnErrorAfterOnCompleted() { <ide> TestObservable t = new TestObservable(null); <del> Observable<String> st = Observable.create(synchronize(t)); <add> Observable<String> st = Observable.create(synchronize(Observable.create(t))); <ide> <ide> @SuppressWarnings("unchecked") <ide> Observer<String> w = mock(Observer.class); <ide> public void testOnErrorAfterOnCompleted() { <ide> /** <ide> * A Observable that doesn't do the right thing on UnSubscribe/Error/etc in that it will keep sending events down the pipe regardless of what happens. <ide> */ <del> private static class TestObservable extends Observable<String> { <add> private static class TestObservable implements OnSubscribeFunc<String> { <ide> <ide> Observer<? super String> observer = null; <ide> <ide> public void sendOnError(Throwable e) { <ide> } <ide> <ide> @Override <del> public Subscription subscribe(final Observer<? super String> observer) { <add> public Subscription onSubscribe(final Observer<? super String> observer) { <ide> this.observer = observer; <ide> return new Subscription() { <ide> <ide><path>rxjava-core/src/main/java/rx/operators/OperationTake.java <ide> public void unsubscribe() <ide> <ide> @Test <ide> public void testUnsubscribeAfterTake() { <del> Subscription s = mock(Subscription.class); <del> TestObservable w = new TestObservable(s, "one", "two", "three"); <add> final Subscription s = mock(Subscription.class); <add> TestObservableFunc f = new TestObservableFunc(s, "one", "two", "three"); <add> Observable<String> w = Observable.create(f); <ide> <ide> @SuppressWarnings("unchecked") <ide> Observer<String> aObserver = mock(Observer.class); <ide> public void testUnsubscribeAfterTake() { <ide> <ide> // wait for the Observable to complete <ide> try { <del> w.t.join(); <add> f.t.join(); <ide> } catch (Throwable e) { <ide> e.printStackTrace(); <ide> fail(e.getMessage()); <ide> public void testUnsubscribeAfterTake() { <ide> verifyNoMoreInteractions(aObserver); <ide> } <ide> <del> private static class TestObservable extends Observable<String> { <add> private static class TestObservableFunc implements OnSubscribeFunc<String> { <ide> <ide> final Subscription s; <ide> final String[] values; <ide> Thread t = null; <ide> <del> public TestObservable(Subscription s, String... values) { <add> public TestObservableFunc(Subscription s, String... values) { <ide> this.s = s; <ide> this.values = values; <ide> } <ide> <ide> @Override <del> public Subscription subscribe(final Observer<? super String> observer) { <add> public Subscription onSubscribe(final Observer<? 
super String> observer) { <ide> System.out.println("TestObservable subscribed to ..."); <ide> t = new Thread(new Runnable() { <ide> <ide><path>rxjava-core/src/main/java/rx/operators/OperationTakeUntil.java <ide> public void testTakeUntil() { <ide> TestObservable other = new TestObservable(sOther); <ide> <ide> Observer<String> result = mock(Observer.class); <del> Observable<String> stringObservable = takeUntil(source, other); <add> Observable<String> stringObservable = takeUntil(Observable.create(source), Observable.create(other)); <ide> stringObservable.subscribe(result); <ide> source.sendOnNext("one"); <ide> source.sendOnNext("two"); <ide> public void testTakeUntilSourceCompleted() { <ide> TestObservable other = new TestObservable(sOther); <ide> <ide> Observer<String> result = mock(Observer.class); <del> Observable<String> stringObservable = takeUntil(source, other); <add> Observable<String> stringObservable = takeUntil(Observable.create(source), Observable.create(other)); <ide> stringObservable.subscribe(result); <ide> source.sendOnNext("one"); <ide> source.sendOnNext("two"); <ide> public void testTakeUntilSourceError() { <ide> Throwable error = new Throwable(); <ide> <ide> Observer<String> result = mock(Observer.class); <del> Observable<String> stringObservable = takeUntil(source, other); <add> Observable<String> stringObservable = takeUntil(Observable.create(source), Observable.create(other)); <ide> stringObservable.subscribe(result); <ide> source.sendOnNext("one"); <ide> source.sendOnNext("two"); <ide> public void testTakeUntilOtherError() { <ide> Throwable error = new Throwable(); <ide> <ide> Observer<String> result = mock(Observer.class); <del> Observable<String> stringObservable = takeUntil(source, other); <add> Observable<String> stringObservable = takeUntil(Observable.create(source), Observable.create(other)); <ide> stringObservable.subscribe(result); <ide> source.sendOnNext("one"); <ide> source.sendOnNext("two"); <ide> public void testTakeUntilOtherCompleted() { <ide> TestObservable other = new TestObservable(sOther); <ide> <ide> Observer<String> result = mock(Observer.class); <del> Observable<String> stringObservable = takeUntil(source, other); <add> Observable<String> stringObservable = takeUntil(Observable.create(source), Observable.create(other)); <ide> stringObservable.subscribe(result); <ide> source.sendOnNext("one"); <ide> source.sendOnNext("two"); <ide> public void testTakeUntilOtherCompleted() { <ide> <ide> } <ide> <del> private static class TestObservable extends Observable<String> { <add> private static class TestObservable implements OnSubscribeFunc<String> { <ide> <ide> Observer<? super String> observer = null; <ide> Subscription s; <ide> public void sendOnError(Throwable e) { <ide> } <ide> <ide> @Override <del> public Subscription subscribe(final Observer<? super String> observer) { <add> public Subscription onSubscribe(final Observer<? 
super String> observer) { <ide> this.observer = observer; <ide> return s; <ide> } <ide><path>rxjava-core/src/main/java/rx/operators/OperationTakeWhile.java <ide> public void testTakeWhileProtectsPredicateCall() { <ide> <ide> @SuppressWarnings("unchecked") <ide> Observer<String> aObserver = mock(Observer.class); <del> Observable<String> take = Observable.create(takeWhile(source, new Func1<String, Boolean>() <add> Observable<String> take = Observable.create(takeWhile(Observable.create(source), new Func1<String, Boolean>() <ide> { <ide> @Override <ide> public Boolean call(String s) <ide> public void testUnsubscribeAfterTake() { <ide> <ide> @SuppressWarnings("unchecked") <ide> Observer<String> aObserver = mock(Observer.class); <del> Observable<String> take = Observable.create(takeWhileWithIndex(w, new Func2<String, Integer, Boolean>() <add> Observable<String> take = Observable.create(takeWhileWithIndex(Observable.create(w), new Func2<String, Integer, Boolean>() <ide> { <ide> @Override <ide> public Boolean call(String s, Integer index) <ide> public Boolean call(String s, Integer index) <ide> verify(s, times(1)).unsubscribe(); <ide> } <ide> <del> private static class TestObservable extends Observable<String> { <add> private static class TestObservable implements OnSubscribeFunc<String> { <ide> <ide> final Subscription s; <ide> final String[] values; <ide> public TestObservable(Subscription s, String... values) { <ide> } <ide> <ide> @Override <del> public Subscription subscribe(final Observer<? super String> observer) { <add> public Subscription onSubscribe(final Observer<? super String> observer) { <ide> System.out.println("TestObservable subscribed to ..."); <ide> t = new Thread(new Runnable() { <ide> <ide><path>rxjava-core/src/main/java/rx/operators/OperationZip.java <ide> public void testZippingDifferentLengthObservableSequences1() { <ide> TestObservable w2 = new TestObservable(); <ide> TestObservable w3 = new TestObservable(); <ide> <del> Observable<String> zipW = Observable.create(zip(w1, w2, w3, getConcat3StringsZipr())); <add> Observable<String> zipW = Observable.create(zip(Observable.create(w1), Observable.create(w2), Observable.create(w3), getConcat3StringsZipr())); <ide> zipW.subscribe(w); <ide> <ide> /* simulate sending data */ <ide> public void testZippingDifferentLengthObservableSequences2() { <ide> TestObservable w2 = new TestObservable(); <ide> TestObservable w3 = new TestObservable(); <ide> <del> Observable<String> zipW = Observable.create(zip(w1, w2, w3, getConcat3StringsZipr())); <add> Observable<String> zipW = Observable.create(zip(Observable.create(w1), Observable.create(w2), Observable.create(w3), getConcat3StringsZipr())); <ide> zipW.subscribe(w); <ide> <ide> /* simulate sending data */ <ide> private static String getStringValue(Object o) { <ide> } <ide> } <ide> <del> private static class TestObservable extends Observable<String> { <add> private static class TestObservable implements OnSubscribeFunc<String> { <ide> <ide> Observer<? super String> observer; <ide> <ide> @Override <del> public Subscription subscribe(Observer<? super String> Observer) { <add> public Subscription onSubscribe(Observer<? super String> Observer) { <ide> // just store the variable where it can be accessed so we can manually trigger it <ide> this.observer = Observer; <ide> return Subscriptions.empty();
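The recurring change in the RxJava patch above is mechanical: test fixtures that used to extend Observable<String> and override subscribe(...) now implement OnSubscribeFunc<String>, expose onSubscribe(...), and are wrapped with Observable.create(...) at each call site. A minimal sketch of the resulting shape, using only types already shown in the patch (rx.Observable, rx.Observable.OnSubscribeFunc, rx.Observer, rx.Subscription, rx.subscriptions.Subscriptions); the class name SketchOnSubscribe and its fixed values are illustrative, not part of the patch.

import rx.Observable;
import rx.Observable.OnSubscribeFunc;
import rx.Observer;
import rx.Subscription;
import rx.subscriptions.Subscriptions;

// The fixture is a subscribe function rather than an Observable subclass,
// so tests turn it into an Observable with Observable.create(...) where needed.
class SketchOnSubscribe implements OnSubscribeFunc<String> {

    @Override
    public Subscription onSubscribe(Observer<? super String> observer) {
        // Push a fixed sequence synchronously, then signal completion.
        observer.onNext("hello");
        observer.onCompleted();
        return Subscriptions.empty();
    }

    // Example call site, mirroring the rewritten tests above.
    static Observable<String> asObservable() {
        return Observable.create(new SketchOnSubscribe());
    }
}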
19
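Where a test only needs to push events by hand (the OperationMostRecent, OperationMulticast and OperationNext changes above), the hand-rolled TestObservable is dropped entirely in favour of a PublishSubject, which plays both the Observable and the Observer role. A short sketch of that usage, assuming only the rx.subjects API referenced in the patch; the demo method and its values are illustrative.

import rx.Observer;
import rx.subjects.PublishSubject;
import rx.subjects.Subject;

class PublishSubjectSketch {

    // Drives an arbitrary observer the way the rewritten unit tests drive theirs.
    static void demo(Observer<String> observer) {
        // The subject stands in for the old TestObservable: subscribe first,
        // then feed events directly through its Observer side.
        Subject<String, String> source = PublishSubject.create();
        source.subscribe(observer);

        source.onNext("one");
        source.onNext("two");
        source.onCompleted(); // or source.onError(new RuntimeException("boom")) to exercise error paths
    }
}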
Go
Go
remove some duplicated code, and preserve context
56a68c15f8a093b1761e77a74d8b7acdfbcb30a2
<ide><path>integration/internal/swarm/states.go <add>package swarm <add> <add>import ( <add> "context" <add> <add> "github.com/docker/docker/api/types" <add> "github.com/docker/docker/api/types/filters" <add> "github.com/docker/docker/client" <add> "gotest.tools/poll" <add>) <add> <add>// NoTasksForService verifies that there are no more tasks for the given service <add>func NoTasksForService(ctx context.Context, client client.ServiceAPIClient, serviceID string) func(log poll.LogT) poll.Result { <add> return func(log poll.LogT) poll.Result { <add> tasks, err := client.TaskList(ctx, types.TaskListOptions{ <add> Filters: filters.NewArgs( <add> filters.Arg("service", serviceID), <add> ), <add> }) <add> if err == nil { <add> if len(tasks) == 0 { <add> return poll.Success() <add> } <add> if len(tasks) > 0 { <add> return poll.Continue("task count for service %s at %d waiting for 0", serviceID, len(tasks)) <add> } <add> return poll.Continue("waiting for tasks for service %s to be deleted", serviceID) <add> } <add> // TODO we should not use an error as indication that the tasks are gone. There may be other reasons for an error to occur. <add> return poll.Success() <add> } <add>} <add> <add>// NoTasks verifies that all tasks are gone <add>func NoTasks(ctx context.Context, client client.ServiceAPIClient) func(log poll.LogT) poll.Result { <add> return func(log poll.LogT) poll.Result { <add> tasks, err := client.TaskList(ctx, types.TaskListOptions{}) <add> switch { <add> case err != nil: <add> return poll.Error(err) <add> case len(tasks) == 0: <add> return poll.Success() <add> default: <add> return poll.Continue("waiting for all tasks to be removed: task count at %d", len(tasks)) <add> } <add> } <add>} <ide><path>integration/network/inspect_test.go <ide> func TestInspectNetwork(t *testing.T) { <ide> // TODO find out why removing networks is needed; other tests fail if the network is not removed, even though they run on a new daemon. 
<ide> err := c.ServiceRemove(ctx, serviceID) <ide> assert.NilError(t, err) <del> poll.WaitOn(t, serviceIsRemoved(c, serviceID), swarm.ServicePoll) <add> poll.WaitOn(t, swarm.NoTasksForService(ctx, c, serviceID), swarm.ServicePoll) <ide> err = c.NetworkRemove(ctx, overlayID) <ide> assert.NilError(t, err) <ide> poll.WaitOn(t, network.IsRemoved(ctx, c, overlayID), swarm.NetworkPoll) <ide> func serviceRunningTasksCount(client client.ServiceAPIClient, serviceID string, <ide> } <ide> } <ide> } <del> <del>func serviceIsRemoved(client client.ServiceAPIClient, serviceID string) func(log poll.LogT) poll.Result { <del> return func(log poll.LogT) poll.Result { <del> filter := filters.NewArgs() <del> filter.Add("service", serviceID) <del> _, err := client.TaskList(context.Background(), types.TaskListOptions{ <del> Filters: filter, <del> }) <del> if err == nil { <del> return poll.Continue("waiting for service %s to be deleted", serviceID) <del> } <del> return poll.Success() <del> } <del>} <ide><path>integration/network/service_test.go <ide> func TestServiceRemoveKeepsIngressNetwork(t *testing.T) { <ide> <ide> poll.WaitOn(t, serviceRunningCount(c, serviceID, instances), swarm.ServicePoll) <ide> <del> _, _, err := c.ServiceInspectWithRaw(context.Background(), serviceID, types.ServiceInspectOptions{}) <add> ctx := context.Background() <add> _, _, err := c.ServiceInspectWithRaw(ctx, serviceID, types.ServiceInspectOptions{}) <ide> assert.NilError(t, err) <ide> <del> err = c.ServiceRemove(context.Background(), serviceID) <add> err = c.ServiceRemove(ctx, serviceID) <ide> assert.NilError(t, err) <ide> <del> poll.WaitOn(t, serviceIsRemoved(c, serviceID), swarm.ServicePoll) <del> poll.WaitOn(t, noServices(c), swarm.ServicePoll) <add> poll.WaitOn(t, noServices(ctx, c), swarm.ServicePoll) <add> poll.WaitOn(t, swarm.NoTasks(ctx, c), swarm.ServicePoll) <ide> <ide> // Ensure that "ingress" is not removed or corrupted <ide> time.Sleep(10 * time.Second) <del> netInfo, err := c.NetworkInspect(context.Background(), ingressNet, types.NetworkInspectOptions{ <add> netInfo, err := c.NetworkInspect(ctx, ingressNet, types.NetworkInspectOptions{ <ide> Verbose: true, <ide> Scope: "swarm", <ide> }) <ide> func swarmIngressReady(client client.NetworkAPIClient) func(log poll.LogT) poll. 
<ide> } <ide> } <ide> <del>func noServices(client client.ServiceAPIClient) func(log poll.LogT) poll.Result { <add>func noServices(ctx context.Context, client client.ServiceAPIClient) func(log poll.LogT) poll.Result { <ide> return func(log poll.LogT) poll.Result { <del> services, err := client.ServiceList(context.Background(), types.ServiceListOptions{}) <add> services, err := client.ServiceList(ctx, types.ServiceListOptions{}) <ide> switch { <ide> case err != nil: <ide> return poll.Error(err) <ide> case len(services) == 0: <ide> return poll.Success() <ide> default: <del> return poll.Continue("Service count at %d waiting for 0", len(services)) <add> return poll.Continue("waiting for all services to be removed: service count at %d", len(services)) <ide> } <ide> } <ide> } <ide><path>integration/service/create_test.go <ide> func TestCreateServiceMultipleTimes(t *testing.T) { <ide> defer d.Stop(t) <ide> client := d.NewClientT(t) <ide> defer client.Close() <add> ctx := context.Background() <ide> <ide> overlayName := "overlay1_" + t.Name() <del> overlayID := network.CreateNoError(t, context.Background(), client, overlayName, <add> overlayID := network.CreateNoError(t, ctx, client, overlayName, <ide> network.WithCheckDuplicate(), <ide> network.WithDriver("overlay"), <ide> ) <ide> func TestCreateServiceMultipleTimes(t *testing.T) { <ide> err = client.ServiceRemove(context.Background(), serviceID) <ide> assert.NilError(t, err) <ide> <del> poll.WaitOn(t, serviceIsRemoved(client, serviceID), swarm.ServicePoll) <del> poll.WaitOn(t, noTasks(client), swarm.ServicePoll) <add> poll.WaitOn(t, swarm.NoTasksForService(ctx, client, serviceID), swarm.ServicePoll) <ide> <ide> serviceID2 := swarm.CreateService(t, d, serviceSpec...) <ide> poll.WaitOn(t, serviceRunningTasksCount(client, serviceID2, instances), swarm.ServicePoll) <ide> <ide> err = client.ServiceRemove(context.Background(), serviceID2) <ide> assert.NilError(t, err) <ide> <del> poll.WaitOn(t, serviceIsRemoved(client, serviceID2), swarm.ServicePoll) <del> poll.WaitOn(t, noTasks(client), swarm.ServicePoll) <add> poll.WaitOn(t, swarm.NoTasksForService(ctx, client, serviceID2), swarm.ServicePoll) <ide> <ide> err = client.NetworkRemove(context.Background(), overlayID) <ide> assert.NilError(t, err) <ide> func TestCreateWithDuplicateNetworkNames(t *testing.T) { <ide> defer d.Stop(t) <ide> client := d.NewClientT(t) <ide> defer client.Close() <add> ctx := context.Background() <ide> <ide> name := "foo_" + t.Name() <del> n1 := network.CreateNoError(t, context.Background(), client, name, <del> network.WithDriver("bridge"), <del> ) <del> n2 := network.CreateNoError(t, context.Background(), client, name, <del> network.WithDriver("bridge"), <del> ) <add> n1 := network.CreateNoError(t, ctx, client, name, network.WithDriver("bridge")) <add> n2 := network.CreateNoError(t, ctx, client, name, network.WithDriver("bridge")) <ide> <ide> // Duplicates with name but with different driver <del> n3 := network.CreateNoError(t, context.Background(), client, name, <del> network.WithDriver("overlay"), <del> ) <add> n3 := network.CreateNoError(t, ctx, client, name, network.WithDriver("overlay")) <ide> <ide> // Create Service with the same name <ide> var instances uint64 = 1 <ide> func TestCreateWithDuplicateNetworkNames(t *testing.T) { <ide> <ide> poll.WaitOn(t, serviceRunningTasksCount(client, serviceID, instances), swarm.ServicePoll) <ide> <del> resp, _, err := client.ServiceInspectWithRaw(context.Background(), serviceID, types.ServiceInspectOptions{}) <add> resp, _, err := 
client.ServiceInspectWithRaw(ctx, serviceID, types.ServiceInspectOptions{}) <ide> assert.NilError(t, err) <ide> assert.Check(t, is.Equal(n3, resp.Spec.TaskTemplate.Networks[0].Target)) <ide> <del> // Remove Service <del> err = client.ServiceRemove(context.Background(), serviceID) <add> // Remove Service, and wait for its tasks to be removed <add> err = client.ServiceRemove(ctx, serviceID) <ide> assert.NilError(t, err) <del> <del> // Make sure task has been destroyed. <del> poll.WaitOn(t, serviceIsRemoved(client, serviceID), swarm.ServicePoll) <add> poll.WaitOn(t, swarm.NoTasksForService(ctx, client, serviceID), swarm.ServicePoll) <ide> <ide> // Remove networks <ide> err = client.NetworkRemove(context.Background(), n3) <ide> func TestCreateServiceSecretFileMode(t *testing.T) { <ide> <ide> err = client.ServiceRemove(ctx, serviceID) <ide> assert.NilError(t, err) <del> <del> poll.WaitOn(t, serviceIsRemoved(client, serviceID), swarm.ServicePoll) <del> poll.WaitOn(t, noTasks(client), swarm.ServicePoll) <add> poll.WaitOn(t, swarm.NoTasksForService(ctx, client, serviceID), swarm.ServicePoll) <ide> <ide> err = client.SecretRemove(ctx, secretName) <ide> assert.NilError(t, err) <ide> func TestCreateServiceConfigFileMode(t *testing.T) { <ide> <ide> err = client.ServiceRemove(ctx, serviceID) <ide> assert.NilError(t, err) <del> <del> poll.WaitOn(t, serviceIsRemoved(client, serviceID)) <del> poll.WaitOn(t, noTasks(client)) <add> poll.WaitOn(t, swarm.NoTasksForService(ctx, client, serviceID)) <ide> <ide> err = client.ConfigRemove(ctx, configName) <ide> assert.NilError(t, err) <ide> func serviceRunningTasksCount(client client.ServiceAPIClient, serviceID string, <ide> } <ide> } <ide> } <del> <del>func noTasks(client client.ServiceAPIClient) func(log poll.LogT) poll.Result { <del> return func(log poll.LogT) poll.Result { <del> filter := filters.NewArgs() <del> tasks, err := client.TaskList(context.Background(), types.TaskListOptions{ <del> Filters: filter, <del> }) <del> switch { <del> case err != nil: <del> return poll.Error(err) <del> case len(tasks) == 0: <del> return poll.Success() <del> default: <del> return poll.Continue("task count at %d waiting for 0", len(tasks)) <del> } <del> } <del>} <del> <del>func serviceIsRemoved(client client.ServiceAPIClient, serviceID string) func(log poll.LogT) poll.Result { <del> return func(log poll.LogT) poll.Result { <del> filter := filters.NewArgs() <del> filter.Add("service", serviceID) <del> _, err := client.TaskList(context.Background(), types.TaskListOptions{ <del> Filters: filter, <del> }) <del> if err == nil { <del> return poll.Continue("waiting for service %s to be deleted", serviceID) <del> } <del> return poll.Success() <del> } <del>}
4
Ruby
Ruby
install binstubs by default
f34c27a452418d8aa17f92bb0fd7ae97b5f7e252
<ide><path>railties/lib/rails/generators/app_base.rb <ide> def bundle_command(command) <ide> end <ide> <ide> def run_bundle <del> bundle_command('install') unless options[:skip_gemfile] || options[:skip_bundle] || options[:pretend] <add> bundle_command('install --binstubs') unless options[:skip_gemfile] || options[:skip_bundle] || options[:pretend] <ide> end <ide> <ide> def empty_directory_with_keep_file(destination, config = {}) <ide><path>railties/test/generators/shared_generator_tests.rb <ide> def test_skeleton_is_created <ide> end <ide> <ide> def test_generation_runs_bundle_install <del> generator([destination_root]).expects(:bundle_command).with('install').once <add> generator([destination_root]).expects(:bundle_command).with('install --binstubs').once <ide> quietly { generator.invoke_all } <ide> end <ide> <ide> def test_template_is_executed_when_supplied_an_https_path <ide> end <ide> <ide> def test_dev_option <del> generator([destination_root], dev: true).expects(:bundle_command).with('install').once <add> generator([destination_root], dev: true).expects(:bundle_command).with('install --binstubs').once <ide> quietly { generator.invoke_all } <ide> rails_path = File.expand_path('../../..', Rails.root) <ide> assert_file 'Gemfile', /^gem\s+["']rails["'],\s+path:\s+["']#{Regexp.escape(rails_path)}["']$/ <ide> end <ide> <ide> def test_edge_option <del> generator([destination_root], edge: true).expects(:bundle_command).with('install').once <add> generator([destination_root], edge: true).expects(:bundle_command).with('install --binstubs').once <ide> quietly { generator.invoke_all } <ide> assert_file 'Gemfile', %r{^gem\s+["']rails["'],\s+github:\s+["']#{Regexp.escape("rails/rails")}["']$} <ide> end
2
Ruby
Ruby
remove useless conditional
3a156ec8e79b88ff90e8f5ed34dd05b89e94b72f
<ide><path>activerecord/lib/active_record/core.rb <ide> def update_attributes_from_transaction_state(transaction_state, depth) <ide> @reflects_state[depth] = true <ide> end <ide> <del> if transaction_state.parent && !@reflects_state[depth+1] <add> if transaction_state.parent <ide> update_attributes_from_transaction_state(transaction_state.parent, depth+1) <ide> end <ide> end
1
PHP
PHP
add test for boolean conditions in contain()
6152546e1aa8670d6a7647d856729a8a05d5fd73
<ide><path>tests/TestCase/ORM/QueryRegressionTest.php <ide> public function testTypemapInFunctions() <ide> 'Output values for functions are not cast yet.' <ide> ); <ide> } <add> <add> public function testBooleanConditionsInContain() <add> { <add> $table = TableRegistry::get('Articles'); <add> $table->belongsToMany('Tags', [ <add> 'foreignKey' => 'article_id', <add> 'associationForeignKey' => 'tag_id', <add> 'through' => 'SpecialTags' <add> ]); <add> $query = $table->find() <add> ->contain(['Tags' => function ($q) { <add> return $q->where(['SpecialTags.highlighted' => false]); <add> }]) <add> ->order(['Articles.id' => 'ASC']); <add> <add> $result = $query->first(); <add> $this->assertEquals(1, $result->id); <add> $this->assertNotEmpty($result->tags); <add> $this->assertNotEmpty($result->tags[0]->_joinData); <add> } <ide> }
1
Python
Python
update core layers
bf4dab3501c62836f94ea17d2f0e198348f5293d
<ide><path>keras/constraints.py <ide> def get_config(self): <ide> unitnorm = UnitNorm <ide> <ide> from .utils.generic_utils import get_from_module <del> <del> <ide> def get(identifier, kwargs=None): <del> return get_from_module(identifier, globals(), 'constraint', instantiate=True, kwargs=kwargs) <add> return get_from_module(identifier, globals(), 'constraint', <add> instantiate=True, kwargs=kwargs) <ide><path>keras/engine/topology.py <ide> class Merge(Layer): <ide> a list of layer instances. Must be more <ide> than one layer/tensor. <ide> mode: string or lambda/function. If string, must be one <del> of: 'sum', 'mul', 'concat', 'ave', 'join', 'cos', 'dot'. <add> of: 'sum', 'mul', 'concat', 'ave', 'cos', 'dot'. <ide> If lambda/function, it should take as input a list of tensors <ide> and return a single tensor. <ide> concat_axis: integer, axis to use in mode `concat`. <ide><path>keras/layers/core.py <ide> def build(self, input_shape): <ide> self.set_weights(self.initial_weights) <ide> del self.initial_weights <ide> <del> def call(self, train=False): <del> X = self.get_input(train) <del> transform_weight = activations.sigmoid(K.dot(X, self.W_carry) + self.b_carry) <del> act = self.activation(K.dot(X, self.W) + self.b) <add> def call(self, x, mask=None): <add> transform_weight = activations.sigmoid(K.dot(x, self.W_carry) + self.b_carry) <add> act = self.activation(K.dot(x, self.W) + self.b) <ide> act *= transform_weight <del> output = act + (1 - transform_weight) * X <add> output = act + (1 - transform_weight) * x <ide> return output <ide> <ide> def get_config(self): <del> config = {'name': self.__class__.__name__, <del> 'init': self.init.__name__, <add> config = {'init': self.init.__name__, <ide> 'transform_bias': self.transform_bias, <ide> 'activation': self.activation.__name__, <ide> 'W_regularizer': self.W_regularizer.get_config() if self.W_regularizer else None, <ide> def call(self, x, mask=None): <ide> return y <ide> <ide> def get_config(self): <del> config = {'name': self.__class__.__name__, <del> 'output_dim': self.output_dim, <add> config = {'output_dim': self.output_dim, <ide> 'init': self.init.__name__, <ide> 'activation': self.activation.__name__, <ide> 'W_regularizer': self.W_regularizer.get_config() if self.W_regularizer else None, <ide><path>keras/utils/generic_utils.py <ide> def get_from_module(identifier, module_params, module_name, <ide> return res(**kwargs) <ide> else: <ide> return res <add> elif type(identifier) is dict: <add> name = identifier.pop('name') <add> res = module_params.get(name) <add> if res: <add> return res(**identifier) <add> else: <add> raise Exception('Invalid ' + str(module_name) + ': ' + <add> str(identifier)) <ide> return identifier <ide> <ide> <ide><path>keras/utils/test_utils.py <ide> def get_test_data(nb_train=1000, nb_test=500, input_shape=(10,), <ide> return (X[:nb_train], y[:nb_train]), (X[nb_train:], y[nb_train:]) <ide> <ide> <del>def test_layer(layer_cls, kwargs={}, input_shape=None, input_dtype=None, <add>def layer_test(layer_cls, kwargs={}, input_shape=None, input_dtype=None, <ide> input_data=None, expected_output=None): <ide> '''Test routine for a layer with a single input tensor <ide> and single output tensor. 
<ide> def test_layer(layer_cls, kwargs={}, input_shape=None, input_dtype=None, <ide> x = Input(shape=input_shape[1:], dtype=input_dtype) <ide> y = layer(x) <ide> model = Model(input=x, output=y) <del> model.compile('rmsprop', 'mse') <add> model.compile('rmsprop', 'mse', mode='FAST_COMPILE') <ide> <ide> expected_output_shape = layer.get_output_shape_for(input_shape) <ide> actual_output = model.predict(input_data) <ide><path>tests/keras/engine/test_topology.py <ide> from keras.models import model_from_json, model_from_yaml <ide> <ide> <del>def test_lambda_serialization(): <del> from keras.layers import Lambda <del> from keras.utils.layer_utils import layer_from_config <del> ld = Lambda(lambda x: x + 1) <del> config = ld.get_config() <del> ld = Lambda.from_config(config) <del> <del> def f(x): <del> return x + 1 <del> ld = Lambda(f) <del> config = ld.get_config() <del> ld = layer_from_config({'class_name': 'Lambda', 'config': config}) <del> <del> ld = Lambda(lambda x: K.concatenate([K.square(x), x]), <del> output_shape=lambda s: tuple(list(s)[:-1] + [2 * s[-1]])) <del> config = ld.get_config() <del> ld = Lambda.from_config(config) <del> <del> def f(x): <del> return K.concatenate([K.square(x), x]) <del> def f_shape(s): <del> return tuple(list(s)[:-1] + [2 * s[-1]]) <del> ld = Lambda(f, output_shape=f_shape) <del> config = ld.get_config() <del> ld = layer_from_config({'class_name': 'Lambda', 'config': config}) <del> <del> <ide> def test_learning_phase(): <ide> a = Input(shape=(32,), name='input_a') <ide> b = Input(shape=(32,), name='input_b') <ide><path>tests/keras/layers/test_advanced_activations.py <ide> import pytest <del>from keras.utils.test_utils import test_layer <add>from keras.utils.test_utils import layer_test <ide> <ide> <ide> def test_leaky_relu(): <ide> from keras.layers.advanced_activations import LeakyReLU <ide> for alpha in [0., .5, -1.]: <del> test_layer(LeakyReLU, kwargs={'alpha': alpha}, <add> layer_test(LeakyReLU, kwargs={'alpha': alpha}, <ide> input_shape=(2, 3, 4)) <ide> <ide> <ide> def test_prelu(): <ide> from keras.layers.advanced_activations import PReLU <del> test_layer(PReLU, kwargs={}, <add> layer_test(PReLU, kwargs={}, <ide> input_shape=(2, 3, 4)) <ide> <ide> <ide> def test_elu(): <ide> from keras.layers.advanced_activations import ELU <ide> for alpha in [0., .5, -1.]: <del> test_layer(ELU, kwargs={'alpha': alpha}, <add> layer_test(ELU, kwargs={'alpha': alpha}, <ide> input_shape=(2, 3, 4)) <ide> <ide> <ide> def test_parametric_softplus(): <ide> from keras.layers.advanced_activations import ParametricSoftplus <ide> for alpha in [0., .5, -1.]: <del> test_layer(ParametricSoftplus, <add> layer_test(ParametricSoftplus, <ide> kwargs={'alpha_init': 1., <ide> 'beta_init': -1}, <ide> input_shape=(2, 3, 4)) <ide> <ide> <ide> def test_thresholded_linear(): <ide> from keras.layers.advanced_activations import ThresholdedLinear <del> test_layer(ThresholdedLinear, kwargs={'theta': 0.5}, <add> layer_test(ThresholdedLinear, kwargs={'theta': 0.5}, <ide> input_shape=(2, 3, 4)) <ide> <ide> <ide> def test_thresholded_relu(): <ide> from keras.layers.advanced_activations import ThresholdedReLU <del> test_layer(ThresholdedReLU, kwargs={'theta': 0.5}, <add> layer_test(ThresholdedReLU, kwargs={'theta': 0.5}, <ide> input_shape=(2, 3, 4)) <ide> <ide> <ide> def test_srelu(): <ide> from keras.layers.advanced_activations import SReLU <del> test_layer(SReLU, kwargs={}, <add> layer_test(SReLU, kwargs={}, <ide> input_shape=(2, 3, 4)) <ide> <ide> <ide><path>tests/keras/layers/test_call.py <del>"""Test 
keras.layers.core.Layer.__call__""" <del> <del>import pytest <del>import numpy as np <del>from numpy.testing import assert_allclose <del> <del>from keras import backend as K <del>from keras.layers.core import Dense <del>from keras.models import Sequential, Graph <del> <del> <del>def test_layer_call(): <del> """Test keras.layers.core.Layer.__call__""" <del> nb_samples, input_dim, output_dim = 3, 10, 5 <del> layer = Dense(output_dim, input_dim=input_dim) <del> W = np.asarray(K.eval(layer.W)).astype(K.floatx()) <del> X = K.placeholder(ndim=2) <del> Y = layer(X) <del> f = K.function([X], [Y]) <del> <del> x = np.ones((nb_samples, input_dim)).astype(K.floatx()) <del> y = f([x])[0].astype(K.floatx()) <del> t = np.dot(x, W).astype(K.floatx()) <del> assert_allclose(t, y, rtol=.2) <del> <del> <del>def test_sequential_call(): <del> """Test keras.models.Sequential.__call__""" <del> nb_samples, input_dim, output_dim = 3, 10, 5 <del> model = Sequential() <del> model.add(Dense(output_dim=output_dim, input_dim=input_dim)) <del> model.compile('sgd', 'mse') <del> <del> # test flat model <del> X = K.placeholder(ndim=2) <del> Y = model(X) <del> f = K.function([X], [Y]) <del> <del> x = np.ones((nb_samples, input_dim)).astype(K.floatx()) <del> y1 = f([x])[0].astype(K.floatx()) <del> y2 = model.predict(x) <del> # results of __call__ should match model.predict <del> assert_allclose(y1, y2) <del> <del> # test nested model <del> model2 = Sequential() <del> model2.add(model) <del> model2.compile('sgd', 'mse') <del> <del> Y2 = model2(X) <del> f = K.function([X], [Y2]) <del> <del> y1 = f([x])[0].astype(K.floatx()) <del> y2 = model2.predict(x) <del> # results of __call__ should match model.predict <del> assert_allclose(y1, y2) <del> <del> <del>def test_graph_call(): <del> """Test keras.models.Graph.__call__""" <del> nb_samples, input_dim, output_dim = 3, 10, 5 <del> model = Graph() <del> model.add_input('input', input_shape=(input_dim, )) <del> model.add_node(Dense(output_dim=output_dim, input_dim=input_dim), <del> input='input', name='output', create_output=True) <del> <del> model.compile('sgd', {'output': 'mse'}) <del> <del> # test flat model <del> X = K.placeholder(ndim=2) <del> Y = model(X) <del> f = K.function([X], [Y]) <del> <del> x = np.ones((nb_samples, input_dim)).astype(K.floatx()) <del> y1 = f([x])[0].astype(K.floatx()) <del> y2 = model.predict({'input': x})['output'] <del> # results of __call__ should match model.predict <del> assert_allclose(y1, y2) <del> <del> # test nested Graph models <del> model2 = Graph() <del> model2.add_input('input', input_shape=(input_dim, )) <del> model2.add_node(model, input='input', name='output', create_output=True) <del> # need to turn off cache because we're reusing model <del> model2.cache_enabled = False <del> model2.compile('sgd', {'output': 'mse'}) <del> <del> Y2 = model2(X) <del> f = K.function([X], [Y2]) <del> <del> y1 = f([x])[0].astype(K.floatx()) <del> y2 = model2.predict({'input': x})['output'] <del> # results of __call__ should match model.predict <del> assert_allclose(y1, y2) <del> <del> <del>def test_graph_multiple_in_out_call(): <del> """Test keras.models.Graph.__call__ with multiple inputs""" <del> nb_samples, input_dim, output_dim = 3, 10, 5 <del> model = Graph() <del> model.add_input('input1', input_shape=(input_dim, )) <del> model.add_input('input2', input_shape=(input_dim, )) <del> model.add_node(Dense(output_dim=output_dim, input_dim=input_dim), <del> inputs=['input1', 'input2'], merge_mode='sum', name='output', create_output=True) <del> <del> 
model.compile('sgd', {'output': 'mse'}) <del> <del> # test flat model <del> X1 = K.placeholder(ndim=2) <del> X2 = K.placeholder(ndim=2) <del> Y = model({'input1': X1, 'input2': X2})['output'] <del> f = K.function([X1, X2], [Y]) <del> <del> x1 = np.ones((nb_samples, input_dim)).astype(K.floatx()) <del> x2 = np.ones((nb_samples, input_dim)).astype(K.floatx()) * -2 <del> y1 = f([x1, x2])[0].astype(K.floatx()) <del> y2 = model.predict({'input1': x1, 'input2': x2})['output'] <del> # results of __call__ should match model.predict <del> assert_allclose(y1, y2) <del> <del> # test with single input, multiple outputs <del> model2 = Graph() <del> model2.add_input('input', input_shape=(input_dim, )) <del> model2.add_node(Dense(output_dim=output_dim, input_dim=input_dim), <del> input='input', name='output1', create_output=True) <del> model2.add_node(Dense(output_dim=output_dim, input_dim=input_dim), <del> input='input', name='output2', create_output=True) <del> <del> model2.compile('sgd', {'output1': 'mse', 'output2': 'mse'}) <del> <del> # test flat model <del> X = K.placeholder(ndim=2) <del> Y = model2(X) <del> f = K.function([X], [Y['output1'], Y['output2']]) <del> <del> x = np.ones((nb_samples, input_dim)).astype(K.floatx()) <del> out = f([x]) <del> y1a = out[0].astype(K.floatx()) <del> y1b = out[1].astype(K.floatx()) <del> y2 = model2.predict({'input': x}) <del> # results of __call__ should match model.predict <del> assert_allclose(y1a, y2['output1']) <del> assert_allclose(y1b, y2['output2']) <del> <del> # test with multiple inputs, multiple outputs <del> model3 = Graph() <del> model3.add_input('input1', input_shape=(input_dim, )) <del> model3.add_input('input2', input_shape=(input_dim, )) <del> model3.add_shared_node(Dense(output_dim=output_dim, input_dim=input_dim), <del> inputs=['input1', 'input2'], name='output', <del> outputs=['output1', 'output2'], create_output=True) <del> model3.compile('sgd', {'output1': 'mse', 'output2': 'mse'}) <del> <del> # test flat model <del> Y = model3({'input1': X1, 'input2': X2}) <del> f = K.function([X1, X2], [Y['output1'], Y['output2']]) <del> <del> x1 = np.ones((nb_samples, input_dim)).astype(K.floatx()) <del> x2 = np.ones((nb_samples, input_dim)).astype(K.floatx()) * -2 <del> out = f([x1, x2]) <del> y1a = out[0].astype(K.floatx()) <del> y1b = out[1].astype(K.floatx()) <del> y2 = model3.predict({'input1': x1, 'input2': x2}) <del> # results of __call__ should match model.predict <del> assert_allclose(y1a, y2['output1']) <del> assert_allclose(y1b, y2['output2']) <del> <del> <del>def test_nested_call(): <del> """Test nested Sequential and Graph models""" <del> nb_samples, input_dim, output_dim = 3, 10, 5 <del> X = K.placeholder(ndim=2) <del> x = np.ones((nb_samples, input_dim)).astype(K.floatx()) <del> <del> # test Graph model nested inside Sequential model <del> model = Graph() <del> model.add_input('input', input_shape=(input_dim, )) <del> model.add_node(Dense(output_dim=output_dim, input_dim=input_dim), <del> input='input', name='output', create_output=True) <del> <del> model2 = Sequential() <del> model2.add(model) <del> model2.compile('sgd', 'mse') <del> <del> Y2 = model2(X) <del> f = K.function([X], [Y2]) <del> <del> y1 = f([x])[0].astype(K.floatx()) <del> y2 = model2.predict(x) <del> # results of __call__ should match model.predict <del> assert_allclose(y1, y2) <del> <del> # test Sequential model inside Graph model <del> model3 = Sequential() <del> model3.add(Dense(output_dim=output_dim, input_dim=input_dim)) <del> <del> model4 = Graph() <del> 
model4.add_input('input', input_shape=(input_dim, )) <del> model4.add_node(model3, input='input', name='output', create_output=True) <del> model4.compile('sgd', {'output': 'mse'}) <del> <del> Y2 = model4(X) <del> f = K.function([X], [Y2]) <del> <del> y1 = f([x])[0].astype(K.floatx()) <del> y2 = model4.predict({'input': x})['output'] <del> # results of __call__ should match model.predict <del> assert_allclose(y1, y2) <del> <del> <del>if __name__ == '__main__': <del> pytest.main([__file__]) <ide><path>tests/keras/layers/test_core.py <ide> import pytest <ide> import numpy as np <del>from keras.models import Sequential <ide> from numpy.testing import assert_allclose <ide> <ide> from keras import backend as K <ide> from keras.layers import core <del>from keras.layers import containers <add>from keras.utils.test_utils import layer_test <ide> <ide> <del>def test_input_output(): <del> nb_samples = 10 <del> input_dim = 5 <del> layer = core.Layer() <del> <del> # Once an input is provided, it should be reachable through the <del> # appropriate getters <del> input = np.ones((nb_samples, input_dim)) <del> layer.input = K.variable(input) <del> for train in [True, False]: <del> assert_allclose(K.eval(layer.get_input(train)), input) <del> assert_allclose(K.eval(layer.get_output(train)), input) <del> <del> <del>def test_connections(): <del> nb_samples = 10 <del> input_dim = 5 <del> layer1 = core.Layer() <del> layer2 = core.Layer() <del> <del> input = np.ones((nb_samples, input_dim)) <del> layer1.input = K.variable(input) <del> <del> # After connecting, input of layer1 should be passed through <del> layer2.set_previous(layer1) <del> for train in [True, False]: <del> assert_allclose(K.eval(layer2.get_input(train)), input) <del> assert_allclose(K.eval(layer2.get_output(train)), input) <del> <del> <del>def test_base(): <del> layer = core.Layer() <del> _runner(layer) <del> <del> <del>def test_masked(): <del> layer = core.MaskedLayer() <del> _runner(layer) <add>def test_masking(): <add> layer_test(core.Masking, <add> kwargs={}, <add> input_shape=(3, 2, 3)) <ide> <ide> <ide> def test_merge(): <del> layer_1 = core.Layer() <del> layer_2 = core.Layer() <del> layer_1.set_input_shape((None,)) <del> layer_2.set_input_shape((None,)) <del> layer = core.Merge([layer_1, layer_2]) <del> _runner(layer) <add> # test modes: 'sum', 'mul', 'concat', 'ave', 'cos', 'dot'. 
<add> # test lambda with output_shape lambda <add> # test function with output_shape function <add> pass <ide> <ide> <ide> def test_dropout(): <del> layer = core.Dropout(0.5) <del> _runner(layer) <add> layer_test(core.Dropout, <add> kwargs={'p': 0.5}, <add> input_shape=(3, 2)) <ide> <ide> <ide> def test_activation(): <del> layer = core.Activation('linear') <del> _runner(layer) <del> <del> <del>def test_reshape(): <del> layer = core.Reshape(dims=(10, 10)) <del> _runner(layer) <del> <add> # with string argument <add> layer_test(core.Activation, <add> kwargs={'activation': 'relu'}, <add> input_shape=(3, 2)) <ide> <del>def test_flatten(): <del> layer = core.Flatten() <del> _runner(layer) <add> # with function argument <add> layer_test(core.Activation, <add> kwargs={'activation': K.relu}, <add> input_shape=(3, 2)) <ide> <ide> <del>def test_repeat_vector(): <del> layer = core.RepeatVector(10) <del> _runner(layer) <add>def test_reshape(): <add> layer_test(core.Reshape, <add> kwargs={'target_shape': (8, 1)}, <add> input_shape=(3, 2, 4)) <ide> <ide> <del>def test_dense(): <del> layer = core.Dense(10, input_shape=(10,)) <del> _runner(layer) <add>def test_permute(): <add> layer_test(core.Permute, <add> kwargs={'dims': (2, 1)}, <add> input_shape=(3, 2, 4)) <ide> <ide> <del>def test_act_reg(): <del> layer = core.ActivityRegularization(0.5, 0.5) <del> _runner(layer) <add>def test_flatten(): <add> layer_test(core.Flatten, <add> kwargs={}, <add> input_shape=(3, 2, 4)) <ide> <ide> <del>def test_time_dist_dense(): <del> layer = core.TimeDistributedDense(10, input_shape=(None, 10)) <del> _runner(layer) <add>def test_repeat_vector(): <add> layer_test(core.RepeatVector, <add> kwargs={'n': 3}, <add> input_shape=(3, 2)) <ide> <ide> <del>def test_time_dist_merge(): <del> layer = core.TimeDistributedMerge() <del> _runner(layer) <add>def test_lambda(): <add> from keras.utils.layer_utils import layer_from_config <add> Lambda = core.Lambda <ide> <add> layer_test(Lambda, <add> kwargs={'function': lambda x: x + 1}, <add> input_shape=(3, 2)) <ide> <del>def test_highway(): <del> layer = core.Highway(input_shape=(10,)) <del> _runner(layer) <add> # test serialization with function <add> def f(x): <add> return x + 1 <ide> <add> ld = Lambda(f) <add> config = ld.get_config() <add> ld = layer_from_config({'class_name': 'Lambda', 'config': config}) <ide> <del>def test_autoencoder(): <del> layer_1 = core.Layer() <del> layer_2 = core.Layer() <add> ld = Lambda(lambda x: K.concatenate([K.square(x), x]), <add> output_shape=lambda s: tuple(list(s)[:-1] + [2 * s[-1]])) <add> config = ld.get_config() <add> ld = Lambda.from_config(config) <ide> <del> layer = core.AutoEncoder(layer_1, layer_2) <del> _runner(layer) <add> # test serialization with output_shape function <add> def f(x): <add> return K.concatenate([K.square(x), x]) <ide> <add> def f_shape(s): <add> return tuple(list(s)[:-1] + [2 * s[-1]]) <ide> <del>def test_autoencoder_advanced(): <del> encoder = containers.Sequential([core.Dense(5, input_shape=(10,))]) <del> decoder = containers.Sequential([core.Dense(10, input_shape=(5,))]) <del> X_train = np.random.random((100, 10)) <del> X_test = np.random.random((100, 10)) <add> ld = Lambda(f, output_shape=f_shape) <add> config = ld.get_config() <add> ld = layer_from_config({'class_name': 'Lambda', 'config': config}) <ide> <del> model = Sequential() <del> model.add(core.Dense(output_dim=10, input_dim=10)) <del> autoencoder = core.AutoEncoder(encoder=encoder, decoder=decoder, <del> output_reconstruction=True) <del> model.add(autoencoder) 
<ide> <del> # training the autoencoder: <del> model.compile(optimizer='sgd', loss='mse') <del> assert autoencoder.output_reconstruction <del> <del> model.fit(X_train, X_train, nb_epoch=1, batch_size=32) <add>def test_dense(): <add> from keras import regularizers <add> from keras import constraints <ide> <del> # predicting compressed representations of inputs: <del> autoencoder.output_reconstruction = False # the autoencoder has to be recompiled after modifying this property <del> assert not autoencoder.output_reconstruction <del> model.compile(optimizer='sgd', loss='mse') <del> representations = model.predict(X_test) <del> assert representations.shape == (100, 5) <add> layer_test(core.Dense, <add> kwargs={'output_dim': 3}, <add> input_shape=(3, 2)) <ide> <del> # the model is still trainable, although it now expects compressed representations as targets: <del> model.fit(X_test, representations, nb_epoch=1, batch_size=32) <add> layer_test(core.Dense, <add> kwargs={'output_dim': 3, <add> 'W_regularizer': regularizers.l2(0.01), <add> 'b_regularizer': regularizers.l1(0.01), <add> 'activity_regularizer': regularizers.activity_l2(0.01), <add> 'W_constraint': constraints.MaxNorm(1), <add> 'b_constraint': constraints.MaxNorm(1)}, <add> input_shape=(3, 2)) <ide> <del> # to keep training against the original inputs, just switch back output_reconstruction to True: <del> autoencoder.output_reconstruction = True <del> model.compile(optimizer='sgd', loss='mse') <del> model.fit(X_train, X_train, nb_epoch=1) <ide> <del> reconstructions = model.predict(X_test) <del> assert reconstructions.shape == (100, 10) <add>def test_activity_regularization(): <add> layer_test(core.ActivityRegularization, <add> kwargs={'l1': 0.01, 'l2': 0.01}, <add> input_shape=(3, 2, 3)) <ide> <ide> <ide> def test_maxout_dense(): <del> layer = core.MaxoutDense(10, 10, input_shape=(20,)) <del> _runner(layer) <add> from keras import regularizers <add> from keras import constraints <ide> <add> layer_test(core.MaxoutDense, <add> kwargs={'output_dim': 3}, <add> input_shape=(3, 2)) <ide> <del>def test_naming(): <del> layer = core.Dense(2, input_dim=2) <del> assert layer.name == 'dense' <add> layer_test(core.MaxoutDense, <add> kwargs={'output_dim': 3, <add> 'W_regularizer': regularizers.l2(0.01), <add> 'b_regularizer': regularizers.l1(0.01), <add> 'activity_regularizer': regularizers.activity_l2(0.01), <add> 'W_constraint': constraints.MaxNorm(1), <add> 'b_constraint': constraints.MaxNorm(1)}, <add> input_shape=(3, 2)) <ide> <del> model = Sequential() <del> model.add(core.Dense(2, input_dim=2, name='my_dense')) <del> model.add(core.Dense(2, name='my_dense')) <ide> <del> assert model.layers[0].name == 'my_dense' <del> assert model.layers[1].name == 'my_dense' <del> <del> model.compile(optimizer='rmsprop', loss='mse') <del> model.train_on_batch(np.random.random((2, 2)), np.random.random((2, 2))) <del> <del> <del>def test_sequences(): <del> '''Test masking sequences with zeroes as padding''' <del> # integer inputs, one per timestep, like embeddings <del> layer = core.Masking(input_shape=(4, 1)) <del> func = K.function([layer.get_input(True)], [layer.get_output_mask()]) <del> input_data = np.array([[[1], [2], [3], [0]], <del> [[0], [4], [5], [0]]], dtype=np.int32) <del> <del> # This is the expected output mask, one dimension less <del> expected = np.array([[1, 1, 1, 0], [0, 1, 1, 0]]) <del> <del> # get mask for this input <del> output = func([input_data])[0] <del> assert np.all(output == expected), 'Output not as expected' <del> <del> <del>def 
test_non_zero(): <del> '''Test masking with non-zero mask value''' <del> layer = core.Masking(5, input_shape=(4, 2)) <del> func = K.function([layer.input], [layer.get_output_mask()]) <del> input_data = np.array([[[1, 1], [2, 1], [3, 1], [5, 5]], <del> [[1, 5], [5, 0], [0, 0], [0, 0]]], <del> dtype=np.int32) <del> output = func([input_data])[0] <del> expected = np.array([[1, 1, 1, 0], [1, 1, 1, 1]]) <del> assert np.all(output == expected), 'Output not as expected' <del> <del> <del>def test_non_zero_output(): <del> '''Test output of masking layer with non-zero mask value''' <del> layer = core.Masking(5, input_shape=(4, 2)) <del> func = K.function([layer.input], [layer.get_output()]) <del> <del> input_data = np.array([[[1, 1], [2, 1], [3, 1], [5, 5]], <del> [[1, 5], [5, 0], [0, 0], [0, 0]]], <del> dtype=np.int32) <del> output = func([input_data])[0] <del> expected = np.array([[[1, 1], [2, 1], [3, 1], [0, 0]], <del> [[1, 5], [5, 0], [0, 0], [0, 0]]]) <del> assert np.all(output == expected), 'Output not as expected' <del> <del> <del>def _runner(layer): <del> assert isinstance(layer, core.Layer) <del> layer.build() <del> conf = layer.get_config() <del> assert (type(conf) == dict) <del> <del> param = layer.get_params() <del> # Typically a list or a tuple, but may be any iterable <del> assert hasattr(param, '__iter__') <del> <del> # Test the setter for the trainable attribute <del> layer.trainable = True <del> layer.trainable = False <del> <del> <del>def test_siamese_all(): <del> right_input_layer = core.Dense(7, input_dim=3) <del> left_input_layer = core.Dense(7, input_dim=3) <del> <del> shared_layer = core.Dense(5,input_dim=7) <del> for mode in ['sum', 'mul', 'ave', 'concat']: <del> siamese_layer = core.Siamese(shared_layer, [left_input_layer, right_input_layer], merge_mode=mode) <del> siamese_layer.output_shape <del> siamese_layer.get_output() <del> <del> <del>@pytest.mark.skipif(K._BACKEND == 'tensorflow', <del> reason='currently not working with TensorFlow') <del>def test_siamese_theano_only(): <del> right_input_layer = core.Dense(7, input_dim=3) <del> left_input_layer = core.Dense(7, input_dim=3) <del> <del> shared_layer = core.Dense(5,input_dim=7) <add>def test_highway(): <add> from keras import regularizers <add> from keras import constraints <add> <add> layer_test(core.Highway, <add> kwargs={}, <add> input_shape=(3, 2)) <add> <add> layer_test(core.Highway, <add> kwargs={'W_regularizer': regularizers.l2(0.01), <add> 'b_regularizer': regularizers.l1(0.01), <add> 'activity_regularizer': regularizers.activity_l2(0.01), <add> 'W_constraint': constraints.MaxNorm(1), <add> 'b_constraint': constraints.MaxNorm(1)}, <add> input_shape=(3, 2)) <add> <add> <add>def test_timedistributeddense(): <add> from keras import regularizers <add> from keras import constraints <add> <add> layer_test(core.TimeDistributedDense, <add> kwargs={'output_dim': 2, 'input_length': 2}, <add> input_shape=(3, 2, 3)) <add> <add> layer_test(core.TimeDistributedDense, <add> kwargs={'output_dim': 3, <add> 'W_regularizer': regularizers.l2(0.01), <add> 'b_regularizer': regularizers.l1(0.01), <add> 'activity_regularizer': regularizers.activity_l2(0.01), <add> 'W_constraint': constraints.MaxNorm(1), <add> 'b_constraint': constraints.MaxNorm(1)}, <add> input_shape=(3, 2, 3)) <ide> <del> for mode in ['dot', 'cos']: <del> siamese_layer = core.Siamese(shared_layer, [left_input_layer, right_input_layer], merge_mode=mode, <del> dot_axes=([1], [1])) <del> siamese_layer.output_shape <del> siamese_layer.get_output() <ide> <ide> if __name__ 
== '__main__': <ide> pytest.main([__file__])
9
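The `get_from_module` change in the patch above also accepts a dict identifier: its `name` key picks the class out of the module registry and the remaining keys are passed as constructor arguments. As a self-contained sketch of that lookup pattern only — the registry and `MaxNorm` stand-in below are hypothetical, not the Keras objects, and the real helper pops `name` from the caller's dict in place rather than copying it:

```python
# Sketch of the dict-based lookup: 'name' selects the class, the rest become kwargs.
class MaxNorm(object):
    def __init__(self, max_value=2):
        self.max_value = max_value

REGISTRY = {'MaxNorm': MaxNorm}

def get(identifier):
    if isinstance(identifier, dict):
        config = dict(identifier)      # copy, so the caller's dict is left intact
        name = config.pop('name')
        cls = REGISTRY.get(name)
        if cls is None:
            raise Exception('Invalid constraint: ' + str(identifier))
        return cls(**config)           # remaining keys are constructor arguments
    return REGISTRY.get(identifier, identifier)

print(get({'name': 'MaxNorm', 'max_value': 3}).max_value)   # prints 3
```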
Python
Python
stop changing cwd to .env/.flaskenv location
84cbfc0698f429c93450cd6f1600e56b41a30247
<ide><path>src/flask/cli.py <ide> def load_dotenv(path=None): <ide> If an env var is already set it is not overwritten, so earlier files in the <ide> list are preferred over later files. <ide> <del> Changes the current working directory to the location of the first file <del> found, with the assumption that it is in the top level project directory <del> and will be where the Python path should import local packages from. <del> <ide> This is a no-op if `python-dotenv`_ is not installed. <ide> <ide> .. _python-dotenv: https://github.com/theskumar/python-dotenv#readme <ide> def load_dotenv(path=None): <ide> <ide> dotenv.load_dotenv(path) <ide> <del> if new_dir and os.getcwd() != new_dir: <del> os.chdir(new_dir) <del> <ide> return new_dir is not None # at least one file was located and loaded <ide> <ide> <ide><path>tests/test_cli.py <ide> def test_load_dotenv(monkeypatch): <ide> monkeypatch._setitem.append((os.environ, item, notset)) <ide> <ide> monkeypatch.setenv("EGGS", "3") <del> monkeypatch.chdir(os.path.join(test_path, "cliapp", "inner1")) <add> monkeypatch.chdir(test_path) <ide> assert load_dotenv() <ide> assert os.getcwd() == test_path <ide> # .flaskenv doesn't overwrite .env
2
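After the patch above, `load_dotenv` only loads environment variables; it no longer changes the process working directory. A small usage sketch — it assumes `python-dotenv` is installed (otherwise the call is a documented no-op), and whether a `.env`/`.flaskenv` file exists only affects the returned value:

```python
# Behaviour after this change: loading dotenv files does not move the process.
import os
from flask.cli import load_dotenv

cwd_before = os.getcwd()
found = load_dotenv()             # True if at least one .env/.flaskenv file was loaded
assert os.getcwd() == cwd_before  # the working directory is left untouched
print("dotenv file found:", found)
```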
Python
Python
remove agg setting from docs
49a798c27d08f244207829c57d9621c9514e8c82
<ide><path>numpy/core/function_base.py <ide> def linspace(start, stop, num=50, endpoint=True, retstep=False, dtype=None, <ide> <ide> >>> import matplotlib <ide> >>> import matplotlib.pyplot <del> >>> matplotlib.pyplot.switch_backend('agg') <ide> >>> import matplotlib.pyplot as plt <ide> >>> N = 8 <ide> >>> y = np.zeros(N) <ide> def logspace(start, stop, num=50, endpoint=True, base=10.0, dtype=None, <ide> <ide> >>> import matplotlib <ide> >>> import matplotlib.pyplot <del> >>> matplotlib.pyplot.switch_backend('agg') <ide> >>> import matplotlib.pyplot as plt <ide> >>> N = 10 <ide> >>> x1 = np.logspace(0.1, 1, N, endpoint=True) <ide> def geomspace(start, stop, num=50, endpoint=True, dtype=None, axis=0): <ide> Graphical illustration of ``endpoint`` parameter: <ide> <ide> >>> import matplotlib <del> >>> matplotlib.use('agg') <ide> >>> import matplotlib.pyplot as plt <ide> >>> N = 10 <ide> >>> y = np.zeros(N) <ide><path>numpy/fft/pocketfft.py <ide> def fft(a, n=None, axis=-1, norm=None): <ide> the `numpy.fft` documentation: <ide> <ide> >>> import matplotlib <del> >>> matplotlib.use('Agg') <ide> >>> import matplotlib.pyplot as plt <ide> >>> t = np.arange(256) <ide> >>> sp = np.fft.fft(np.sin(t)) <ide> def ifft(a, n=None, axis=-1, norm=None): <ide> Create and plot a band-limited signal with random phases: <ide> <ide> >>> import matplotlib <del> >>> matplotlib.use('agg') <ide> >>> import matplotlib.pyplot as plt <ide> >>> t = np.arange(400) <ide> >>> n = np.zeros((400,), dtype=complex) <ide><path>numpy/lib/function_base.py <ide> def blackman(M): <ide> Examples <ide> -------- <ide> >>> import matplotlib <del> >>> matplotlib.use('agg') <ide> >>> import matplotlib.pyplot as plt <ide> >>> np.blackman(12) <ide> array([-1.38777878e-17, 3.26064346e-02, 1.59903635e-01, # may vary <ide> def hanning(M): <ide> <ide> >>> import matplotlib <ide> >>> import matplotlib.pyplot <del> >>> matplotlib.pyplot.switch_backend('agg') <ide> >>> import matplotlib.pyplot as plt <ide> >>> from numpy.fft import fft, fftshift <ide> >>> window = np.hanning(51) <ide> def hamming(M): <ide> <ide> >>> import matplotlib <ide> >>> import matplotlib.pyplot <del> >>> matplotlib.pyplot.switch_backend('agg') <ide> >>> import matplotlib.pyplot as plt <ide> >>> from numpy.fft import fft, fftshift <ide> >>> window = np.hamming(51) <ide> def kaiser(M, beta): <ide> Examples <ide> -------- <ide> >>> import matplotlib <del> >>> matplotlib.use('agg') <ide> >>> import matplotlib.pyplot as plt <ide> >>> np.kaiser(12, 14) <ide> array([7.72686684e-06, 3.46009194e-03, 4.65200189e-02, # may vary <ide><path>numpy/lib/twodim_base.py <ide> def histogram2d(x, y, bins=10, range=None, normed=None, weights=None, <ide> -------- <ide> >>> import matplotlib <ide> >>> import matplotlib.pyplot <del> >>> matplotlib.pyplot.switch_backend('agg') <ide> >>> import matplotlib as mpl <ide> >>> import matplotlib.pyplot as plt <ide>
4
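The docstring examples edited above illustrate the `endpoint` parameter and log spacing with plots; the same behaviour can be checked numerically without configuring any matplotlib backend. A tiny standalone example:

```python
# endpoint=True includes the stop value, endpoint=False excludes it.
import numpy as np

print(np.linspace(0, 1, num=5, endpoint=True))    # [0.   0.25 0.5  0.75 1.  ]
print(np.linspace(0, 1, num=5, endpoint=False))   # [0.  0.2 0.4 0.6 0.8]
print(np.logspace(0, 3, num=4))                   # [   1.   10.  100. 1000.]
```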
Ruby
Ruby
fix rubocop violations
fb9e846be9a1e1a907c84f5d0336bec56fe2a5bc
<ide><path>actionview/lib/action_view/digestor.rb <ide> def tree(name, finder, partial = false, seen = {}) <ide> node <ide> end <ide> else <del> unless name.include?('#') # Dynamic template partial names can never be tracked <add> unless name.include?("#") # Dynamic template partial names can never be tracked <ide> logger.error " Couldn't find template for digesting: #{name}" <ide> end <ide> <ide><path>railties/lib/rails/generators/named_base.rb <ide> def new_helper # :doc: <ide> end <ide> <ide> def field_id(attribute_name) <del> [singular_table_name, attribute_name].join('_') <add> [singular_table_name, attribute_name].join("_") <ide> end <ide> <ide> def singular_table_name # :doc:
2
Mixed
Text
update license documentation
3c55d8e528a2eed012a7b73bb46839bbe44d6aa0
<ide><path>Library/Homebrew/formula.rb <ide> def method_added(method) <ide> # @!attribute [w] <ide> # The SPDX ID of the open-source license that the formula uses. <ide> # Shows when running `brew info`. <del> # Use `:any`, `:all` or `:with` to describe complex license expressions. <del> # `:any` should be used when the user can choose which license to use. <del> # `:all` should be used when the user must use all licenses. <add> # Use `:any_of`, `:all_of` or `:with` to describe complex license expressions. <add> # `:any_of` should be used when the user can choose which license to use. <add> # `:all_of` should be used when the user must use all licenses. <ide> # `:with` should be used to specify a valid SPDX exception. <ide> # Add `+` to an identifier to indicate that the formulae can be <ide> # licensed under later versions of the same license. <add> # @see https://docs.brew.sh/License-Guidelines Homebrew License Guidelines <ide> # @see https://spdx.github.io/spdx-spec/appendix-IV-SPDX-license-expressions/ SPDX license expression guide <ide> # <pre>license "BSD-2-Clause"</pre> <ide> # <pre>license "EPL-1.0+"</pre> <ide> # <pre>license any_of: ["MIT", "GPL-2.0-only"]</pre> <ide> # <pre>license all_of: ["MIT", "GPL-2.0-only"]</pre> <ide> # <pre>license "GPL-2.0-only" => { with: "LLVM-exception" }</pre> <ide> # <pre>license :public_domain</pre> <add> # <pre>license any_of: [ <add> # "MIT", <add> # :public_domain, <add> # all_of: ["0BSD", "Zlib", "Artistic-1.0+"], <add> # "Apache-2.0" => { with: "LLVM-exception" }, <add> # ]</pre> <ide> def license(args = nil) <ide> if args.nil? <ide> @licenses <ide><path>docs/Formula-Cookbook.md <ide> We only accept formulae that use a [Debian Free Software Guidelines license](htt <ide> <ide> Use the license identifier from the [SPDX License List](https://spdx.org/licenses/) e.g. `license "BSD-2-Clause"`, or use `license :public_domain` for public domain software. <ide> <del>If the software is available under multiple licenses, you should list them all in an array: <add>Use `:any_of`, `:all_of` or `:with` to describe complex license expressions. `:any_of` should be used when the user can choose which license to use. `:all_of` should be used when the user must use all licenses. `:with` should be used to specify a valid SPDX exception. Add `+` to an identifier to indicate that the formulae can be licensed under later versions of the same license. <ide> <del>```ruby <del>license ["MIT", "GPL-2.0"] <del>``` <del> <del>Note: only specify multiple licenses if the formula gives the user a choice between the licenses. Formulae that have different licenses for different parts of their software should specify only the more restrictive license. For help determining which license is more restrictive, take a look [https://choosealicense.com](https://choosealicense.com/licenses/) or the [Comparison of free and open-source software licences Wikipedia page](https://en.wikipedia.org/wiki/Comparison_of_free_and_open-source_software_licences). <add>Check out the [License Guidelines](License-Guidelines.md) for examples of complex license expressions in Homebrew formulae. <ide> <ide> ### Check the build system <ide> <ide><path>docs/License-Guidelines.md <add># License Guidelines <add> <add>We only accept formulae that use a [Debian Free Software Guidelines license](https://wiki.debian.org/DFSGLicenses) or are released into the public domain following [DFSG Guidelines on Public Domain software](https://wiki.debian.org/DFSGLicenses#Public_Domain). 
<add> <add>## Specifying a License <add> <add>All licenses are identified by their license identifier from the [SPDX License List](https://spdx.org/licenses/). <add> <add>Specify a license by passing it to the `license` method: <add> <add>```ruby <add>license "MIT" <add>``` <add> <add>The public domain can be indicated using a symbol: <add> <add>```ruby <add>license :public_domain <add>``` <add> <add>## Complex SPDX License Expressions <add> <add>Some formulae have multiple licenses that need to be combined in different ways. In these cases, a more complex license expression can be used. These expressions are based on the [SPDX License Expression Guidelines](https://spdx.github.io/spdx-spec/appendix-IV-SPDX-license-expressions/). <add> <add>Add a `+` to indicate that the user can choose a later version of the same license: <add> <add>```ruby <add>license "EPL-1.0+" <add>``` <add> <add>GNU licenses (`GPL`, `LGPL`, `AGPL` and `GFDL`) require either the `-only` or the `-or-later` suffix to indicate whether a later version of the license is allowed: <add> <add>```ruby <add>license "LGPL-2.1-only" <add>``` <add> <add>```ruby <add>license "GPL-1.0-or-later" <add>``` <add> <add>Use `:any_of` to indicate that the user can choose which license applies: <add> <add>```ruby <add>license any_of: ["MIT", "0BSD"] <add>``` <add> <add>Use `:all_of` to indicate that the user must comply with multiple licenses: <add> <add>```ruby <add>license all_of: ["MIT", "0BSD"] <add>``` <add> <add>Use `:with` to indicate a license exception: <add> <add>```ruby <add>license "MIT" => { with: "LLVM-exception" } <add>``` <add> <add>These expressions can be nested as needed: <add> <add>```ruby <add>license any_of: [ <add> "MIT", <add> :public_domain, <add> all_of: ["0BSD", "Zlib", "Artistic-1.0+"], <add> "Apache-2.0" => { with: "LLVM-exception" }, <add>] <add>``` <add> <add>## Specifying Forbidden Licenses <add> <add>The `HOMEBREW_FORBIDDEN_LICENSES` environment variable can be set to forbid installation of formulae that require or have dependencies that require certain licenses. <add> <add>The `HOMEBREW_FORBIDDEN_LICENSES` should be set to a space separated list of licenses. Use `public_domain` to forbid installation of formulae with a `:public_domain` license. <add> <add>For example, the following forbids installation of `MIT`, `Artistic-1.0` and `:public_domain` licenses: <add> <add>```bash <add>export HOMEBREW_FORBIDDEN_LICENSES="MIT Artistic-1.0 public_domain" <add>``` <add> <add>In this example Homebrew would refuse to install any formula that specifies the `MIT` license. Homebrew would also forbid installation of any formula that declares a dependency on a formula that specifies `MIT`, even if the original formula has an allowed license. <add> <add>Homebrew interprets complex license expressions and determines whether the licenses allow installation. To continue the above example, Homebrew would not allow installation of a formula with the following license declarations: <add> <add>```ruby <add>license any_of: ["MIT", "Artistic-1.0"] <add>``` <add> <add>```ruby <add>license all_of: ["MIT", "0BSD"] <add>``` <add> <add>Homebrew _would_ allow formulae with the following declaration to be installed: <add> <add>```ruby <add>license any_of: ["MIT", "0BSD"] <add>``` <add> <add>`HOMEBREW_FORBIDDEN_LICENSES` can also forbid future versions of specific licenses. 
For example, to forbid `Artistic-1.0`, `Artistic-2.0` and any future Artistic licenses, use: <add> <add>```bash <add>export HOMEBREW_FORBIDDEN_LICENSES="Artistic-1.0+" <add>``` <add> <add>For GNU licenses (such as `GPL`, `LGPL`, `AGPL` and `GFDL`), use `-only` or `-or-later`. For example, the following would forbid `GPL-2.0`, `LGPL-2.1` and `LGPL-3.0` formulae from being installed, but would allow `GPL-3.0` <add> <add>```bash <add>export HOMEBREW_FORBIDDEN_LICENSES="GPL-2.0-only LGPL-2.1-or-later" <add>``` <ide><path>docs/README.md <ide> - [How To Open A Pull Request (and get it merged)](How-To-Open-a-Homebrew-Pull-Request.md) <ide> - [Formula Cookbook](Formula-Cookbook.md) <ide> - [Acceptable Formulae](Acceptable-Formulae.md) <add>- [License Guidelines](License-Guidelines.md) <ide> - [Formulae Versions](Versions.md) <ide> - [Node for Formula Authors](Node-for-Formula-Authors.md) <ide> - [Python for Formula Authors](Python-for-Formula-Authors.md)
4
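The guidelines added above describe how `any_of`/`all_of` expressions interact with `HOMEBREW_FORBIDDEN_LICENSES`. As a rough illustration only, the decision logic behind the worked examples can be modelled in a few lines of Python — this is not Homebrew code and it ignores `+`, `-or-later` and `with:` handling:

```python
# Toy model of the allow/forbid decision: a plain string is allowed unless it is
# forbidden; any_of needs one allowed alternative; all_of needs every part allowed.
FORBIDDEN = {"MIT", "Artistic-1.0", "public_domain"}

def allowed(expr):
    if isinstance(expr, str):
        return expr not in FORBIDDEN
    kind, parts = expr                      # ("any_of" | "all_of", [sub-expressions])
    if kind == "any_of":
        return any(allowed(part) for part in parts)
    return all(allowed(part) for part in parts)

print(allowed(("any_of", ["MIT", "Artistic-1.0"])))   # False - both choices forbidden
print(allowed(("all_of", ["MIT", "0BSD"])))           # False - MIT is forbidden
print(allowed(("any_of", ["MIT", "0BSD"])))           # True  - 0BSD is an allowed choice
```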
PHP
PHP
fix boolean operators
6c2a68891a7069b9e6ffa9c23d0f125720b6307a
<ide><path>src/Illuminate/Foundation/Console/TinkerCommand.php <ide> protected function prompt() <ide> */ <ide> protected function supportsBoris() <ide> { <del> return (extension_loaded('readline') and extension_loaded('posix') and extension_loaded('pcntl')); <add> return extension_loaded('readline') && extension_loaded('posix') && extension_loaded('pcntl'); <ide> } <ide> <ide> }
1
Ruby
Ruby
remove unused #await_close
d09bce96b4ee0e2e2434caaed5ff07fe7d9fd1c9
<ide><path>actionpack/lib/action_controller/metal/live.rb <ide> def connected? <ide> !@aborted <ide> end <ide> <del> def await_close <del> synchronize do <del> @cv.wait_until { @closed } <del> end <del> end <del> <ide> def on_error(&block) <ide> @error_callback = block <ide> end
1
Javascript
Javascript
run tests against chrome 91 on macos catalina
56b0ee32756d9ac7ab39145e6c6df656fa649c07
<ide><path>protractor-circleci-conf.js <ide> config.sauceKey = process.env.SAUCE_ACCESS_KEY; <ide> config.multiCapabilities = [ <ide> capabilitiesForSauceLabs({ <ide> browserName: 'chrome', <del> platform: 'OS X 10.14', <del> version: '81' <add> platform: 'OS X 10.15', <add> version: '91' <ide> }), <ide> capabilitiesForSauceLabs({ <ide> browserName: 'firefox',
1
Ruby
Ruby
remove unused methods
f6cf909476c032524ed7c404ee7c0fb270d39d58
<ide><path>railties/lib/rails/generators/app_base.rb <ide> def self.path(name, path, comment = nil) <ide> new(name, comment, nil, path: path) <ide> end <ide> <del> def github; options[:github]; end <del> def path; options[:path]; end <del> def platforms; options[:platforms]; end <del> <ide> def padding(max_width) <ide> ' ' * (max_width - name.length + 2) <ide> end
1
Javascript
Javascript
prevent simultaneous heap snapshots
16a9ab142c804c90c1990eef01dc6116bc786d16
<ide><path>lib/internal/debugger/inspect_repl.js <ide> function createRepl(inspector) { <ide> const history = { control: [], debug: [] }; <ide> const watchedExpressions = []; <ide> const knownBreakpoints = []; <add> let heapSnapshotPromise = null; <ide> let pauseOnExceptionState = 'none'; <ide> let lastCommand; <ide> <ide> function createRepl(inspector) { <ide> }, <ide> <ide> takeHeapSnapshot(filename = 'node.heapsnapshot') { <del> return new Promise((resolve, reject) => { <add> if (heapSnapshotPromise) { <add> print( <add> 'Cannot take heap snapshot because another snapshot is in progress.' <add> ); <add> return heapSnapshotPromise; <add> } <add> heapSnapshotPromise = new Promise((resolve, reject) => { <ide> const absoluteFile = Path.resolve(filename); <ide> const writer = FS.createWriteStream(absoluteFile); <ide> let sizeWritten = 0; <ide> function createRepl(inspector) { <ide> writer.end(() => { <ide> teardown(); <ide> print(`Wrote snapshot: ${absoluteFile}`); <add> heapSnapshotPromise = null; <ide> resolve(); <ide> }); <ide> } <ide> function createRepl(inspector) { <ide> HeapProfiler.takeHeapSnapshot({ reportProgress: true }), <ide> onResolve, onReject); <ide> }); <add> return heapSnapshotPromise; <ide> }, <ide> <ide> get watchers() { <ide><path>test/known_issues/test-debugger-takeHeapSnapshot-race.js <del>'use strict'; <del>const common = require('../common'); <del> <del>// Refs: https://github.com/nodejs/node/issues/39555 <del> <del>// After this issue is fixed, this can perhaps be integrated into <del>// test/sequential/test-debugger-heap-profiler.js as it shares almost all <del>// the same code. <del> <del>// These skips should be uncommented once the issue is fixed. <del>// common.skipIfInspectorDisabled(); <del> <del>// if (!common.isMainThread) { <del>// common.skip('process.chdir() is not available in workers'); <del>// } <del> <del>// This assert.fail() can be removed once the issue is fixed. <del>if (!common.hasCrypto || !process.features.inspector) { <del> require('assert').fail('crypto is not available'); <del>} <del> <del>const fixtures = require('../common/fixtures'); <del>const startCLI = require('../common/debugger'); <del>const tmpdir = require('../common/tmpdir'); <del> <del>tmpdir.refresh(); <del>process.chdir(tmpdir.path); <del> <del>const { readFileSync } = require('fs'); <del> <del>const filename = 'node.heapsnapshot'; <del> <del>// Check that two simultaneous snapshots don't step all over each other. <del>{ <del> const cli = startCLI([fixtures.path('debugger/empty.js')]); <del> <del> function onFatal(error) { <del> cli.quit(); <del> throw error; <del> } <del> <del> return cli.waitForInitialBreak() <del> .then(() => cli.waitForPrompt()) <del> .then(() => cli.command('takeHeapSnapshot(); takeHeapSnapshot()')) <del> .then(() => JSON.parse(readFileSync(filename, 'utf8'))) <del> .then(() => cli.quit()) <del> .then(null, onFatal); <del>} <ide><path>test/sequential/test-debugger-heap-profiler.js <ide> const filename = 'node.heapsnapshot'; <ide> .then(() => cli.waitForPrompt()) <ide> .then(() => cli.command('takeHeapSnapshot()')) <ide> .then(() => JSON.parse(readFileSync(filename, 'utf8'))) <add> // Check that two simultaneous snapshots don't step all over each other. <add> // Refs: https://github.com/nodejs/node/issues/39555 <add> .then(() => cli.command('takeHeapSnapshot(); takeHeapSnapshot()')) <add> .then(() => JSON.parse(readFileSync(filename, 'utf8'))) <ide> .then(() => cli.quit()) <ide> .then(null, onFatal); <ide> }
3
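The patch above stops `takeHeapSnapshot` from racing with itself by caching the in-flight promise and handing it back to any caller that asks again before it settles. Purely as an illustration of that guard pattern, here is a minimal sketch in Python with asyncio — all names and the sleep stand-in are hypothetical, not the Node.js implementation:

```python
# Guard pattern: remember the in-flight task and hand it back to later callers,
# then clear it once the work settles so a new snapshot can start afterwards.
import asyncio

_snapshot_task = None

async def _write_snapshot(filename):
    await asyncio.sleep(0.1)          # stand-in for the slow snapshot write
    return filename

async def take_snapshot(filename="node.heapsnapshot"):
    global _snapshot_task
    if _snapshot_task is not None:    # another snapshot is already in progress
        print("Cannot take heap snapshot: another snapshot is in progress.")
        return await _snapshot_task
    _snapshot_task = asyncio.ensure_future(_write_snapshot(filename))
    try:
        return await _snapshot_task
    finally:
        _snapshot_task = None

async def main():
    # Two "simultaneous" requests resolve to the same snapshot.
    first, second = await asyncio.gather(take_snapshot(), take_snapshot())
    print(first, second)

asyncio.run(main())
```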
Python
Python
use input shape instead of input layer
a4552fb00492c47f67261a8c1b9538a0614cbb71
<ide><path>keras/layers/core.py <ide> class Lambda(Layer): <ide> <ide> Arguments <ide> --------- <del> function - The function to be evaluated. Takes one argument : ouput of previous layer <del> output_shape - Expected output shape from function. Could be a tuple or a function of input layer <add> function - The function to be evaluated. Takes one argument : output of previous layer <add> output_shape - Expected output shape from function. Could be a tuple or a function of the shape of the input <ide> """ <ide> <ide> def __init__(self, function, output_shape=None, ndim=2): <ide> def output_shape(self): <ide> else: <ide> output_shape_func = marshal.loads(self._output_shape) <ide> output_shape_func = types.FunctionType(output_shape_func, globals()) <del> shape = output_shape_func(self.previous) <add> shape = output_shape_func(self.previous.output_shape) <ide> if type(shape) not in {list, tuple}: <ide> raise Exception("output_shape function must return a tuple") <ide> return tuple(shape) <ide> class LambdaMerge(Lambda): <ide> --------- <ide> layers - Input layers. Similar to layers argument of Merge <ide> function - The function to be evaluated. Takes one argument : list of outputs from input layers <del> output_shape - Expected output shape from function. Could be a tuple or a function of list of input layers <add> output_shape - Expected output shape from function. Could be a tuple or a function of list of input shapes <ide> """ <ide> def __init__(self, layers, function, output_shape=None): <ide> if len(layers) < 2: <ide> def output_shape(self): <ide> else: <ide> output_shape_func = marshal.loads(self._output_shape) <ide> output_shape_func = types.FunctionType(output_shape_func, globals()) <del> shape = output_shape_func(self.layers) <add> input_shapes = [layer.output_shape for layer in self.layers] <add> shape = output_shape_func(input_shapes) <ide> if type(shape) not in {list, tuple}: <ide> raise Exception("output_shape function must return a tuple") <ide> return tuple(shape)
1
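The patch above changes the `output_shape` callback contract for `Lambda` and `LambdaMerge`: the callable now receives the shape of the input (a tuple, or a list of tuples for merges) rather than the previous layer object. A small sketch of such a shape function in plain Python; the Keras `Lambda` usage is only indicated in a comment and is not executed:

```python
# The output_shape callable maps an input shape tuple to an output shape tuple.
def double_last_dim(input_shape):
    # e.g. (batch, features) -> (batch, 2 * features)
    return tuple(list(input_shape)[:-1] + [2 * input_shape[-1]])

# Roughly how it would be attached (not executed here):
#   Lambda(lambda x: K.concatenate([K.square(x), x]), output_shape=double_last_dim)
print(double_last_dim((32, 10)))   # (32, 20)
```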
Ruby
Ruby
improve the performance of `save` and friends
136fc65c9b8b66e1fb56f3a17f0d1fddff9b4bd0
<ide><path>activemodel/lib/active_model/dirty.rb <ide> def changes_include?(attr_name) <ide> # Returns +true+ if attr_name were changed before the model was saved, <ide> # +false+ otherwise. <ide> def previous_changes_include?(attr_name) <del> @previously_changed.include?(attr_name) <add> previous_changes.include?(attr_name) <ide> end <ide> <ide> # Removes current changes and makes them accessible through +previous_changes+. <ide> def attribute_change(attr) <ide> <ide> # Handles <tt>*_previous_change</tt> for +method_missing+. <ide> def attribute_previous_change(attr) <del> @previously_changed[attr] if attribute_previously_changed?(attr) <add> previous_changes[attr] if attribute_previously_changed?(attr) <ide> end <ide> <ide> # Handles <tt>*_will_change!</tt> for +method_missing+. <ide><path>activerecord/lib/active_record/attribute_methods/dirty.rb <ide> def initialize_dup(other) # :nodoc: <ide> end <ide> <ide> def changes_applied <del> super <add> @previous_mutation_tracker = @mutation_tracker <ide> store_original_attributes <ide> end <ide> <ide> def clear_changes_information <del> super <add> @previous_mutation_tracker = nil <ide> store_original_attributes <ide> end <ide> <ide> def changes <ide> end <ide> end <ide> <add> def previous_changes <add> previous_mutation_tracker.changes <add> end <add> <ide> def attribute_changed_in_place?(attr_name) <ide> @mutation_tracker.changed_in_place?(attr_name) <ide> end <ide> def store_original_attributes <ide> @mutation_tracker = @mutation_tracker.now_tracking(@attributes) <ide> end <ide> <add> def previous_mutation_tracker <add> @previous_mutation_tracker ||= NullMutationTracker.new <add> end <add> <ide> def cache_changed_attributes <ide> @cached_changed_attributes = changed_attributes <ide> yield <ide><path>activerecord/lib/active_record/attribute_mutation_tracker.rb <ide> def changed_values <ide> end <ide> end <ide> <add> def changes <add> attr_names.each_with_object({}.with_indifferent_access) do |attr_name, result| <add> if changed?(attr_name) <add> result[attr_name] = [original_attributes.fetch_value(attr_name), attributes.fetch_value(attr_name)] <add> end <add> end <add> end <add> <ide> def changed?(attr_name) <ide> attr_name = attr_name.to_s <ide> modified?(attr_name) || changed_in_place?(attr_name) <ide> def clean_copy_of(attributes) <ide> end <ide> end <ide> end <add> <add> class NullMutationTracker <add> def changed_values <add> {} <add> end <add> <add> def changes <add> {} <add> end <add> <add> def changed?(*) <add> false <add> end <add> <add> def changed_in_place?(*) <add> false <add> end <add> <add> def forget_change(*) <add> end <add> end <ide> end
3
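The patch above computes `previous_changes` from a mutation tracker that diffs current attribute values against a snapshot of the originals, with a `NullMutationTracker` standing in before any changes have been applied. For illustration, a minimal sketch of that snapshot-and-diff idea written in Python — class and attribute names are hypothetical, not the Rails API:

```python
# Snapshot-and-diff change tracking: changes are computed lazily by comparing
# the live attributes against the copy captured the last time changes applied.
class MutationTracker:
    def __init__(self, attributes):
        self.attributes = attributes        # live dict, mutated by the caller
        self.original = dict(attributes)    # frozen snapshot

    def changed(self, name):
        return self.original.get(name) != self.attributes.get(name)

    def changes(self):
        return {name: (self.original.get(name), value)
                for name, value in self.attributes.items()
                if self.changed(name)}

record = {"title": "draft"}
tracker = MutationTracker(record)           # snapshot taken when changes are applied
record["title"] = "published"
print(tracker.changes())                    # {'title': ('draft', 'published')}
```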
Go
Go
save start error into state.error
fb6ee865a949905f678aa7c7066c809664a8a4aa
<ide><path>daemon/container.go <ide> func (container *Container) Start() (err error) { <ide> // setup has been cleaned up properly <ide> defer func() { <ide> if err != nil { <add> container.setError(err) <add> container.toDisk() <ide> container.cleanup() <ide> } <ide> }() <ide><path>daemon/state.go <ide> type State struct { <ide> Restarting bool <ide> Pid int <ide> ExitCode int <add> Error string // contains last known error when starting the container <ide> StartedAt time.Time <ide> FinishedAt time.Time <ide> waitChan chan struct{} <ide> func (s *State) SetRunning(pid int) { <ide> } <ide> <ide> func (s *State) setRunning(pid int) { <add> s.Error = "" <ide> s.Running = true <ide> s.Paused = false <ide> s.Restarting = false <ide> func (s *State) SetRestarting(exitCode int) { <ide> s.Unlock() <ide> } <ide> <add>// setError sets the container's error state. This is useful when we want to <add>// know the error that occurred when container transits to another state <add>// when inspecting it <add>func (s *State) setError(err error) { <add> s.Error = err.Error() <add>} <add> <ide> func (s *State) IsRestarting() bool { <ide> s.Lock() <ide> res := s.Restarting <ide><path>integration-cli/docker_cli_start_test.go <ide> func TestStartAttachCorrectExitCode(t *testing.T) { <ide> <ide> logDone("start - correct exit code returned with -a") <ide> } <add> <add>func TestStartRecordError(t *testing.T) { <add> defer deleteAllContainers() <add> <add> // when container runs successfully, we should not have state.Error <add> cmd(t, "run", "-d", "-p", "9999:9999", "--name", "test", "busybox", "top") <add> stateErr, err := inspectField("test", "State.Error") <add> if err != nil { <add> t.Fatalf("Failed to inspect %q state's error, got error %q", "test", err) <add> } <add> if stateErr != "" { <add> t.Fatalf("Expected to not have state error but got state.Error(%q)", stateErr) <add> } <add> <add> // Expect this to fail and records error because of ports conflict <add> out, _, err := runCommandWithOutput(exec.Command(dockerBinary, "run", "-d", "--name", "test2", "-p", "9999:9999", "busybox", "top")) <add> if err == nil { <add> t.Fatalf("Expected error but got none, output %q", out) <add> } <add> stateErr, err = inspectField("test2", "State.Error") <add> if err != nil { <add> t.Fatalf("Failed to inspect %q state's error, got error %q", "test2", err) <add> } <add> expected := "port is already allocated" <add> if stateErr == "" || !strings.Contains(stateErr, expected) { <add> t.Fatalf("State.Error(%q) does not include %q", stateErr, expected) <add> } <add> <add> // Expect the conflict to be resolved when we stop the initial container <add> cmd(t, "stop", "test") <add> cmd(t, "start", "test2") <add> stateErr, err = inspectField("test2", "State.Error") <add> if err != nil { <add> t.Fatalf("Failed to inspect %q state's error, got error %q", "test", err) <add> } <add> if stateErr != "" { <add> t.Fatalf("Expected to not have state error but got state.Error(%q)", stateErr) <add> } <add> <add> logDone("start - set state error when start fails") <add>}
3
Java
Java
remove unnecessary code from prior commit
310f0bfb6c396ac09e0d712fc0706a8e0c8a8d52
<ide><path>spring-core/src/main/java/org/springframework/core/io/buffer/DataBufferUtils.java <ide> public static Flux<DataBuffer> write( <ide> <ide> } <ide> <del> public void handle() { <del> Number n = 5; <del> inspect(n); <del> } <del> <del> public <U extends Number> void inspect(U u){ <del> } <del> <del> <ide> /** <ide> * Write the given stream of {@link DataBuffer DataBuffers} to the given <ide> * file {@link Path}. The optional {@code options} parameter specifies
1
PHP
PHP
rename the method and fix docblock return type
49ea72e53e6c3755fccbbe110c366012f41ae902
<ide><path>lib/Cake/Model/Model.php <ide> public function save($data = null, $validate = true, $fieldList = array()) { <ide> if (!empty($this->id)) { <ide> $success = (bool)$db->update($this, $fields, $values); <ide> } else { <del> if (empty($this->data[$this->alias][$this->primaryKey]) && $this->_isUUID($this->primaryKey)) { <add> if (empty($this->data[$this->alias][$this->primaryKey]) && $this->_isUUIDField($this->primaryKey)) { <ide> if (array_key_exists($this->primaryKey, $this->data[$this->alias])) { <ide> $j = array_search($this->primaryKey, $fields); <ide> $values[$j] = String::uuid(); <ide> public function save($data = null, $validate = true, $fieldList = array()) { <ide> * Check if the passed in field is a UUID field <ide> * <ide> * @param string $field the field to check <del> * @return array <add> * @return boolean <ide> */ <del> protected function _isUUID($field) { <add> protected function _isUUIDField($field) { <ide> $field = $this->schema($field); <ide> return $field['length'] == 36 && in_array($field['type'], array('string', 'binary')); <ide> } <ide> protected function _saveMulti($joined, $id, $db) { <ide> $dbMulti = $db; <ide> } <ide> <del> $isUUID = !empty($this->{$join}->primaryKey) && $this->{$join}->_isUUID($this->{$join}->primaryKey); <add> $isUUID = !empty($this->{$join}->primaryKey) && $this->{$join}->_isUUIDField($this->{$join}->primaryKey); <ide> <ide> $newData = $newValues = $newJoins = array(); <ide> $primaryAdded = false;
1
Java
Java
use assertthat from hamcrest instead of junit 4
d616e10dca686b12b4cc95ae103e6680bd4e4224
<ide><path>spring-beans/src/test/java/org/springframework/beans/factory/ConcurrentBeanFactoryTests.java <ide> import org.junit.Before; <ide> import org.junit.Test; <ide> <del>import org.springframework.beans.PropertyEditorRegistrar; <del>import org.springframework.beans.PropertyEditorRegistry; <ide> import org.springframework.beans.factory.support.DefaultListableBeanFactory; <ide> import org.springframework.beans.factory.xml.XmlBeanDefinitionReader; <ide> import org.springframework.beans.propertyeditors.CustomDateEditor; <del>import org.springframework.core.io.Resource; <ide> import org.springframework.tests.Assume; <ide> import org.springframework.tests.TestGroup; <ide> <ide><path>spring-core/src/test/java/org/springframework/core/CollectionFactoryTests.java <ide> import org.springframework.util.LinkedMultiValueMap; <ide> import org.springframework.util.MultiValueMap; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.hamcrest.Matchers.*; <ide> import static org.junit.Assert.*; <ide> import static org.springframework.core.CollectionFactory.*; <ide><path>spring-core/src/test/java/org/springframework/core/GenericTypeResolverTests.java <ide> <ide> import org.junit.Test; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.hamcrest.Matchers.*; <ide> import static org.junit.Assert.*; <ide> import static org.springframework.core.GenericTypeResolver.*; <ide><path>spring-core/src/test/java/org/springframework/core/ResolvableTypeTests.java <ide> import org.springframework.core.ResolvableType.VariableResolver; <ide> import org.springframework.util.MultiValueMap; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.hamcrest.Matchers.*; <ide> import static org.junit.Assert.*; <ide> import static org.mockito.BDDMockito.any; <ide><path>spring-core/src/test/java/org/springframework/core/SerializableTypeWrapperTests.java <ide> <ide> import org.junit.Test; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.hamcrest.Matchers.*; <del>import static org.junit.Assert.*; <ide> <ide> /** <ide> * Tests for {@link SerializableTypeWrapper}. 
<ide><path>spring-core/src/test/java/org/springframework/core/annotation/AnnotationAttributesTests.java <ide> import org.springframework.core.annotation.AnnotationUtilsTests.ImplicitAliasesContextConfig; <ide> <ide> import static org.hamcrest.CoreMatchers.*; <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.junit.Assert.*; <ide> <ide> /** <ide><path>spring-core/src/test/java/org/springframework/core/annotation/AnnotationAwareOrderComparatorTests.java <ide> import org.junit.Test; <ide> <ide> import static org.hamcrest.CoreMatchers.*; <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.junit.Assert.*; <ide> <ide> /** <ide><path>spring-core/src/test/java/org/springframework/core/convert/TypeDescriptorTests.java <ide> import org.springframework.util.LinkedMultiValueMap; <ide> import org.springframework.util.MultiValueMap; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.hamcrest.Matchers.*; <ide> import static org.junit.Assert.*; <ide> <ide><path>spring-core/src/test/java/org/springframework/core/convert/converter/ConvertingComparatorTests.java <ide> import org.springframework.util.comparator.ComparableComparator; <ide> <ide> import static org.hamcrest.CoreMatchers.*; <del>import static org.junit.Assert.*; <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> <ide> /** <ide> * Tests for {@link ConvertingComparator}. <ide><path>spring-core/src/test/java/org/springframework/core/convert/converter/DefaultConversionServiceTests.java <ide> import org.springframework.util.ClassUtils; <ide> import org.springframework.util.StopWatch; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.hamcrest.Matchers.*; <ide> import static org.junit.Assert.*; <ide> <ide><path>spring-core/src/test/java/org/springframework/core/convert/support/ByteBufferConverterTests.java <ide> <ide> import org.springframework.core.convert.converter.Converter; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.hamcrest.Matchers.*; <del>import static org.junit.Assert.*; <ide> <ide> /** <ide> * Tests for {@link ByteBufferConverter}. <ide><path>spring-core/src/test/java/org/springframework/core/convert/support/GenericConversionServiceTests.java <ide> <ide> import static java.util.Comparator.*; <ide> import static java.util.stream.Collectors.*; <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.hamcrest.Matchers.*; <ide> import static org.junit.Assert.*; <ide> <ide><path>spring-core/src/test/java/org/springframework/core/convert/support/MapToMapConverterTests.java <ide> import org.springframework.util.LinkedMultiValueMap; <ide> import org.springframework.util.MultiValueMap; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.hamcrest.Matchers.*; <ide> import static org.junit.Assert.*; <ide> <ide><path>spring-core/src/test/java/org/springframework/core/env/CustomEnvironmentTests.java <ide> import org.junit.Test; <ide> <ide> import static org.hamcrest.CoreMatchers.*; <del>import static org.junit.Assert.*; <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> <ide> /** <ide> * Unit tests covering the extensibility of {@link AbstractEnvironment}. 
<ide><path>spring-core/src/test/java/org/springframework/core/env/JOptCommandLinePropertySourceTests.java <ide> import org.junit.Test; <ide> <ide> import static org.hamcrest.CoreMatchers.*; <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.junit.Assert.*; <ide> <ide> /** <ide><path>spring-core/src/test/java/org/springframework/core/env/MutablePropertySourcesTests.java <ide> import org.springframework.mock.env.MockPropertySource; <ide> <ide> import static org.hamcrest.CoreMatchers.*; <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.junit.Assert.*; <ide> <ide> /** <ide><path>spring-core/src/test/java/org/springframework/core/env/PropertySourceTests.java <ide> import org.junit.Test; <ide> <ide> import static org.hamcrest.CoreMatchers.*; <del>import static org.junit.Assert.*; <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> <ide> /** <ide> * Unit tests for {@link PropertySource} implementations. <ide><path>spring-core/src/test/java/org/springframework/core/env/PropertySourcesPropertyResolverTests.java <ide> import org.springframework.core.convert.ConverterNotFoundException; <ide> import org.springframework.mock.env.MockPropertySource; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.hamcrest.Matchers.*; <ide> import static org.junit.Assert.*; <ide> <ide><path>spring-core/src/test/java/org/springframework/core/env/SimpleCommandLineParserTests.java <ide> import org.junit.Test; <ide> <ide> import static org.hamcrest.CoreMatchers.*; <del>import static org.junit.Assert.*; <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> <ide> public class SimpleCommandLineParserTests { <ide> <ide><path>spring-core/src/test/java/org/springframework/core/env/SimpleCommandLinePropertySourceTests.java <ide> import org.junit.Test; <ide> <ide> import static org.hamcrest.CoreMatchers.*; <del>import static org.junit.Assert.*; <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> <ide> /** <ide> * Unit tests for {@link SimpleCommandLinePropertySource}. <ide> public void covertNonOptionArgsToStringArrayAndList() { <ide> assertThat(nonOptionArgsList.get(0), equalTo("noa1")); <ide> assertThat(nonOptionArgsList.get(1), equalTo("noa2")); <ide> } <add> <ide> } <ide><path>spring-core/src/test/java/org/springframework/core/env/SystemEnvironmentPropertySourceTests.java <ide> import org.junit.Test; <ide> <ide> import static org.hamcrest.CoreMatchers.*; <del>import static org.junit.Assert.*; <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> <ide> /** <ide> * Unit tests for {@link SystemEnvironmentPropertySource}. 
<ide><path>spring-core/src/test/java/org/springframework/core/io/ClassPathResourceTests.java <ide> <ide> import org.junit.Test; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.hamcrest.Matchers.*; <ide> import static org.junit.Assert.*; <ide> <ide><path>spring-core/src/test/java/org/springframework/core/io/ResourceTests.java <ide> import org.springframework.util.FileCopyUtils; <ide> <ide> import static org.hamcrest.CoreMatchers.*; <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.junit.Assert.*; <ide> <ide> /** <ide><path>spring-core/src/test/java/org/springframework/core/io/support/ResourcePropertySourceTests.java <ide> import org.springframework.core.io.ClassPathResource; <ide> <ide> import static org.hamcrest.CoreMatchers.*; <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.junit.Assert.*; <ide> <ide> /** <ide><path>spring-core/src/test/java/org/springframework/core/task/SimpleAsyncTaskExecutorTests.java <ide> import org.springframework.util.ConcurrencyThrottleSupport; <ide> <ide> import static org.hamcrest.CoreMatchers.*; <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.junit.Assert.*; <ide> <ide> /** <ide><path>spring-core/src/test/java/org/springframework/core/type/AbstractClassMetadataMemberClassTests.java <ide> import org.junit.Test; <ide> <ide> import static org.hamcrest.CoreMatchers.*; <del>import static org.junit.Assert.*; <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> <ide> /** <ide> * Abstract base class for testing implementations of <ide> public void againstMemberClass() { <ide> String[] nestedClasses = metadata.getMemberClassNames(); <ide> assertThat(nestedClasses, equalTo(new String[]{})); <ide> } <add> <ide> } <ide><path>spring-core/src/test/java/org/springframework/core/type/AnnotationMetadataTests.java <ide> import org.springframework.stereotype.Component; <ide> <ide> import static org.hamcrest.CoreMatchers.*; <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.junit.Assert.*; <ide> <ide> /** <ide><path>spring-core/src/test/java/org/springframework/core/type/CachingMetadataReaderLeakTests.java <ide> import org.springframework.tests.TestGroup; <ide> <ide> import static org.hamcrest.CoreMatchers.*; <del>import static org.junit.Assert.*; <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> <ide> /** <ide> * Unit tests for checking the behaviour of {@link CachingMetadataReaderFactory} under <ide><path>spring-core/src/test/java/org/springframework/tests/AssumeTests.java <ide> <ide> import static java.util.stream.Collectors.*; <ide> import static org.hamcrest.CoreMatchers.*; <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.junit.Assert.*; <ide> import static org.springframework.tests.Assume.*; <ide> import static org.springframework.tests.TestGroup.*; <ide><path>spring-core/src/test/java/org/springframework/tests/MockitoUtils.java <ide> import org.mockito.internal.util.MockUtil; <ide> import org.mockito.invocation.Invocation; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.hamcrest.Matchers.*; <del>import static org.junit.Assert.*; <ide> <ide> /** <ide> * General test utilities for use with {@link Mockito}. 
<ide><path>spring-core/src/test/java/org/springframework/tests/TestGroupTests.java <ide> import org.junit.Test; <ide> import org.junit.rules.ExpectedException; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.hamcrest.Matchers.*; <del>import static org.junit.Assert.*; <ide> <ide> /** <ide> * Tests for {@link TestGroup}. <ide><path>spring-core/src/test/java/org/springframework/util/ConcurrentReferenceHashMapTests.java <ide> import org.springframework.util.comparator.ComparableComparator; <ide> import org.springframework.util.comparator.NullSafeComparator; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.hamcrest.Matchers.*; <ide> import static org.junit.Assert.*; <ide> <ide><path>spring-core/src/test/java/org/springframework/util/ObjectUtilsTests.java <ide> import org.junit.rules.ExpectedException; <ide> <ide> import static org.hamcrest.CoreMatchers.*; <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.junit.Assert.*; <ide> import static org.springframework.util.ObjectUtils.*; <ide> <ide><path>spring-core/src/test/java/org/springframework/util/ReflectionUtilsTests.java <ide> import org.springframework.tests.sample.objects.TestObject; <ide> <ide> import static org.hamcrest.CoreMatchers.*; <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.junit.Assert.*; <ide> <ide> /** <ide><path>spring-core/src/test/java/org/springframework/util/StreamUtilsTests.java <ide> import org.junit.Test; <ide> import org.mockito.InOrder; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.hamcrest.Matchers.*; <del>import static org.junit.Assert.*; <ide> import static org.mockito.BDDMockito.*; <ide> <ide> /** <ide><path>spring-core/src/test/java/org/springframework/util/comparator/BooleanComparatorTests.java <ide> import org.junit.Test; <ide> <ide> import static org.hamcrest.CoreMatchers.*; <del>import static org.junit.Assert.*; <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> <ide> /** <ide> * Tests for {@link BooleanComparator}. <ide><path>spring-core/src/test/java/org/springframework/util/comparator/InstanceComparatorTests.java <ide> import org.junit.Test; <ide> <ide> import static org.hamcrest.CoreMatchers.*; <del>import static org.junit.Assert.*; <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> <ide> /** <ide> * Tests for {@link InstanceComparator}. 
<ide><path>spring-core/src/test/java/org/springframework/util/concurrent/SettableListenableFutureTests.java <ide> <ide> import org.junit.Test; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.hamcrest.Matchers.*; <ide> import static org.junit.Assert.*; <ide> import static org.mockito.Mockito.any; <ide><path>spring-core/src/test/java/org/springframework/util/xml/AbstractStaxHandlerTestCase.java <ide> import org.xml.sax.XMLReader; <ide> import org.xmlunit.util.Predicate; <ide> <del>import static org.junit.Assert.*; <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.xmlunit.matchers.CompareMatcher.isSimilarTo; <ide> <ide> /** <ide><path>spring-core/src/test/java/org/springframework/util/xml/DomContentHandlerTests.java <ide> import org.xml.sax.InputSource; <ide> import org.xml.sax.XMLReader; <ide> <del>import static org.junit.Assert.*; <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.xmlunit.matchers.CompareMatcher.*; <ide> <ide> /** <ide><path>spring-core/src/test/java/org/springframework/util/xml/ListBasedXMLEventReaderTests.java <ide> import org.junit.Test; <ide> <ide> import static javax.xml.stream.XMLStreamConstants.*; <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.junit.Assert.*; <ide> import static org.xmlunit.matchers.CompareMatcher.*; <ide> <ide><path>spring-core/src/test/java/org/springframework/util/xml/SimpleNamespaceContextTests.java <ide> import org.junit.Test; <ide> <ide> import static org.hamcrest.CoreMatchers.*; <del>import static org.junit.Assert.*; <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> <ide> /** <ide> * @author Arjen Poutsma <ide><path>spring-core/src/test/java/org/springframework/util/xml/StaxResultTests.java <ide> import java.io.StringReader; <ide> import java.io.StringWriter; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.junit.Assert.assertEquals; <ide> import static org.junit.Assert.assertNull; <del>import static org.junit.Assert.assertThat; <ide> import static org.xmlunit.matchers.CompareMatcher.isSimilarTo; <ide> <ide> /** <ide><path>spring-core/src/test/java/org/springframework/util/xml/StaxSourceTests.java <ide> import java.io.StringReader; <ide> import java.io.StringWriter; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.junit.Assert.assertEquals; <ide> import static org.junit.Assert.assertNull; <del>import static org.junit.Assert.assertThat; <ide> import static org.xmlunit.matchers.CompareMatcher.isSimilarTo; <ide> <ide> /** <ide><path>spring-core/src/test/java/org/springframework/util/xml/XMLEventStreamReaderTests.java <ide> import java.io.StringReader; <ide> import java.io.StringWriter; <ide> <del>import static org.junit.Assert.assertThat; <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.xmlunit.matchers.CompareMatcher.isSimilarTo; <ide> <ide> public class XMLEventStreamReaderTests { <ide><path>spring-core/src/test/java/org/springframework/util/xml/XMLEventStreamWriterTests.java <ide> import javax.xml.stream.XMLOutputFactory; <ide> import java.io.StringWriter; <ide> <del>import static org.junit.Assert.assertThat; <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.xmlunit.matchers.CompareMatcher.isSimilarTo; <ide> <ide> public class XMLEventStreamWriterTests { <ide><path>spring-web/src/test/java/org/springframework/http/CacheControlTests.java <ide> import 
org.hamcrest.Matchers; <ide> import org.junit.Test; <ide> <del>import static org.junit.Assert.*; <del> <ide> import java.time.Duration; <ide> import java.util.concurrent.TimeUnit; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <add> <ide> /** <ide> * @author Brian Clozel <ide> */ <ide><path>spring-web/src/test/java/org/springframework/http/HttpHeadersTests.java <ide> import java.util.TimeZone; <ide> <ide> import org.hamcrest.Matchers; <del>import org.junit.Ignore; <ide> import org.junit.Test; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.hamcrest.Matchers.*; <ide> import static org.junit.Assert.*; <ide> <ide><path>spring-web/src/test/java/org/springframework/http/HttpRangeTests.java <ide> import java.io.IOException; <ide> import java.nio.charset.StandardCharsets; <ide> import java.util.ArrayList; <del>import java.util.Arrays; <ide> import java.util.List; <del>import java.util.stream.Stream; <ide> <ide> import org.junit.Test; <ide> <ide><path>spring-web/src/test/java/org/springframework/http/ResponseCookieTests.java <ide> import org.junit.Test; <ide> <ide> import static org.hamcrest.CoreMatchers.*; <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.junit.Assert.*; <ide> <ide> /** <ide><path>spring-web/src/test/java/org/springframework/http/ResponseEntityTests.java <ide> import org.hamcrest.Matchers; <ide> import org.junit.Test; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.junit.Assert.*; <ide> <ide> /** <ide><path>spring-web/src/test/java/org/springframework/http/client/support/InterceptingHttpAccessorTests.java <ide> import org.springframework.http.client.ClientHttpRequestInterceptor; <ide> import org.springframework.http.client.ClientHttpResponse; <ide> <del>import static org.junit.Assert.*; <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> <ide> /** <ide> * Tests for {@link InterceptingHttpAccessor}. 
<ide><path>spring-web/src/test/java/org/springframework/http/codec/ResourceHttpMessageWriterTests.java <ide> import org.springframework.util.MimeTypeUtils; <ide> import org.springframework.util.StringUtils; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.hamcrest.Matchers.containsInAnyOrder; <ide> import static org.hamcrest.Matchers.is; <ide> import static org.hamcrest.Matchers.startsWith; <ide> import static org.junit.Assert.assertArrayEquals; <del>import static org.junit.Assert.assertThat; <ide> import static org.springframework.http.MediaType.TEXT_PLAIN; <ide> import static org.springframework.mock.http.server.reactive.test.MockServerHttpRequest.get; <ide> <ide><path>spring-web/src/test/java/org/springframework/http/codec/xml/Jaxb2XmlEncoderTests.java <ide> import org.springframework.http.codec.Pojo; <ide> <ide> import static java.nio.charset.StandardCharsets.UTF_8; <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.junit.Assert.*; <ide> import static org.springframework.core.io.buffer.DataBufferUtils.release; <ide> import static org.xmlunit.matchers.CompareMatcher.isSimilarTo; <ide><path>spring-web/src/test/java/org/springframework/http/converter/FormHttpMessageConverterTests.java <ide> import static org.hamcrest.CoreMatchers.allOf; <ide> import static org.hamcrest.CoreMatchers.endsWith; <ide> import static org.hamcrest.CoreMatchers.startsWith; <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.junit.Assert.assertEquals; <ide> import static org.junit.Assert.assertFalse; <ide> import static org.junit.Assert.assertNotNull; <ide> import static org.junit.Assert.assertNull; <del>import static org.junit.Assert.assertThat; <ide> import static org.junit.Assert.assertTrue; <ide> import static org.mockito.BDDMockito.never; <ide> import static org.mockito.BDDMockito.verify; <ide><path>spring-web/src/test/java/org/springframework/http/converter/ResourceHttpMessageConverterTests.java <ide> <ide> import static org.hamcrest.core.Is.*; <ide> import static org.hamcrest.core.IsInstanceOf.*; <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.junit.Assert.*; <ide> import static org.mockito.BDDMockito.any; <ide> import static org.mockito.BDDMockito.*; <ide><path>spring-web/src/test/java/org/springframework/http/converter/ResourceRegionHttpMessageConverterTests.java <ide> import org.springframework.http.MockHttpOutputMessage; <ide> import org.springframework.util.StringUtils; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.hamcrest.Matchers.is; <ide> import static org.junit.Assert.assertFalse; <del>import static org.junit.Assert.assertThat; <ide> import static org.junit.Assert.assertTrue; <ide> <ide> /** <ide><path>spring-web/src/test/java/org/springframework/http/converter/feed/AtomFeedHttpMessageConverterTests.java <ide> <ide> package org.springframework.http.converter.feed; <ide> <del>import static org.junit.Assert.*; <del>import static org.xmlunit.matchers.CompareMatcher.*; <del> <ide> import java.io.IOException; <ide> import java.io.InputStream; <ide> import java.nio.charset.Charset; <ide> import org.springframework.http.MockHttpInputMessage; <ide> import org.springframework.http.MockHttpOutputMessage; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <add>import static org.junit.Assert.*; <add>import static org.xmlunit.matchers.CompareMatcher.*; <add> <ide> /** <ide> * @author Arjen Poutsma <ide> */ 
<ide><path>spring-web/src/test/java/org/springframework/http/converter/feed/RssChannelHttpMessageConverterTests.java <ide> <ide> package org.springframework.http.converter.feed; <ide> <del>import static org.junit.Assert.*; <del> <ide> import java.io.IOException; <ide> import java.io.InputStream; <ide> import java.nio.charset.Charset; <ide> import org.springframework.http.MockHttpInputMessage; <ide> import org.springframework.http.MockHttpOutputMessage; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <add>import static org.junit.Assert.*; <add> <ide> /** <ide> * @author Arjen Poutsma <ide> */ <ide><path>spring-web/src/test/java/org/springframework/http/converter/json/MappingJackson2HttpMessageConverterTests.java <ide> import org.springframework.lang.Nullable; <ide> <ide> import static org.hamcrest.CoreMatchers.*; <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.junit.Assert.*; <ide> <ide> /** <ide><path>spring-web/src/test/java/org/springframework/http/converter/xml/Jaxb2RootElementHttpMessageConverterTests.java <ide> import org.springframework.http.MockHttpOutputMessage; <ide> import org.springframework.http.converter.HttpMessageNotReadableException; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.junit.Assert.*; <ide> import static org.xmlunit.diff.ComparisonType.*; <ide> import static org.xmlunit.diff.DifferenceEvaluators.*; <ide><path>spring-web/src/test/java/org/springframework/http/converter/xml/MappingJackson2XmlHttpMessageConverterTests.java <ide> import org.springframework.http.converter.json.MappingJacksonValue; <ide> <ide> import static org.hamcrest.CoreMatchers.*; <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.junit.Assert.*; <ide> <ide> /** <ide><path>spring-web/src/test/java/org/springframework/http/converter/xml/SourceHttpMessageConverterTests.java <ide> import org.springframework.http.converter.HttpMessageNotReadableException; <ide> import org.springframework.util.FileCopyUtils; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.junit.Assert.*; <ide> import static org.xmlunit.matchers.CompareMatcher.*; <ide> <ide><path>spring-web/src/test/java/org/springframework/http/server/reactive/AsyncIntegrationTests.java <ide> import org.springframework.http.ResponseEntity; <ide> import org.springframework.web.client.RestTemplate; <ide> <del>import static org.junit.Assert.*; <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> <ide> /** <ide> * @author Stephane Maldini <ide><path>spring-web/src/test/java/org/springframework/http/server/reactive/CookieIntegrationTests.java <ide> import org.springframework.web.client.RestTemplate; <ide> <ide> import static org.hamcrest.CoreMatchers.equalTo; <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.hamcrest.Matchers.*; <ide> import static org.junit.Assert.*; <ide> <ide><path>spring-web/src/test/java/org/springframework/web/bind/support/WebRequestDataBinderTests.java <ide> <ide> package org.springframework.web.bind.support; <ide> <del>import static org.hamcrest.Matchers.*; <del>import static org.junit.Assert.*; <del> <ide> import java.beans.PropertyEditorSupport; <ide> import java.util.Arrays; <ide> import java.util.HashMap; <ide> import org.springframework.web.context.request.ServletWebRequest; <ide> import org.springframework.web.multipart.support.StringMultipartFileEditor; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; 
<add>import static org.hamcrest.Matchers.*; <add>import static org.junit.Assert.*; <add> <ide> /** <ide> * @author Juergen Hoeller <ide> */ <ide><path>spring-web/src/test/java/org/springframework/web/client/AbstractMockWebServerTestCase.java <ide> <ide> import org.springframework.http.MediaType; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.junit.Assert.*; <ide> <ide> /** <ide><path>spring-web/src/test/java/org/springframework/web/client/HttpStatusCodeExceptionTests.java <ide> import org.springframework.http.HttpStatus; <ide> <ide> import static org.hamcrest.CoreMatchers.equalTo; <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.junit.Assert.*; <ide> <ide> /** <ide><path>spring-web/src/test/java/org/springframework/web/client/RestTemplateIntegrationTests.java <ide> import com.fasterxml.jackson.annotation.JsonTypeInfo; <ide> import com.fasterxml.jackson.annotation.JsonTypeName; <ide> import com.fasterxml.jackson.annotation.JsonView; <del>import org.hamcrest.Matchers; <del>import org.junit.Assume; <ide> import org.junit.Before; <ide> import org.junit.Test; <ide> import org.junit.runner.RunWith; <ide> import org.springframework.util.MultiValueMap; <ide> <ide> import static org.junit.Assert.*; <add>import static org.junit.Assume.assumeFalse; <ide> import static org.springframework.http.HttpMethod.POST; <ide> <ide> /** <ide> public void postForObject() throws URISyntaxException { <ide> @Test <ide> public void patchForObject() throws URISyntaxException { <ide> // JDK client does not support the PATCH method <del> Assume.assumeThat(this.clientHttpRequestFactory, <del> Matchers.not(Matchers.instanceOf(SimpleClientHttpRequestFactory.class))); <add> assumeFalse(this.clientHttpRequestFactory instanceof SimpleClientHttpRequestFactory); <add> <ide> String s = template.patchForObject(baseUrl + "/{method}", helloWorld, String.class, "patch"); <ide> assertEquals("Invalid content", helloWorld, s); <ide> } <ide><path>spring-web/src/test/java/org/springframework/web/context/request/async/StandardServletAsyncWebRequestTests.java <ide> <ide> package org.springframework.web.context.request.async; <ide> <del> <ide> import java.util.function.Consumer; <ide> <ide> import javax.servlet.AsyncEvent; <ide> import org.springframework.mock.web.test.MockHttpServletRequest; <ide> import org.springframework.mock.web.test.MockHttpServletResponse; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.hamcrest.Matchers.containsString; <ide> import static org.junit.Assert.assertEquals; <ide> import static org.junit.Assert.assertFalse; <ide> import static org.junit.Assert.assertNotNull; <ide> import static org.junit.Assert.assertSame; <del>import static org.junit.Assert.assertThat; <ide> import static org.junit.Assert.assertTrue; <ide> import static org.junit.Assert.fail; <ide> import static org.mockito.BDDMockito.mock; <ide><path>spring-web/src/test/java/org/springframework/web/context/support/AnnotationConfigWebApplicationContextTests.java <ide> import org.springframework.context.annotation.Configuration; <ide> <ide> import static org.hamcrest.CoreMatchers.*; <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.junit.Assert.*; <ide> <ide> /** <ide><path>spring-web/src/test/java/org/springframework/web/context/support/StandardServletEnvironmentTests.java <ide> import org.springframework.tests.mock.jndi.SimpleNamingContextBuilder; <ide> <ide> import static org.hamcrest.CoreMatchers.*; 
<del>import static org.junit.Assert.*; <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> <ide> /** <ide> * Unit tests for {@link StandardServletEnvironment}. <ide><path>spring-web/src/test/java/org/springframework/web/cors/DefaultCorsProcessorTests.java <ide> import org.springframework.mock.web.test.MockHttpServletRequest; <ide> import org.springframework.mock.web.test.MockHttpServletResponse; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.hamcrest.Matchers.contains; <ide> import static org.junit.Assert.*; <ide> <ide><path>spring-web/src/test/java/org/springframework/web/cors/reactive/DefaultCorsProcessorTests.java <ide> import org.springframework.web.cors.CorsConfiguration; <ide> import org.springframework.web.server.ServerWebExchange; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.hamcrest.Matchers.contains; <ide> import static org.junit.Assert.assertEquals; <ide> import static org.junit.Assert.assertFalse; <ide> import static org.junit.Assert.assertNull; <del>import static org.junit.Assert.assertThat; <ide> import static org.junit.Assert.assertTrue; <ide> import static org.springframework.http.HttpHeaders.ACCESS_CONTROL_ALLOW_HEADERS; <ide> import static org.springframework.http.HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN; <ide><path>spring-web/src/test/java/org/springframework/web/filter/ForwardedHeaderFilterTests.java <ide> package org.springframework.web.filter; <ide> <ide> import java.io.IOException; <del>import java.net.URI; <ide> import java.util.Enumeration; <ide> import javax.servlet.DispatcherType; <ide> import javax.servlet.Filter; <ide><path>spring-web/src/test/java/org/springframework/web/filter/reactive/HiddenHttpMethodFilterTests.java <ide> import org.springframework.web.server.ServerWebExchange; <ide> import org.springframework.web.server.WebFilterChain; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.junit.Assert.assertEquals; <del>import static org.junit.Assert.assertThat; <ide> <ide> /** <ide> * Tests for {@link HiddenHttpMethodFilter}. 
<ide><path>spring-web/src/test/java/org/springframework/web/method/support/InvocableHandlerMethodTests.java <ide> import org.springframework.web.context.request.ServletWebRequest; <ide> import org.springframework.web.method.ResolvableMethod; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.hamcrest.Matchers.*; <ide> import static org.junit.Assert.*; <ide> <ide><path>spring-web/src/test/java/org/springframework/web/multipart/support/StandardMultipartHttpServletRequestTests.java <ide> import org.springframework.web.multipart.MultipartFile; <ide> <ide> import static org.hamcrest.CoreMatchers.*; <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.junit.Assert.*; <ide> <ide> /** <ide><path>spring-web/src/test/java/org/springframework/web/util/UriComponentsBuilderTests.java <ide> import org.springframework.util.MultiValueMap; <ide> import org.springframework.util.StringUtils; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.hamcrest.Matchers.*; <ide> import static org.junit.Assert.*; <ide> <ide><path>spring-web/src/test/java/org/springframework/web/util/UriComponentsTests.java <ide> <ide> import org.junit.Test; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.hamcrest.Matchers.*; <ide> import static org.junit.Assert.*; <ide> import static org.springframework.web.util.UriComponentsBuilder.*; <ide><path>spring-web/src/test/java/org/springframework/web/util/pattern/PathPatternTests.java <ide> import org.springframework.web.util.pattern.PathPattern.PathRemainingMatchInfo; <ide> <ide> import static org.hamcrest.CoreMatchers.containsString; <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.junit.Assert.assertEquals; <ide> import static org.junit.Assert.assertFalse; <ide> import static org.junit.Assert.assertNotNull; <ide> import static org.junit.Assert.assertNull; <del>import static org.junit.Assert.assertThat; <ide> import static org.junit.Assert.assertTrue; <ide> import static org.junit.Assert.fail; <ide> <ide><path>spring-webmvc/src/test/java/org/springframework/web/context/ContextLoaderTests.java <ide> import org.springframework.web.servlet.SimpleWebApplicationContext; <ide> <ide> import static org.hamcrest.CoreMatchers.*; <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.junit.Assert.*; <ide> <ide> /** <ide><path>spring-webmvc/src/test/java/org/springframework/web/servlet/DispatcherServletTests.java <ide> import org.springframework.web.util.WebUtils; <ide> <ide> import static org.hamcrest.CoreMatchers.*; <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.junit.Assert.*; <ide> import static org.mockito.Mockito.*; <ide> <ide><path>spring-webmvc/src/test/java/org/springframework/web/servlet/config/AnnotationDrivenBeanDefinitionParserTests.java <ide> import org.springframework.web.servlet.mvc.method.annotation.ServletWebArgumentResolverAdapter; <ide> import org.springframework.web.util.UrlPathHelper; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.junit.Assert.*; <ide> <ide> /** <ide><path>spring-webmvc/src/test/java/org/springframework/web/servlet/config/MvcNamespaceTests.java <ide> import org.springframework.web.servlet.view.tiles3.TilesViewResolver; <ide> import org.springframework.web.util.UrlPathHelper; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.hamcrest.Matchers.*; <ide> 
import static org.junit.Assert.*; <ide> <ide><path>spring-webmvc/src/test/java/org/springframework/web/servlet/config/annotation/ResourceHandlerRegistryTests.java <ide> import org.springframework.web.servlet.resource.WebJarsResourceResolver; <ide> import org.springframework.web.util.UrlPathHelper; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.junit.Assert.assertEquals; <ide> import static org.junit.Assert.assertFalse; <ide> import static org.junit.Assert.assertNotNull; <ide> import static org.junit.Assert.assertNull; <del>import static org.junit.Assert.assertThat; <ide> import static org.junit.Assert.assertTrue; <ide> <ide> /** <ide><path>spring-webmvc/src/test/java/org/springframework/web/servlet/handler/HandlerMappingTests.java <ide> import javax.servlet.http.HttpServletResponse; <ide> <ide> import org.hamcrest.Matchers; <del>import org.junit.Assert; <ide> import org.junit.Before; <ide> import org.junit.Test; <ide> import org.mockito.Mockito; <ide> import org.springframework.web.servlet.HandlerInterceptor; <ide> import org.springframework.web.servlet.support.WebContentGenerator; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <add> <ide> /** <ide> * Unit tests for <ide> * {@link org.springframework.web.servlet.handler.HandlerMappingTests}. <ide> public void orderedInterceptors() throws Exception { <ide> this.handlerMapping.setInterceptors(mappedInterceptor1, i2, mappedInterceptor3, i4); <ide> this.handlerMapping.setApplicationContext(this.context); <ide> HandlerExecutionChain chain = this.handlerMapping.getHandlerExecutionChain(new SimpleHandler(), this.request); <del> Assert.assertThat(chain.getInterceptors(), Matchers.arrayContaining( <add> assertThat(chain.getInterceptors(), Matchers.arrayContaining( <ide> mappedInterceptor1.getInterceptor(), i2, mappedInterceptor3.getInterceptor(), i4)); <ide> } <ide> <ide><path>spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/WebContentInterceptorTests.java <ide> import org.springframework.mock.web.test.MockHttpServletRequest; <ide> import org.springframework.mock.web.test.MockHttpServletResponse; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.hamcrest.Matchers.*; <del>import static org.junit.Assert.*; <ide> <ide> /** <ide> * @author Rick Evans <ide><path>spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/method/RequestMappingInfoHandlerMappingTests.java <ide> import org.springframework.web.servlet.mvc.condition.RequestMethodsRequestCondition; <ide> import org.springframework.web.util.UrlPathHelper; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.hamcrest.Matchers.*; <ide> import static org.junit.Assert.*; <ide> <ide><path>spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/method/annotation/HttpEntityMethodProcessorMockTests.java <ide> <ide> import static java.time.Instant.*; <ide> import static java.time.format.DateTimeFormatter.*; <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.junit.Assert.*; <ide> import static org.mockito.BDDMockito.*; <ide> import static org.springframework.http.MediaType.*; <ide><path>spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/method/annotation/MvcUriComponentsBuilderTests.java <ide> import org.springframework.web.util.UriComponents; <ide> import org.springframework.web.util.UriComponentsBuilder; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static 
org.hamcrest.Matchers.*; <ide> import static org.junit.Assert.*; <ide> import static org.springframework.web.servlet.mvc.method.annotation.MvcUriComponentsBuilder.*; <ide><path>spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/method/annotation/RequestMappingHandlerAdapterIntegrationTests.java <ide> import org.springframework.web.servlet.ModelAndView; <ide> import org.springframework.web.util.UriComponentsBuilder; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.junit.Assert.*; <ide> <ide> /** <ide><path>spring-webmvc/src/test/java/org/springframework/web/servlet/resource/AppCacheManifestTransformerTests.java <ide> import org.springframework.mock.web.test.MockHttpServletRequest; <ide> import org.springframework.util.FileCopyUtils; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.hamcrest.Matchers.*; <ide> import static org.junit.Assert.*; <ide> <ide><path>spring-webmvc/src/test/java/org/springframework/web/servlet/resource/ResourceUrlProviderTests.java <ide> import org.springframework.web.context.support.AnnotationConfigWebApplicationContext; <ide> import org.springframework.web.servlet.handler.SimpleUrlHandlerMapping; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.junit.Assert.*; <ide> import static org.mockito.ArgumentMatchers.any; <ide> import static org.mockito.Mockito.mock; <ide><path>spring-webmvc/src/test/java/org/springframework/web/servlet/resource/VersionResourceResolverTests.java <ide> import org.springframework.core.io.Resource; <ide> import org.springframework.mock.web.test.MockHttpServletRequest; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.hamcrest.Matchers.*; <ide> import static org.junit.Assert.*; <ide> import static org.mockito.BDDMockito.*; <ide><path>spring-webmvc/src/test/java/org/springframework/web/servlet/tags/UrlTagTests.java <ide> import org.springframework.mock.web.test.MockHttpServletRequest; <ide> import org.springframework.mock.web.test.MockPageContext; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.hamcrest.Matchers.*; <ide> import static org.junit.Assert.*; <ide> <ide><path>spring-webmvc/src/test/java/org/springframework/web/servlet/view/ResourceBundleViewResolverTests.java <ide> import org.springframework.web.servlet.View; <ide> <ide> import static org.hamcrest.CoreMatchers.*; <del> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.junit.Assert.*; <ide> import static org.junit.Assume.*; <ide> <ide><path>spring-webmvc/src/test/java/org/springframework/web/servlet/view/feed/AtomFeedViewTests.java <ide> import java.util.List; <ide> import java.util.Map; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.junit.Assert.assertEquals; <del>import static org.junit.Assert.assertThat; <ide> <ide> /** <ide> * @author Arjen Poutsma <ide><path>spring-webmvc/src/test/java/org/springframework/web/servlet/view/feed/RssFeedViewTests.java <ide> import java.util.List; <ide> import java.util.Map; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.junit.Assert.assertEquals; <del>import static org.junit.Assert.assertThat; <ide> import static org.xmlunit.matchers.CompareMatcher.isSimilarTo; <ide> <ide> /** <ide><path>spring-webmvc/src/test/java/org/springframework/web/servlet/view/freemarker/FreeMarkerConfigurerTests.java <ide> import 
org.springframework.ui.freemarker.FreeMarkerTemplateUtils; <ide> import org.springframework.ui.freemarker.SpringTemplateLoader; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.hamcrest.Matchers.*; <ide> import static org.junit.Assert.*; <ide> <ide><path>spring-webmvc/src/test/java/org/springframework/web/servlet/view/groovy/GroovyMarkupConfigurerTests.java <ide> * See the License for the specific language governing permissions and <ide> * limitations under the License. <ide> */ <add> <ide> package org.springframework.web.servlet.view.groovy; <ide> <ide> import java.io.IOException; <ide> import groovy.text.markup.MarkupTemplateEngine; <ide> import groovy.text.markup.TemplateConfiguration; <ide> import org.hamcrest.Matchers; <del>import org.junit.Assert; <ide> import org.junit.Before; <ide> import org.junit.Test; <ide> <ide> import org.springframework.context.i18n.LocaleContextHolder; <ide> import org.springframework.context.support.StaticApplicationContext; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.junit.Assert.*; <ide> <ide> /** <ide> public TestTemplateEngine() { <ide> @Test <ide> public void resolveSampleTemplate() throws Exception { <ide> URL url = this.configurer.resolveTemplate(getClass().getClassLoader(), TEMPLATE_PREFIX + "test.tpl"); <del> Assert.assertNotNull(url); <add> assertNotNull(url); <ide> } <ide> <ide> @Test <ide> public void resolveI18nFullLocale() throws Exception { <ide> LocaleContextHolder.setLocale(Locale.GERMANY); <ide> URL url = this.configurer.resolveTemplate(getClass().getClassLoader(), TEMPLATE_PREFIX + "i18n.tpl"); <del> Assert.assertNotNull(url); <del> Assert.assertThat(url.getPath(), Matchers.containsString("i18n_de_DE.tpl")); <add> assertNotNull(url); <add> assertThat(url.getPath(), Matchers.containsString("i18n_de_DE.tpl")); <ide> } <ide> <ide> @Test <ide> public void resolveI18nPartialLocale() throws Exception { <ide> LocaleContextHolder.setLocale(Locale.FRANCE); <ide> URL url = this.configurer.resolveTemplate(getClass().getClassLoader(), TEMPLATE_PREFIX + "i18n.tpl"); <del> Assert.assertNotNull(url); <del> Assert.assertThat(url.getPath(), Matchers.containsString("i18n_fr.tpl")); <add> assertNotNull(url); <add> assertThat(url.getPath(), Matchers.containsString("i18n_fr.tpl")); <ide> } <ide> <ide> @Test <ide> public void resolveI18nDefaultLocale() throws Exception { <ide> LocaleContextHolder.setLocale(Locale.US); <ide> URL url = this.configurer.resolveTemplate(getClass().getClassLoader(), TEMPLATE_PREFIX + "i18n.tpl"); <del> Assert.assertNotNull(url); <del> Assert.assertThat(url.getPath(), Matchers.containsString("i18n.tpl")); <add> assertNotNull(url); <add> assertThat(url.getPath(), Matchers.containsString("i18n.tpl")); <ide> } <ide> <ide> @Test(expected = IOException.class) <ide> public void failMissingTemplate() throws Exception { <ide> LocaleContextHolder.setLocale(Locale.US); <ide> this.configurer.resolveTemplate(getClass().getClassLoader(), TEMPLATE_PREFIX + "missing.tpl"); <del> Assert.fail(); <add> fail(); <ide> } <add> <ide> } <ide><path>spring-webmvc/src/test/java/org/springframework/web/servlet/view/groovy/GroovyMarkupViewTests.java <ide> * See the License for the specific language governing permissions and <ide> * limitations under the License. 
<ide> */ <add> <ide> package org.springframework.web.servlet.view.groovy; <ide> <ide> import java.io.Reader; <ide> import org.springframework.mock.web.test.MockServletContext; <ide> import org.springframework.web.context.WebApplicationContext; <ide> <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.junit.Assert.*; <ide> import static org.mockito.BDDMockito.*; <ide> <ide><path>spring-webmvc/src/test/java/org/springframework/web/servlet/view/json/MappingJackson2JsonViewTests.java <ide> import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider; <ide> <ide> import static org.hamcrest.CoreMatchers.*; <add>import static org.hamcrest.MatcherAssert.assertThat; <ide> import static org.junit.Assert.*; <ide> import static org.mockito.Mockito.*; <ide>
103
Text
Text
add link to forums thread
6009668c631aa5773c66aa30c6bfd9c191e2a6be
<ide><path>ISSUES.md <ide> In general the best way to figure out what works the best is learn from issues p <ide> <ide> Thank you for reading this somewhat lengthy document. We would like to conclude that these are not absolute rules, but a friendly advice that will help maximize the chances for us to understand what you are trying to communicate, reproduce the problem then resolve it to your satisfaction and the benefit of the whole community. <ide> <del>If after reading this document there are remaining questions on how and why or there is a need for further elucidation, please, don't hesitate to ask "How do I?" type of questions in the following sub-section of the forums: [XXX: @sgugger]. <add>If after reading this document there are remaining questions on how and why or there is a need for further elucidation, please, don't hesitate to ask your question in [this thread](https://discuss.huggingface.co/t/how-to-request-support/3128).
1
PHP
PHP
fix styleci issue
e01f5483a70c853314ead2a3f0a3d075f5c3e42e
<ide><path>src/Illuminate/Database/Schema/Grammars/SqlServerGrammar.php <ide> public function compileDisableForeignKeyConstraints() <ide> } <ide> <ide> /** <del> * Compile the command to drop all foreign keys <add> * Compile the command to drop all foreign keys. <ide> * <ide> * @return string <ide> */
1
Text
Text
fix encoderdecoder wikisplit example
0724c0f3a2d302246d0bd0b7d2f721fa902dee1b
<ide><path>model_cards/google/roberta2roberta_L-24_wikisplit/README.md <ide> model = AutoModelForSeq2SeqLM.from_pretrained("google/roberta2roberta_L-24_wikis <ide> <ide> long_sentence = """Due to the hurricane, Lobsterfest has been canceled, making Bob very happy about it and he decides to open Bob 's Burgers for customers who were planning on going to Lobsterfest.""" <ide> <del>input_ids = tokenizer(long_sentence, return_tensors="pt").input_ids <add>input_ids = tokenizer(tokenizer.bos_token + long_sentence + tokenizer.eos_token, return_tensors="pt").input_ids <ide> output_ids = model.generate(input_ids)[0] <ide> print(tokenizer.decode(output_ids, skip_special_tokens=True)) <ide> # should output <del># Due Due hurricane, Lobsterfest has been canceled, making Bob very happy about it. He decides to open B <del># ob's Burgers for customers who were planning on going to Lobsterfest.com. <add># Due to the hurricane, Lobsterfest has been canceled, making Bob very happy about it. He decides to open Bob's Burgers for customers who were planning on going to Lobsterfest. <ide> ```
1
Ruby
Ruby
pass explicit sort to handle apfs
e98d0fda86719794fd35f42b25a22fad408fd7ef
<ide><path>Library/Homebrew/tap.rb <ide> def self.each <ide> <ide> # an array of all installed {Tap} names. <ide> def self.names <del> map(&:name) <add> map(&:name).sort <ide> end <ide> <ide> # @private
1
Java
Java
fix javadoc warnings on empty <p>
27ce955dfc9b2457ae09cdbbe3c9a0542c5eb2a6
<ide><path>src/main/java/io/reactivex/Flowable.java <ide> public final T blockingFirst(T defaultItem) { <ide> * <em>Note:</em> the method will only return if the upstream terminates or the current <ide> * thread is interrupted. <ide> * <p> <del> * <p>This method executes the {@code Consumer} on the current thread while <add> * This method executes the {@code Consumer} on the current thread while <ide> * {@link #subscribe(Consumer)} executes the consumer on the original caller thread of the <ide> * sequence. <ide> * <dl> <ide><path>src/main/java/io/reactivex/Observable.java <ide> public final T blockingFirst(T defaultItem) { <ide> * <em>Note:</em> the method will only return if the upstream terminates or the current <ide> * thread is interrupted. <ide> * <p> <del> * <p>This method executes the {@code Consumer} on the current thread while <add> * This method executes the {@code Consumer} on the current thread while <ide> * {@link #subscribe(Consumer)} executes the consumer on the original caller thread of the <ide> * sequence. <ide> * <dl>
2
Go
Go
fix tarsum for go 1.10
a422774e593b33bd287d9890544ad9e09b380d8c
<ide><path>pkg/tarsum/tarsum.go <ide> func (sth simpleTHash) Hash() hash.Hash { return sth.h() } <ide> <ide> func (ts *tarSum) encodeHeader(h *tar.Header) error { <ide> for _, elem := range ts.headerSelector.selectHeaders(h) { <add> // Ignore these headers to be compatible with versions <add> // before go 1.10 <add> if elem[0] == "gname" || elem[0] == "uname" { <add> elem[1] = "" <add> } <ide> if _, err := ts.h.Write([]byte(elem[0] + elem[1])); err != nil { <ide> return err <ide> } <ide> func (ts *tarSum) Read(buf []byte) (int, error) { <ide> ts.first = false <ide> } <ide> <add> if _, err := ts.tarW.Write(buf2[:n]); err != nil { <add> return 0, err <add> } <add> <ide> currentHeader, err := ts.tarR.Next() <ide> if err != nil { <ide> if err == io.EOF { <ide> func (ts *tarSum) Read(buf []byte) (int, error) { <ide> return 0, err <ide> } <ide> ts.finished = true <del> return n, nil <add> return ts.bufWriter.Read(buf) <ide> } <del> return n, err <add> return 0, err <ide> } <add> <ide> ts.currentFile = path.Join(".", path.Join("/", currentHeader.Name)) <ide> if err := ts.encodeHeader(currentHeader); err != nil { <ide> return 0, err <ide> } <ide> if err := ts.tarW.WriteHeader(currentHeader); err != nil { <ide> return 0, err <ide> } <del> if _, err := ts.tarW.Write(buf2[:n]); err != nil { <del> return 0, err <del> } <del> ts.tarW.Flush() <add> <ide> if _, err := io.Copy(ts.writer, ts.bufTar); err != nil { <ide> return 0, err <ide> } <ide> ts.writer.Flush() <ide> <ide> return ts.bufWriter.Read(buf) <ide> } <del> return n, err <add> return 0, err <ide> } <ide> <ide> // Filling the hash buffer <ide> func (ts *tarSum) Read(buf []byte) (int, error) { <ide> if _, err = ts.tarW.Write(buf2[:n]); err != nil { <ide> return 0, err <ide> } <del> ts.tarW.Flush() <ide> <ide> // Filling the output writer <ide> if _, err = io.Copy(ts.writer, ts.bufTar); err != nil { <ide><path>pkg/tarsum/tarsum_test.go <ide> var testLayers = []testLayer{ <ide> { <ide> // this tar has two files with the same path <ide> filename: "testdata/collision/collision-0.tar", <del> tarsum: "tarsum+sha256:08653904a68d3ab5c59e65ef58c49c1581caa3c34744f8d354b3f575ea04424a"}, <add> tarsum: "tarsum+sha256:7cabb5e9128bb4a93ff867b9464d7c66a644ae51ea2e90e6ef313f3bef93f077"}, <ide> { <ide> // this tar has the same two files (with the same path), but reversed order. 
ensuring is has different hash than above <ide> filename: "testdata/collision/collision-1.tar", <del> tarsum: "tarsum+sha256:b51c13fbefe158b5ce420d2b930eef54c5cd55c50a2ee4abdddea8fa9f081e0d"}, <add> tarsum: "tarsum+sha256:805fd393cfd58900b10c5636cf9bab48b2406d9b66523122f2352620c85dc7f9"}, <ide> { <ide> // this tar has newer of collider-0.tar, ensuring is has different hash <ide> filename: "testdata/collision/collision-2.tar", <del> tarsum: "tarsum+sha256:381547080919bb82691e995508ae20ed33ce0f6948d41cafbeb70ce20c73ee8e"}, <add> tarsum: "tarsum+sha256:85d2b8389f077659d78aca898f9e632ed9161f553f144aef100648eac540147b"}, <ide> { <ide> // this tar has newer of collider-1.tar, ensuring is has different hash <ide> filename: "testdata/collision/collision-3.tar", <del> tarsum: "tarsum+sha256:f886e431c08143164a676805205979cd8fa535dfcef714db5515650eea5a7c0f"}, <add> tarsum: "tarsum+sha256:cbe4dee79fe979d69c16c2bccd032e3205716a562f4a3c1ca1cbeed7b256eb19"}, <ide> { <ide> options: &sizedOptions{1, 1024 * 1024, false, false}, // a 1mb file (in memory) <ide> tarsum: "tarsum+md5:0d7529ec7a8360155b48134b8e599f53", <ide> func TestIteration(t *testing.T) { <ide> []byte(""), <ide> }, <ide> { <del> "tarsum.dev+sha256:b38166c059e11fb77bef30bf16fba7584446e80fcc156ff46d47e36c5305d8ef", <add> "tarsum.dev+sha256:862964db95e0fa7e42836ae4caab3576ab1df8d275720a45bdd01a5a3730cc63", <ide> VersionDev, <ide> &tar.Header{ <ide> Name: "another.txt", <ide> func TestIteration(t *testing.T) { <ide> []byte("test"), <ide> }, <ide> { <del> "tarsum.dev+sha256:4cc2e71ac5d31833ab2be9b4f7842a14ce595ec96a37af4ed08f87bc374228cd", <add> "tarsum.dev+sha256:4b1ba03544b49d96a32bacc77f8113220bd2f6a77e7e6d1e7b33cd87117d88e7", <ide> VersionDev, <ide> &tar.Header{ <ide> Name: "xattrs.txt", <ide> func TestIteration(t *testing.T) { <ide> []byte("test"), <ide> }, <ide> { <del> "tarsum.dev+sha256:65f4284fa32c0d4112dd93c3637697805866415b570587e4fd266af241503760", <add> "tarsum.dev+sha256:410b602c898bd4e82e800050f89848fc2cf20fd52aa59c1ce29df76b878b84a6", <ide> VersionDev, <ide> &tar.Header{ <ide> Name: "xattrs.txt", <ide> func TestIteration(t *testing.T) { <ide> []byte("test"), <ide> }, <ide> { <del> "tarsum+sha256:c12bb6f1303a9ddbf4576c52da74973c00d14c109bcfa76b708d5da1154a07fa", <add> "tarsum+sha256:b1f97eab73abd7593c245e51070f9fbdb1824c6b00a0b7a3d7f0015cd05e9e86", <ide> Version0, <ide> &tar.Header{ <ide> Name: "xattrs.txt",
2
Javascript
Javascript
use custom inspection for linked lists
d8baf67d4af2a9e388c82eccab2d3366a0a61c7e
<ide><path>lib/internal/timers.js <ide> const { <ide> } = require('internal/errors').codes; <ide> const { validateNumber } = require('internal/validators'); <ide> <add>const { inspect } = require('util'); <add> <ide> // Timeout values > TIMEOUT_MAX are set to 1. <ide> const TIMEOUT_MAX = 2 ** 31 - 1; <ide> <ide> function Timeout(callback, after, args, isRepeat) { <ide> initAsyncResource(this, 'Timeout'); <ide> } <ide> <add>// Make sure the linked list only shows the minimal necessary information. <add>Timeout.prototype[inspect.custom] = function(_, options) { <add> return inspect(this, { <add> ...options, <add> // Only inspect one level. <add> depth: 0, <add> // It should not recurse. <add> customInspect: false <add> }); <add>}; <add> <ide> Timeout.prototype.refresh = function() { <ide> if (this[kRefed]) <ide> getTimers().active(this); <ide><path>lib/timers.js <ide> function TimersList(expiry, msecs) { <ide> this.priorityQueuePosition = null; <ide> } <ide> <add>// Make sure the linked list only shows the minimal necessary information. <add>TimersList.prototype[util.inspect.custom] = function(_, options) { <add> return util.inspect(this, { <add> ...options, <add> // Only inspect one level. <add> depth: 0, <add> // It should not recurse. <add> customInspect: false <add> }); <add>}; <add> <ide> const { _tickCallback: runNextTicks } = process; <ide> function processTimers(now) { <ide> debug('process timer lists %d', now); <ide><path>test/parallel/test-http2-socket-proxy.js <ide> if (!common.hasCrypto) <ide> const assert = require('assert'); <ide> const h2 = require('http2'); <ide> const net = require('net'); <add>const util = require('util'); <ide> <ide> const { kTimeout } = require('internal/timers'); <ide> <ide> server.on('stream', common.mustCall(function(stream, headers) { <ide> socket.setTimeout(987); <ide> assert.strictEqual(session[kTimeout]._idleTimeout, 987); <ide> <add> // The indentation is corrected depending on the depth. <add> let inspectedTimeout = util.inspect(session[kTimeout]); <add> assert(inspectedTimeout.includes(' _idlePrev: [TimersList]')); <add> assert(inspectedTimeout.includes(' _idleNext: [TimersList]')); <add> assert(!inspectedTimeout.includes(' _idleNext: [TimersList]')); <add> <add> inspectedTimeout = util.inspect([ session[kTimeout] ]); <add> assert(inspectedTimeout.includes(' _idlePrev: [TimersList]')); <add> assert(inspectedTimeout.includes(' _idleNext: [TimersList]')); <add> assert(!inspectedTimeout.includes(' _idleNext: [TimersList]')); <add> <add> const inspectedTimersList = util.inspect([[ session[kTimeout]._idlePrev ]]); <add> assert(inspectedTimersList.includes(' _idlePrev: [Timeout]')); <add> assert(inspectedTimersList.includes(' _idleNext: [Timeout]')); <add> assert(!inspectedTimersList.includes(' _idleNext: [Timeout]')); <add> <ide> common.expectsError(() => socket.destroy, errMsg); <ide> common.expectsError(() => socket.emit, errMsg); <ide> common.expectsError(() => socket.end, errMsg);
3
Javascript
Javascript
add more jsdoc to make eslint happy
1a2bbc5303194054653a45d2d88df7b030a97a5c
<ide><path>lib/compareLocations.js <ide> <ide> /** <ide> * Compare two locations <del> * @param {string|NodeLocation} a <del> * @param {string|NodeLocation} b <del> * @returns {-1|0|1} <add> * @param {string|NodeLocation} a A location node <add> * @param {string|NodeLocation} b A location node <add> * @returns {-1|0|1} sorting comparator value <ide> */ <ide> module.exports = (a, b) => { <ide> if (typeof a === "string") {
1
Text
Text
add typescript to next-mdx readme
af890406221ff7af4310ef1b4ac56e54913db889
<ide><path>packages/next-mdx/readme.md <ide> module.exports = withMDX({ <ide> pageExtensions: ['js', 'jsx', 'mdx'], <ide> }) <ide> ``` <add> <add>## Typescript <add> <add>Follow [this guide](https://mdxjs.com/advanced/typescript) from the MDX docs.
1
Javascript
Javascript
remove progress events
7764dd95c02c14211ec225dda911ec9b6b29ff60
<ide><path>src/util.js <ide> function isPDFFunction(v) { <ide> * Promise/A+ spec. Some notable differences from other promise libaries are: <ide> * - There currently isn't a seperate deferred and promise object. <ide> * - Unhandled rejections eventually show an error if they aren't handled. <del> * - Progress events are supported. <ide> * <ide> * Based off of the work in: <ide> * https://bugzilla.mozilla.org/show_bug.cgi?id=810490 <ide> var Promise = PDFJS.Promise = (function PromiseClosure() { <ide> var STATUS_PENDING = 0; <ide> var STATUS_RESOLVED = 1; <ide> var STATUS_REJECTED = 2; <del> var STATUS_PROGRESS = 3; <ide> <ide> // In an attempt to avoid silent exceptions, unhandled rejections are <ide> // tracked and if they aren't handled in a certain amount of time an <ide> var Promise = PDFJS.Promise = (function PromiseClosure() { <ide> } <ide> <ide> this.handlers = this.handlers.concat(promise._handlers); <del> if (promise._status !== STATUS_PROGRESS) { <del> promise._handlers = []; <del> } <add> promise._handlers = []; <ide> <ide> if (this.running) { <ide> return; <ide> var Promise = PDFJS.Promise = (function PromiseClosure() { <ide> if (typeof(handler.onResolve) == 'function') { <ide> nextValue = handler.onResolve(nextValue); <ide> } <del> } else if (nextStatus === STATUS_PROGRESS) { <del> if (typeof(handler.onProgress) === 'function') { <del> nextValue = handler.onProgress(nextValue); <del> } <ide> } else if (typeof(handler.onReject) === 'function') { <ide> nextValue = handler.onReject(nextValue); <ide> nextStatus = STATUS_RESOLVED; <ide> var Promise = PDFJS.Promise = (function PromiseClosure() { <ide> this._updateStatus(STATUS_REJECTED, reason); <ide> }, <ide> <del> notify: function Promise_notify(update) { <del> this._updateStatus(STATUS_PROGRESS, update); <del> }, <del> <del> then: function Promise_then(onResolve, onReject, onProgress) { <add> then: function Promise_then(onResolve, onReject) { <ide> var nextPromise = new Promise(); <ide> this._handlers.push({ <ide> thisPromise: this, <ide> onResolve: onResolve, <ide> onReject: onReject, <del> onProgress: onProgress, <ide> nextPromise: nextPromise <ide> }); <ide> HandlerManager.scheduleHandlers(this);
1
Javascript
Javascript
add assertStableRerender rendering test helper
6a58171a27393cb5719dcaa52ac49c56611c2e48
<ide><path>packages/ember-glimmer/tests/utils/abstract-test-case.js <ide> export class TestCase { <ide> this.assertSameNode(newSnapshot[i], oldSnapshot[i]); <ide> } <ide> } <add> <add> assertStableRerender() { <add> this.takeSnapshot(); <add> this.runTask(() => this.rerender()); <add> this.assertInvariants(); <add> } <ide> } <ide> <ide> function isMarker(node) {
1
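assertStableRerender bundles the usual takeSnapshot / rerender / assertInvariants dance into a single call. A hypothetical usage in a rendering test — the import path and surrounding helpers are assumed from the same test suite, not shown in the diff:

```js
// Import path and helper names assumed from the surrounding ember-glimmer test utils.
import { moduleFor, RenderingTest } from '../../utils/test-case';

moduleFor('Syntax test: stable rerender', class extends RenderingTest {
  ['@test bound text survives a no-op rerender']() {
    this.render('{{name}}', { name: 'Alice' });

    this.assertText('Alice');

    // Snapshots the DOM, schedules a rerender, and asserts the same nodes are still attached.
    this.assertStableRerender();
  }
});
```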
Go
Go
keep linebreaks and generalize code
c349b4d56c338bc43c81667bb927518b923998cb
<ide><path>utils/utils.go <ide> func GetResolvConf() ([]byte, error) { <ide> // CheckLocalDns looks into the /etc/resolv.conf, <ide> // it returns true if there is a local nameserver or if there is no nameserver. <ide> func CheckLocalDns(resolvConf []byte) bool { <del> var parsedResolvConf = ParseResolvConf(resolvConf) <add> var parsedResolvConf = StripComments(resolvConf, []byte("#")) <ide> if !bytes.Contains(parsedResolvConf, []byte("nameserver")) { <ide> return true <ide> } <ide> func CheckLocalDns(resolvConf []byte) bool { <ide> return false <ide> } <ide> <del>// ParseResolvConf parses the resolv.conf file into lines and strips away comments. <del>func ParseResolvConf(resolvConf []byte) []byte { <del> lines := bytes.Split(resolvConf, []byte("\n")) <del> var noCommentsResolvConf []byte <add>// StripComments parses input into lines and strips away comments. <add>func StripComments(input []byte, commentMarker []byte) []byte { <add> lines := bytes.Split(input, []byte("\n")) <add> var output []byte <ide> for _, currentLine := range lines { <del> var cleanLine = bytes.TrimLeft(currentLine, " \t") <del> var commentIndex = bytes.Index(cleanLine, []byte("#")) <add> var commentIndex = bytes.Index(currentLine, commentMarker) <ide> if ( commentIndex == -1 ) { <del> noCommentsResolvConf = append(noCommentsResolvConf, cleanLine...) <add> output = append(output, currentLine...) <ide> } else { <del> noCommentsResolvConf = append(noCommentsResolvConf, cleanLine[:commentIndex]...) <add> output = append(output, currentLine[:commentIndex]...) <ide> } <add> output = append(output, []byte("\n")...) <ide> } <del> return noCommentsResolvConf <add> return output <ide> } <ide> <ide> func ParseHost(host string, port int, addr string) string {
1
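The generalised StripComments now emits one newline per input line, so line structure survives the pass instead of the output collapsing onto a single line. The same idea recast in JavaScript purely for illustration (this helper is not part of the Docker codebase):

```js
// Drop everything after a comment marker on each line, preserving line breaks.
function stripComments(input, commentMarker = '#') {
  return input
    .split('\n')
    .map((line) => {
      const commentIndex = line.indexOf(commentMarker);
      return commentIndex === -1 ? line : line.slice(0, commentIndex);
    })
    .join('\n');
}

const resolvConf = 'nameserver 8.8.8.8 # public DNS\n# local override\nnameserver 127.0.0.1\n';
console.log(stripComments(resolvConf));
// -> "nameserver 8.8.8.8 \n\nnameserver 127.0.0.1\n": comments gone, line breaks intact.
```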
Python
Python
switch python intersphinx link from dev to stable
4f1283028f9f1e271cf4cbedc9ebe044a5c25be5
<ide><path>doc/source/conf.py <ide> def setup(app): <ide> # ----------------------------------------------------------------------------- <ide> intersphinx_mapping = { <ide> 'neps': ('https://numpy.org/neps', None), <del> 'python': ('https://docs.python.org/dev', None), <add> 'python': ('https://docs.python.org/3', None), <ide> 'scipy': ('https://docs.scipy.org/doc/scipy/reference', None), <ide> 'matplotlib': ('https://matplotlib.org/stable', None), <ide> 'imageio': ('https://imageio.readthedocs.io/en/stable', None),
1
PHP
PHP
show the last 200 queries instead of the first 200
3e0fa0009bf91651ddebe9f08265095f9cfb31b3
<ide><path>lib/Cake/Model/Datasource/DboSource.php <ide> public function logQuery($sql, $params = array()) { <ide> $this->_queriesCnt++; <ide> $this->_queriesTime += $this->took; <ide> $this->_queriesLog[] = array( <del> 'query' => $sql, <del> 'params' => $params, <del> 'affected' => $this->affected, <del> 'numRows' => $this->numRows, <del> 'took' => $this->took <add> 'query' => $sql, <add> 'params' => $params, <add> 'affected' => $this->affected, <add> 'numRows' => $this->numRows, <add> 'took' => $this->took <ide> ); <ide> if (count($this->_queriesLog) > $this->_queriesLogMax) { <del> array_pop($this->_queriesLog); <add> array_shift($this->_queriesLog); <ide> } <ide> } <ide>
1
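array_shift evicts from the front of the log, so once _queriesLogMax is reached the oldest query is dropped and the most recent 200 remain; array_pop had been discarding the entry that was just pushed. The bounded-log idea recast in JavaScript for illustration (class and method names are made up):

```js
// Keep only the most recent `max` entries: append at the back, evict from the front.
class QueryLog {
  constructor(max = 200) {
    this.max = max;
    this.entries = [];
  }

  log(query, took) {
    this.entries.push({ query, took });
    if (this.entries.length > this.max) {
      this.entries.shift(); // drop the oldest entry, not the one just added
    }
  }
}

const log = new QueryLog(2);
['SELECT 1', 'SELECT 2', 'SELECT 3'].forEach((q) => log.log(q, 1));
console.log(log.entries.map((e) => e.query)); // ['SELECT 2', 'SELECT 3']
```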
Text
Text
add v3.1.3 to changelog
16b8df445f008d34cd56e6395ba9b1cea62a872e
<ide><path>CHANGELOG.md <ide> - [#16462](https://github.com/emberjs/ember.js/pull/16462) [CLEANUP] Remove deprecated `MODEL_FACTORY_INJECTIONS`. <ide> - [emberjs/rfcs#286](https://github.com/emberjs/rfcs/blob/master/text/0286-block-let-template-helper.md) [FEATURE] Enabled block `let` handlebars helper by default. <ide> <add>### v3.1.3 (June 21, 2018) <add>- [#16754](https://github.com/emberjs/ember.js/pull/16754) [BUGFIX] Fix container destroy timing <add> <ide> ### v3.1.2 (May 7, 2018) <ide> - [#16600](https://github.com/emberjs/ember.js/pull/16600) [BUGFIX] Fix SimpleHelper memory leak <ide> - [#16605](https://github.com/emberjs/ember.js/pull/16605) [BUGFIX] Use resetCache on container destroy.
1
Ruby
Ruby
blame controller files #684
8322ea45c1087325fd428a201749791550bc7859
<ide><path>activesupport/lib/active_support/dependencies.rb <ide> def path() [] end <ide> <ide> # Load the source file at the given file path <ide> def load_file!(file_path) <del> root.module_eval(IO.read(file_path), file_path, 1) <add> begin root.module_eval(IO.read(file_path), file_path, 1) <add> rescue Object => exception <add> exception.blame_file! file_path <add> raise <add> end <ide> end <ide> <ide> # Erase all items in this module <ide> def blame_file!(file) <ide> (@blamed_files ||= []).unshift file <ide> end <ide> <del> attr_reader :blamed_files <add> def blamed_files <add> @blamed_files ||= [] <add> end <ide> <ide> def describe_blame <ide> return nil if blamed_files.empty? <del> "This error occured while loading the following files:\n #{blamed_files.join '\n '}" <add> "This error occured while loading the following files:\n #{blamed_files.join "\n "}" <ide> end <del>end <ide>\ No newline at end of file <add>end
1
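The Ruby patch does three things: it tags the exception with the file being loaded before re-raising, memoizes blamed_files so it is never nil, and switches the join separator to double quotes so \n becomes a real newline. The annotate-and-rethrow pattern recast in JavaScript for illustration (function and property names here are invented):

```js
// Attach the failing file to the error, then let it keep propagating.
function loadFile(filePath, evaluate) {
  try {
    return evaluate(filePath);
  } catch (error) {
    // Lazily create the list so it is never undefined, mirroring the memoized accessor.
    (error.blamedFiles = error.blamedFiles || []).unshift(filePath);
    throw error;
  }
}

function describeBlame(error) {
  const files = error.blamedFiles || [];
  if (files.length === 0) return null;
  return `This error occurred while loading the following files:\n  ${files.join('\n  ')}`;
}
```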
Javascript
Javascript
use the same coding style as the other lines
df1c4369ebe7201eb6e97504b4acc574090450fc
<ide><path>examples/js/postprocessing/RenderPass.js <ide> THREE.RenderPass.prototype = Object.assign( Object.create( THREE.Pass.prototype <ide> var oldAutoClear = renderer.autoClear; <ide> renderer.autoClear = false; <ide> <del> var oldOverrideMaterial = this.scene.overrideMaterial; <add> var oldClearColor, oldClearAlpha, oldOverrideMaterial; <ide> <ide> if ( this.overrideMaterial !== undefined ) { <ide> <add> oldOverrideMaterial = this.scene.overrideMaterial; <add> <ide> this.scene.overrideMaterial = this.overrideMaterial; <ide> <ide> } <ide> <del> var oldClearColor, oldClearAlpha; <del> <ide> if ( this.clearColor ) { <ide> <ide> oldClearColor = renderer.getClearColor().getHex(); <ide> THREE.RenderPass.prototype = Object.assign( Object.create( THREE.Pass.prototype <ide> <ide> } <ide> <del> this.scene.overrideMaterial = oldOverrideMaterial; <add> if ( this.overrideMaterial !== undefined ) { <add> <add> this.scene.overrideMaterial = oldOverrideMaterial; <add> <add> } <add> <ide> renderer.autoClear = oldAutoClear; <ide> <ide> } <ide><path>examples/jsm/postprocessing/RenderPass.js <ide> RenderPass.prototype = Object.assign( Object.create( Pass.prototype ), { <ide> var oldAutoClear = renderer.autoClear; <ide> renderer.autoClear = false; <ide> <del> this.scene.overrideMaterial = this.overrideMaterial; <ide> <del> var oldClearColor, oldClearAlpha; <add> var oldClearColor, oldClearAlpha, oldOverrideMaterial; <add> <add> if ( this.overrideMaterial !== undefined ) { <add> <add> oldOverrideMaterial = this.scene.overrideMaterial; <add> <add> this.scene.overrideMaterial = this.overrideMaterial; <add> <add> } <ide> <ide> if ( this.clearColor ) { <ide> <ide> RenderPass.prototype = Object.assign( Object.create( Pass.prototype ), { <ide> <ide> } <ide> <del> this.scene.overrideMaterial = null; <add> if ( this.overrideMaterial !== undefined ) { <add> <add> this.scene.overrideMaterial = oldOverrideMaterial; <add> <add> } <add> <ide> renderer.autoClear = oldAutoClear; <ide> <ide> }
2
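Both copies of RenderPass now share the same guarded save/restore shape: scene.overrideMaterial is stashed only when this pass actually configures an override, and restored under the same condition so another pass's override is never clobbered with null. A stripped-down sketch of the pattern; the render internals are elided and the names mirror the example rather than the full three.js API:

```js
// Guarded save/restore: touch shared scene state only when this pass overrides it.
function renderPass(renderer, scene, camera, overrideMaterial) {
  let oldOverrideMaterial;

  if (overrideMaterial !== undefined) {
    oldOverrideMaterial = scene.overrideMaterial;
    scene.overrideMaterial = overrideMaterial;
  }

  renderer.render(scene, camera);

  if (overrideMaterial !== undefined) {
    // Put back whatever was there before, which may itself be another override.
    scene.overrideMaterial = oldOverrideMaterial;
  }
}
```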